diff --git a/.cloudbuild/jdbc_nightly.yaml b/.cloudbuild/jdbc_nightly.yaml
new file mode 100644
index 0000000000..5c7acd472b
--- /dev/null
+++ b/.cloudbuild/jdbc_nightly.yaml
@@ -0,0 +1,53 @@
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# Cloud Build job that runs the BigQuery JDBC integration and nightly tests
+# and publishes a nightly build of the JDBC driver.
+options:
+  workerPool: 'projects/bigquery-devtools-drivers/locations/us-east1/workerPools/java-bigquery-jdbc-pool'
+  dynamic_substitutions: true
+  logging: CLOUD_LOGGING_ONLY
+
+timeout: 10000s
+steps:
+- name: 'gcr.io/cloud-devrel-public-resources/java11'
+  id: "IT Tests"
+  timeout: 10000s
+  entrypoint: 'bash'
+  args: ['.kokoro/build.sh']
+  env:
+  - 'JOB_TYPE=jdbc-integration'
+  secretEnv: ['SA_EMAIL', 'KMS_RESOURCE_PATH', 'SA_SECRET']
+- name: 'gcr.io/cloud-devrel-public-resources/java11'
+  id: "IT Nightly Tests"
+  timeout: 10000s
+  entrypoint: 'bash'
+  args: ['.kokoro/build.sh']
+  env:
+  - 'JOB_TYPE=jdbc-nightly-integration'
+- name: 'gcr.io/cloud-devrel-public-resources/java11'
+  id: "Nightly build"
+  timeout: 10000s
+  entrypoint: 'bash'
+  args: ['.kokoro/jdbc-release.sh']
+  env:
+  - 'NIGHTLY_RELEASE_GCS_BUCKET=$_NIGHTLY_RELEASE_GCS_BUCKET'
+
+availableSecrets:
+  secretManager:
+  - versionName: projects/$PROJECT_ID/secrets/jdbc-presubmit-sa-email/versions/latest
+    env: 'SA_EMAIL'
+  - versionName: projects/$PROJECT_ID/secrets/kms_resource_path/versions/latest
+    env: 'KMS_RESOURCE_PATH'
+  - versionName: projects/$PROJECT_ID/secrets/GoogleJDBCServiceAccountSecret/versions/latest
+    env: 'SA_SECRET'
diff --git a/.cloudbuild/jdbc_presubmit.yaml b/.cloudbuild/jdbc_presubmit.yaml
new file mode 100644
index 0000000000..c5896967ef
--- /dev/null
+++ b/.cloudbuild/jdbc_presubmit.yaml
@@ -0,0 +1,41 @@
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# Cloud Build presubmit job that runs the BigQuery JDBC driver
+# integration tests against a pull request.
+options:
+  workerPool: 'projects/bigquery-devtools-drivers/locations/us-east1/workerPools/java-bigquery-jdbc-pool'
+  dynamic_substitutions: true
+  logging: CLOUD_LOGGING_ONLY
+
+substitutions:
+  _JOB_TYPE: "jdbc-integration"
+
+timeout: 2000s
+steps:
+- name: 'gcr.io/cloud-devrel-public-resources/java11'
+  id: "IT Tests"
+  timeout: 2000s
+  entrypoint: 'bash'
+  args: ['.kokoro/build.sh']
+  env:
+  - 'JOB_TYPE=${_JOB_TYPE}'
+  secretEnv: ['SA_EMAIL', 'KMS_RESOURCE_PATH', 'SA_SECRET']
+availableSecrets:
+  secretManager:
+  - versionName: projects/$PROJECT_ID/secrets/jdbc-presubmit-sa-email/versions/latest
+    env: 'SA_EMAIL'
+  - versionName: projects/$PROJECT_ID/secrets/kms_resource_path/versions/latest
+    env: 'KMS_RESOURCE_PATH'
+  - versionName: projects/$PROJECT_ID/secrets/GoogleJDBCServiceAccountSecret/versions/latest
+    env: 'SA_SECRET'
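Both Cloud Build configs route into the shared `.kokoro/build.sh` entry point; the presubmit file additionally parameterizes the Maven job through the `_JOB_TYPE` substitution. As a rough sketch of how such a config could be exercised by hand (assuming Cloud Build rights on the `bigquery-devtools-drivers` project that owns the worker pool and secrets; the flag values are illustrative):

```bash
# Hypothetical manual trigger of the JDBC presubmit build.
gcloud builds submit . \
  --project=bigquery-devtools-drivers \
  --config=.cloudbuild/jdbc_presubmit.yaml \
  --substitutions=_JOB_TYPE=jdbc-integration
```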
diff --git a/.cloudbuild/samples_build.yaml b/.cloudbuild/samples_build.yaml
new file mode 100644
index 0000000000..354adf9720
--- /dev/null
+++ b/.cloudbuild/samples_build.yaml
@@ -0,0 +1,45 @@
+steps:
+- name: gcr.io/cloud-devrel-public-resources/java8
+  entrypoint: ls
+  args: [
+    '-alt',
+  ]
+- name: gcr.io/cloud-devrel-public-resources/java8
+  entrypoint: curl
+  args: [
+    '--header',
+    'Metadata-Flavor: Google',
+    'http://metadata.google.internal/computeMetadata/v1/instance/service-accounts/default/email'
+  ]
+- name: gcr.io/cloud-devrel-public-resources/java8
+  entrypoint: pwd
+- name: gcr.io/cloud-devrel-public-resources/java8
+  entrypoint: bash
+  args: [
+    '.kokoro/build.sh'
+  ]
+  env:
+  - 'JOB_TYPE=samples'
+  - 'BIGQUERY_PROJECT_ID=cloud-java-ci-sample'
+  - 'GOOGLE_CLOUD_PROJECT=cloud-java-ci-sample'
+  - 'GCS_BUCKET=java-samples-bigquery'
+  - 'BIGQUERY_TEST_TABLE=test_table'
+  - 'BIGQUERY_MODEL_NAME=natality_model'
+  - 'BIGQUERY_MODEL_TEST_PROJECT_ID=bigquery-public-data'
+  - 'OMNI_PROJECT_ID=sunlit-ace-276222'
+  - 'OMNI_EXTERNAL_TABLE_NAME=devrel_test_table'
+  - 'BIGQUERY_TABLE2=table2'
+  - 'BIGQUERY_TABLE1=table1'
+  - 'BIGTABLE_TESTING_INSTANCE=bigquery-samples-instance'
+  - 'BIGQUERY_DATASET_NAME=bigquery_test_dataset'
+  - 'KOKORO_GFILE_DIR=/workspace'
+  # This key is not available yet
+  - 'BIGQUERY_KMS_KEY_NAME=projects/cloud-java-ci-sample/locations/us/keyRings/bq-kms-key/cryptoKeys/bq-kms-key'
+- name: gcr.io/cloud-devrel-public-resources/java8
+  entrypoint: echo
+  args: [
+    'Sample job succeeded',
+  ]
+timeout: 3600s
+options:
+  defaultLogsBucketBehavior: REGIONAL_USER_OWNED_BUCKET
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index f7b843e668..e3f21432d4 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -4,9 +4,14 @@
 # For syntax help see:
 # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
 
-# The @googleapis/api-bigquery is the default owner for changes in this repo
-*       @googleapis/yoshi-java @googleapis/api-bigquery
-**/*.java @googleapis/api-bigquery
+# The @googleapis/bigquery-team is the default owner for changes in this repo
+*       @googleapis/cloud-sdk-java-team @googleapis/bigquery-team
 
 # The java-samples-reviewers team is the default owner for samples changes
-samples/**/*.java @googleapis/java-samples-reviewers
+samples/**/*.java @googleapis/cloud-sdk-java-team @googleapis/java-samples-reviewers
+
+# Generated snippets should not be owned by samples reviewers
+samples/snippets/generated/ @googleapis/cloud-sdk-java-team
+
+# JDBC Driver
+google-cloud-bigquery-jdbc/** @googleapis/bigquery-developer-tools-team @googleapis/cloud-sdk-java-team
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 93f1b51330..c49cd2b618 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -21,7 +21,7 @@ If you are still having issues, please include as much information as possible:
    General, Core, and Other are also allowed as types
 2. OS type and version:
 3. Java version:
-4. bigquery version(s):
+4. version(s):
 
 #### Steps to reproduce
 
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 0eddd3d4fc..51efda31d4 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -5,3 +5,6 @@ Thank you for opening a Pull Request! Before submitting your PR, there are a few
 - [ ] Appropriate docs were updated (if necessary)
 
 Fixes # ☕️
+
+If you write sample code, please follow the [samples format](
+https://github.com/GoogleCloudPlatform/java-docs-samples/blob/main/SAMPLE_FORMAT.md).
diff --git a/.github/readme/synth.py b/.github/auto-label.yaml
similarity index 73%
rename from .github/readme/synth.py
rename to .github/auto-label.yaml
index 7b48cc28d3..4caef688b7 100644
--- a/.github/readme/synth.py
+++ b/.github/auto-label.yaml
@@ -1,4 +1,4 @@
-# Copyright 2020 Google LLC
+# Copyright 2021 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,9 +11,5 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-"""This script is used to synthesize generated the README for this library."""
-
-from synthtool.languages import java
-
-java.custom_templates(["java_library/README.md"])
+requestsize:
+  enabled: true
diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml
index 1a23ea42b1..2176b05432 100644
--- a/.github/blunderbuss.yml
+++ b/.github/blunderbuss.yml
@@ -1,5 +1,5 @@
 # Configuration for the Blunderbuss GitHub app. For more info see
-# https://github.com/googleapis/repo-automation-bots/tree/master/packages/blunderbuss
+# https://github.com/googleapis/repo-automation-bots/tree/main/packages/blunderbuss
 assign_prs_by:
 - labels:
   - samples
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000000..203f9eaccf
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,19 @@
+version: 2
+updates:
+  - package-ecosystem: "maven"
+    directory: "/"
+    schedule:
+      interval: "daily"
+    # Disable version updates for Maven dependencies
+    # we use renovate-bot as well as shared-dependencies BOM to update maven dependencies.
+    ignore:
+      - dependency-name: "*"
+  - package-ecosystem: "pip"
+    directory: "/"
+    schedule:
+      interval: "daily"
+    # Disable version updates for pip dependencies
+    # If a security vulnerability comes in, we will be notified about
+    # it via template in the synthtool repository.
+    ignore:
+      - dependency-name: "*"
diff --git a/.github/flakybot.yaml b/.github/flakybot.yaml
new file mode 100644
index 0000000000..cb83375f98
--- /dev/null
+++ b/.github/flakybot.yaml
@@ -0,0 +1 @@
+issuePriority: p2
diff --git a/.github/generated-files-bot.yml b/.github/generated-files-bot.yml
deleted file mode 100644
index 47c2ba132e..0000000000
--- a/.github/generated-files-bot.yml
+++ /dev/null
@@ -1,11 +0,0 @@
-externalManifests:
-- type: json
-  file: 'synth.metadata'
-  jsonpath: '$.generatedFiles[*]'
-- type: json
-  file: '.github/readme/synth.metadata/synth.metadata'
-  jsonpath: '$.generatedFiles[*]'
-ignoreAuthors:
-- 'renovate-bot'
-- 'yoshi-automation'
-- 'release-please[bot]'
diff --git a/.github/readme/synth.metadata/synth.metadata b/.github/readme/synth.metadata/synth.metadata
deleted file mode 100644
index c8dc51757a..0000000000
--- a/.github/readme/synth.metadata/synth.metadata
+++ /dev/null
@@ -1,18 +0,0 @@
-{
-  "sources": [
-    {
-      "git": {
-        "name": ".",
-        "remote": "https://github.com/googleapis/java-bigquery.git",
-        "sha": "512994faf521e8374da743406e0cb5e366a9d57c"
-      }
-    },
-    {
-      "git": {
-        "name": "synthtool",
-        "remote": "https://github.com/googleapis/synthtool.git",
-        "sha": "79ab0b44a2cc7d803d07c107f9faf07729fc4012"
-      }
-    }
-  ]
-}
\ No newline at end of file
diff --git a/.github/release-please.yml b/.github/release-please.yml
index dce2c84509..412c1b796e 100644
--- a/.github/release-please.yml
+++ b/.github/release-please.yml
@@ -1,2 +1,24 @@
+branches:
+  - branch: 1.127.12-sp
+    releaseType: java-lts
+  - branch: java7
+  - branch: 2.3.x
+    releaseType: java-backport
+  - branch: 2.10.x
+    releaseType: java-backport
+  - branch: 2.19.x
+    releaseType: java-backport
+  - branch: 2.35.x
+    releaseType: java-backport
+  - branch: 2.40.x
+    releaseType: java-backport
+  - branch: 2.48.x
+    releaseType: java-backport
+  - branch: 2.52.x
+    releaseType: java-backport
+  - branch: 2.51.x
+    releaseType: java-backport
+  - branch: protobuf-4.x-rc
+    manifest: true
+handleGHRelease: true
 releaseType: java-yoshi
-bumpMinorPreMajor: true
\ No newline at end of file
diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml
new file mode 100644
index 0000000000..57d2b0f830
--- /dev/null
+++ b/.github/release-trigger.yml
@@ -0,0 +1,2 @@
+enabled: true
+multiScmName: java-bigquery
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
index d72318bf65..3649bc8ac2 100644
--- a/.github/sync-repo-settings.yaml
+++ b/.github/sync-repo-settings.yaml
@@ -1,52 +1,169 @@
-# Whether or not rebase-merging is enabled on this repository.
-# Defaults to `true`
 rebaseMergeAllowed: true
-
-# Whether or not squash-merging is enabled on this repository.
-# Defaults to `true`
 squashMergeAllowed: true
-
-# Whether or not PRs are merged with a merge commit on this repository.
-# Defaults to `false`
 mergeCommitAllowed: false
-
-# Rules for master branch protection
 branchProtectionRules:
-# Identifies the protection rule pattern. Name of the branch to be protected.
-# Defaults to `master`
-- pattern: master
-  # Can admins overwrite branch protection.
-  # Defaults to `true`
-  isAdminEnforced: true
-  # Number of approving reviews required to update matching branches.
-  # Defaults to `1`
-  requiredApprovingReviewCount: 1
-  # Are reviews from code owners required to update matching branches.
-  # Defaults to `false`
-  requiresCodeOwnerReviews: true
-  # Require up to date branches
-  requiresStrictStatusChecks: false
-  # List of required status check contexts that must pass for commits to be accepted to matching branches.
-  requiredStatusCheckContexts:
-  - "dependencies (8)"
-  - "dependencies (11)"
-  - "linkage-monitor"
-  - "lint"
-  - "clirr"
-  - "units (7)"
-  - "units (8)"
-  - "units (11)"
-  - "Kokoro - Test: Integration"
-  - "cla/google"
-# List of explicit permissions to add (additive only)
+  - pattern: main
+    isAdminEnforced: true
+    requiredApprovingReviewCount: 1
+    requiresCodeOwnerReviews: true
+    requiresStrictStatusChecks: false
+    requiredStatusCheckContexts:
+      - dependencies (17)
+      - lint
+      - clirr
+      - units (8)
+      - units (11)
+      - 'Kokoro - Test: Integration'
+      - cla/google
+      - javadoc
+      - unmanaged_dependency_check
+  - pattern: 1.127.12-sp
+    isAdminEnforced: true
+    requiredApprovingReviewCount: 1
+    requiresCodeOwnerReviews: true
+    requiresStrictStatusChecks: false
+    requiredStatusCheckContexts:
+      - dependencies (8)
+      - dependencies (11)
+      - lint
+      - clirr
+      - units (7)
+      - units (8)
+      - units (11)
+      - 'Kokoro - Test: Integration'
+      - cla/google
+  - pattern: java7
+    isAdminEnforced: true
+    requiredApprovingReviewCount: 1
+    requiresCodeOwnerReviews: true
+    requiresStrictStatusChecks: false
+    requiredStatusCheckContexts:
+      - dependencies (8)
+      - dependencies (11)
+      - lint
+      - clirr
+      - units (7)
+      - units (8)
+      - units (11)
+      - 'Kokoro - Test: Integration'
+      - cla/google
+  - pattern: 2.3.x
+    isAdminEnforced: true
+    requiredApprovingReviewCount: 1
+    requiresCodeOwnerReviews: true
+    requiresStrictStatusChecks: false
+    requiredStatusCheckContexts:
+      - dependencies (8)
+      - dependencies (11)
+      - lint
+      - clirr
+      - units (8)
+      - units (11)
+      - 'Kokoro - Test: Integration'
+      - cla/google
+  - pattern: 2.10.x
+    isAdminEnforced: true
+    requiredApprovingReviewCount: 1
+    requiresCodeOwnerReviews: true
+    requiresStrictStatusChecks: false
+    requiredStatusCheckContexts:
+      - dependencies (8)
+      - dependencies (11)
+      - lint
+      - clirr
+      - units (8)
+      - units (11)
+      - 'Kokoro - Test: Integration'
+      - cla/google
+  - pattern: 2.19.x
+    isAdminEnforced: true
+    requiredApprovingReviewCount: 1
+    requiresCodeOwnerReviews: true
+    requiresStrictStatusChecks: false
+    requiredStatusCheckContexts:
+      - dependencies (8)
+      - dependencies (11)
+      - lint
+      - clirr
+      - units (8)
+      - units (11)
+      - 'Kokoro - Test: Integration'
+      - cla/google
+  - pattern: 2.35.x
+    isAdminEnforced: true
+    requiredApprovingReviewCount: 1
+    requiresCodeOwnerReviews: true
+    requiresStrictStatusChecks: false
+    requiredStatusCheckContexts:
+      - dependencies (17)
+      - lint
+      - clirr
+      - units (8)
+      - units (11)
+      - 'Kokoro - Test: Integration'
+      - cla/google
+      - javadoc
+  - pattern: 2.40.x
+    isAdminEnforced: true
+    requiredApprovingReviewCount: 1
+    requiresCodeOwnerReviews: true
+    requiresStrictStatusChecks: false
+    requiredStatusCheckContexts:
+      - dependencies (17)
+      - lint
+      - clirr
+      - units (8)
+      - units (11)
+      - 'Kokoro - Test: Integration'
+      - cla/google
+      - javadoc
+  - pattern: 2.48.x
+    isAdminEnforced: true
+    requiredApprovingReviewCount: 1
+    requiresCodeOwnerReviews: true
+    requiresStrictStatusChecks: false
+    requiredStatusCheckContexts:
+      - dependencies (17)
+      - lint
+      - clirr
+      - units (8)
+      - units (11)
+      - 'Kokoro - Test: Integration'
+      - cla/google
+      - javadoc
+      - unmanaged_dependency_check
+  - pattern: 2.52.x
+    isAdminEnforced: true
+    requiredApprovingReviewCount: 1
+    requiresCodeOwnerReviews: true
+    requiresStrictStatusChecks: false
+    requiredStatusCheckContexts:
+      - dependencies (17)
+      - lint
+      - clirr
+      - units (8)
+      - units (11)
+      - 'Kokoro - Test: Integration'
+      - cla/google
+      - javadoc
+      - unmanaged_dependency_check
+  - pattern: 2.51.x
+    isAdminEnforced: true
+    requiredApprovingReviewCount: 1
+    requiresCodeOwnerReviews: true
+    requiresStrictStatusChecks: false
+    requiredStatusCheckContexts:
+      - dependencies (17)
+      - lint
+      - clirr
+      - units (8)
+      - units (11)
+      - 'Kokoro - Test: Integration'
+      - cla/google
+      - javadoc
+      - unmanaged_dependency_check
 permissionRules:
-# Team slug to add to repository permissions
-- team: api-bigquery
-  # Access level required, one of push|pull|admin|maintain|triage
-  permission: admin
-- team: yoshi-java
-  permission: push
-- team: yoshi-admins
-  permission: admin
-- team: yoshi-java-admins
-  permission: admin
+  - team: bigquery-team
+    permission: push
+  - team: cloud-sdk-java-team
+    permission: push
diff --git a/.github/trusted-contribution.yml b/.github/trusted-contribution.yml
index f247d5c789..88d3ac9bf1 100644
--- a/.github/trusted-contribution.yml
+++ b/.github/trusted-contribution.yml
@@ -1,2 +1,9 @@
 trustedContributors:
-- renovate-bot
\ No newline at end of file
+- renovate-bot
+- gcf-owl-bot[bot]
+
+annotations:
+- type: comment
+  text: "/gcbrun"
+- type: label
+  text: "kokoro:force-run"
diff --git a/.github/workflows/approve-readme.yaml b/.github/workflows/approve-readme.yaml
index 7513acaebc..59f00b8eb6 100644
--- a/.github/workflows/approve-readme.yaml
+++ b/.github/workflows/approve-readme.yaml
@@ -1,3 +1,18 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# Github action job to test core java library features on
+# downstream client libraries before they are released.
 on:
   pull_request:
 name: auto-merge-readme
@@ -6,7 +21,7 @@ jobs:
     runs-on: ubuntu-latest
     if: github.repository_owner == 'googleapis' && github.head_ref == 'autosynth-readme'
     steps:
-    - uses: actions/github-script@v3
+    - uses: actions/github-script@v7
       with:
         github-token: ${{secrets.YOSHI_APPROVER_TOKEN}}
        script: |
diff --git a/.github/workflows/auto-release.yaml b/.github/workflows/auto-release.yaml
index 7c8816a7d7..b27e937b45 100644
--- a/.github/workflows/auto-release.yaml
+++ b/.github/workflows/auto-release.yaml
@@ -1,12 +1,30 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# Github action job to test core java library features on
+# downstream client libraries before they are released.
 on:
   pull_request:
 name: auto-release
+permissions:
+  contents: read
 jobs:
   approve:
+    permissions: write-all
     runs-on: ubuntu-latest
     if: contains(github.head_ref, 'release-please')
     steps:
-    - uses: actions/github-script@v3
+    - uses: actions/github-script@v7
       with:
         github-token: ${{secrets.YOSHI_APPROVER_TOKEN}}
         debug: true
@@ -16,13 +34,13 @@ jobs:
             return;
           }
 
-          // only approve PRs like "chore(master): release "
-          if ( !context.payload.pull_request.title.startsWith("chore(master): release") ) {
+          // only approve PRs like "chore(main): release "
+          if ( !context.payload.pull_request.title.startsWith("chore(main): release") ) {
             return;
           }
 
           // only approve PRs with pom.xml and versions.txt changes
-          const filesPromise = github.pulls.listFiles.endpoint({
+          const filesPromise = github.rest.pulls.listFiles.endpoint({
             owner: context.repo.owner,
             repo: context.repo.repo,
             pull_number: context.payload.pull_request.number,
@@ -54,7 +72,7 @@ jobs:
             return;
           }
 
-          const promise = github.pulls.list.endpoint({
+          const promise = github.rest.pulls.list.endpoint({
             owner: context.repo.owner,
             repo: context.repo.repo,
             state: 'open'
@@ -71,7 +89,7 @@ jobs:
           }
 
           // approve release PR
-          await github.pulls.createReview({
+          await github.rest.pulls.createReview({
             owner: context.repo.owner,
             repo: context.repo.repo,
             body: 'Rubber stamped release!',
@@ -80,7 +98,7 @@ jobs:
           });
 
           // attach kokoro:force-run and automerge labels
-          await github.issues.addLabels({
+          await github.rest.issues.addLabels({
             owner: context.repo.owner,
             repo: context.repo.repo,
             issue_number: context.payload.pull_request.number,
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index def8b3a2c8..b9b8be0c36 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -1,34 +1,72 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# Github action job to test core java library features on
+# downstream client libraries before they are released.
 on:
   push:
     branches:
-    - master
+    - main
   pull_request:
 name: ci
 jobs:
   units:
     runs-on: ubuntu-latest
     strategy:
+      fail-fast: false
       matrix:
-        java: [7, 8, 11]
+        java: [11, 17, 21, 25]
     steps:
-    - uses: actions/checkout@v2
-    - uses: actions/setup-java@v1
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
       with:
+        distribution: temurin
        java-version: ${{matrix.java}}
     - run: java -version
     - run: .kokoro/build.sh
       env:
         JOB_TYPE: test
-    - name: coverage
-      uses: codecov/codecov-action@v1
-      with:
-        name: actions ${{matrix.java}}
+  units-java8:
+    # Build with Java 17, then run the tests on a Java 8 runtime
+    name: "units (8)"
+    runs-on: ubuntu-latest
+    steps:
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
+      with:
+        java-version: 8
+        distribution: temurin
+    - name: "Set jvm system property environment variable for surefire plugin (unit tests)"
+      # Maven surefire plugin (unit tests) allows us to specify JVM to run the tests.
+      # https://maven.apache.org/surefire/maven-surefire-plugin/test-mojo.html#jvm
+      run: echo "SUREFIRE_JVM_OPT=-Djvm=${JAVA_HOME}/bin/java" >> $GITHUB_ENV
+      shell: bash
+    - uses: actions/setup-java@v4
+      with:
+        java-version: 17
+        distribution: temurin
+    - run: .kokoro/build.sh
+      env:
+        JOB_TYPE: test
   windows:
     runs-on: windows-latest
     steps:
+    - name: Support longpaths
+      run: git config --system core.longpaths true
-    - uses: actions/checkout@v2
-    - uses: actions/setup-java@v1
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
       with:
+        distribution: temurin
         java-version: 8
     - run: java -version
     - run: .kokoro/build.bat
@@ -38,34 +76,35 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        java: [8, 11]
+        java: [17]
     steps:
-    - uses: actions/checkout@v2
-    - uses: actions/setup-java@v1
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
       with:
+        distribution: temurin
         java-version: ${{matrix.java}}
     - run: java -version
     - run: .kokoro/dependencies.sh
-  linkage-monitor:
+  javadoc:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v2
-    - uses: actions/setup-java@v1
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
       with:
-        java-version: 8
+        distribution: temurin
+        java-version: 17
     - run: java -version
-    - name: Install artifacts to local Maven repository
-      run: .kokoro/build.sh
-      shell: bash
-    - name: Validate any conflicts with regard to com.google.cloud:libraries-bom (latest release)
-      uses: GoogleCloudPlatform/cloud-opensource-java/linkage-monitor@v1-linkagemonitor
+    - run: .kokoro/build.sh
+      env:
+        JOB_TYPE: javadoc
   lint:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v2
-    - uses: actions/setup-java@v1
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
       with:
-        java-version: 8
+        distribution: temurin
+        java-version: 17
     - run: java -version
     - run: .kokoro/build.sh
       env:
@@ -73,11 +112,12 @@
   clirr:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v2
-    - uses: actions/setup-java@v1
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
       with:
+        distribution: temurin
         java-version: 8
     - run: java -version
     - run: .kokoro/build.sh
       env:
-        JOB_TYPE: clirr
\ No newline at end of file
+        JOB_TYPE: clirr
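The new `units-java8` job installs JDK 8 first, records its path in `SUREFIRE_JVM_OPT`, then installs JDK 17 on top, so Maven compiles with 17 while Surefire forks the test JVM from the JDK 8 install. A minimal local sketch of the same trick (the JDK paths are assumptions):

```bash
# Build with a newer JDK, but point Surefire's documented `jvm` parameter
# at a JDK 8 java binary so the tests execute on the old runtime.
export JAVA_HOME=/usr/lib/jvm/temurin-17-jdk    # illustrative install path
mvn test -B -Djvm=/usr/lib/jvm/temurin-8-jdk/bin/java
```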
diff --git a/.github/workflows/renovate_config_check.yaml b/.github/workflows/renovate_config_check.yaml
new file mode 100644
index 0000000000..47b9e87c98
--- /dev/null
+++ b/.github/workflows/renovate_config_check.yaml
@@ -0,0 +1,25 @@
+name: Renovate Bot Config Validation
+
+on:
+  pull_request:
+    paths:
+      - 'renovate.json'
+
+jobs:
+  renovate_bot_config_validation:
+    runs-on: ubuntu-24.04
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Set up Node.js
+        uses: actions/setup-node@v4
+        with:
+          node-version: '22'
+
+      - name: Install Renovate and Config Validator
+        run: |
+          npm install -g npm@latest
+          npm install --global renovate
+          renovate-config-validator
diff --git a/.github/workflows/samples.yaml b/.github/workflows/samples.yaml
index c46230a78c..f833b80225 100644
--- a/.github/workflows/samples.yaml
+++ b/.github/workflows/samples.yaml
@@ -1,3 +1,18 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# Github action job to test core java library features on
+# downstream client libraries before they are released.
 on:
   pull_request:
 name: samples
@@ -5,10 +20,11 @@ jobs:
   checkstyle:
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v2
-    - uses: actions/setup-java@v1
+    - uses: actions/checkout@v4
+    - uses: actions/setup-java@v4
       with:
-        java-version: 8
+        distribution: temurin
+        java-version: 11
     - name: Run checkstyle
       run: mvn -P lint --quiet --batch-mode checkstyle:check
       working-directory: samples/snippets
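The checkstyle job lints only `samples/snippets`, and the same check can be reproduced locally before pushing, exactly as the workflow runs it:

```bash
# From the repository root, mirror the samples.yaml checkstyle step.
cd samples/snippets
mvn -P lint --quiet --batch-mode checkstyle:check
```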
diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml
new file mode 100644
index 0000000000..69df4a2532
--- /dev/null
+++ b/.github/workflows/scorecard.yml
@@ -0,0 +1,72 @@
+# This workflow uses actions that are not certified by GitHub. They are provided
+# by a third-party and are governed by separate terms of service, privacy
+# policy, and support documentation.
+
+name: Scorecard supply-chain security
+on:
+  # For Branch-Protection check. Only the default branch is supported. See
+  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
+  branch_protection_rule:
+  # To guarantee Maintained check is occasionally updated. See
+  # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
+  schedule:
+    - cron: '0 0 * * *'
+  push:
+    branches: [ "main" ]
+
+# Declare default permissions as read only.
+permissions: read-all
+
+jobs:
+  analysis:
+    name: Scorecard analysis
+    runs-on: ubuntu-latest
+    permissions:
+      # Needed to upload the results to code-scanning dashboard.
+      security-events: write
+      # Needed to publish results and get a badge (see publish_results below).
+      id-token: write
+      # Uncomment the permissions below if installing in a private repository.
+      # contents: read
+      # actions: read
+
+    steps:
+      - name: "Checkout code"
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+        with:
+          persist-credentials: false
+
+      - name: "Run analysis"
+        uses: ossf/scorecard-action@05b42c624433fc40578a4040d5cf5e36ddca8cde # v2.4.2
+        with:
+          results_file: results.sarif
+          results_format: sarif
+          # (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
+          # - you want to enable the Branch-Protection check on a *public* repository, or
+          # - you are installing Scorecard on a *private* repository
+          # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat.
+          # repo_token: ${{ secrets.SCORECARD_TOKEN }}
+
+          # Public repositories:
+          #   - Publish results to OpenSSF REST API for easy access by consumers
+          #   - Allows the repository to include the Scorecard badge.
+          #   - See https://github.com/ossf/scorecard-action#publishing-results.
+          # For private repositories:
+          #   - `publish_results` will always be set to `false`, regardless
+          #     of the value entered here.
+          publish_results: true
+
+      # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
+      # format to the repository Actions tab.
+      - name: "Upload artifact"
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
+        with:
+          name: SARIF file
+          path: results.sarif
+          retention-days: 5
+
+      # Upload the results to GitHub's code scanning dashboard.
+      - name: "Upload to code-scanning"
+        uses: github/codeql-action/upload-sarif@b8d3b6e8af63cde30bdc382c0bc28114f4346c88 # v2.28.1
+        with:
+          sarif_file: results.sarif
diff --git a/.github/workflows/unmanaged_dependency_check.yaml b/.github/workflows/unmanaged_dependency_check.yaml
new file mode 100644
index 0000000000..72e66e858f
--- /dev/null
+++ b/.github/workflows/unmanaged_dependency_check.yaml
@@ -0,0 +1,25 @@
+on:
+  pull_request:
+name: Unmanaged dependency check
+jobs:
+  unmanaged_dependency_check:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-java@v3
+        with:
+          distribution: temurin
+          java-version: 11
+      - name: Install modules
+        shell: bash
+        run: |
+          # No argument to build.sh installs the modules in local Maven
+          # repository
+          .kokoro/build.sh
+      - name: Unmanaged dependency check
+        uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v3.57.0
+        with:
+          # java-bigquery does not produce a BOM. Fortunately the root pom.xml
+          # defines google-cloud-bigquery in dependencyManagement section. So
+          # we can treat this as the BOM to run with the check.
+          bom-path: ./google-cloud-bigquery-bom/pom.xml
diff --git a/.gitignore b/.gitignore
index 8782d86f6e..d5bcd47b28 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,4 +6,14 @@ __pycache__
 .settings
 .classpath
 .DS_Store
+.diff.txt
+.new-list.txt
+.org-list.txt
+SimpleBenchmarkApp/src/main/java/com/google/cloud/App.java
+.flattened-pom.xml
+# Local Test files
+*ITLocalTest.java
+# JDBC-specific files
+google-cloud-bigquery-jdbc/drivers/
+google-cloud-bigquery-jdbc/jacoco*
\ No newline at end of file
diff --git a/.kokoro/build.bat b/.kokoro/build.bat
index 05826ad93f..067cf4a4c4 100644
--- a/.kokoro/build.bat
+++ b/.kokoro/build.bat
@@ -1,3 +1,18 @@
+:: Copyright 2022 Google LLC
+::
+:: Licensed under the Apache License, Version 2.0 (the "License");
+:: you may not use this file except in compliance with the License.
+:: You may obtain a copy of the License at
+::
+:: http://www.apache.org/licenses/LICENSE-2.0
+::
+:: Unless required by applicable law or agreed to in writing, software
+:: distributed under the License is distributed on an "AS IS" BASIS,
+:: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+:: See the License for the specific language governing permissions and
+:: limitations under the License.
+:: Kokoro Windows build job; it runs the same checks as the Linux builds
+:: by delegating to .kokoro/build.sh under Git Bash.
 :: See documentation in type-shell-output.bat
 
 "C:\Program Files\Git\bin\bash.exe" %~dp0build.sh
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 6569c9dd62..9c1dfeeb3d 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -23,8 +23,8 @@ cd ${scriptDir}/..
 # include common functions
 source ${scriptDir}/common.sh
 
-# Print out Java version
-java -version
+# Print out Maven & Java version
+mvn -version
 echo ${JOB_TYPE}
 
 # attempt to install 3 times with exponential backoff (starting with 10 seconds)
@@ -37,7 +37,7 @@ retry_with_backoff 3 10 \
     -Dgcloud.download.skip=true \
     -T 1C
 
-# if GOOGLE_APPLICATION_CREDIENTIALS is specified as a relative path prepend Kokoro root directory onto it
+# if GOOGLE_APPLICATION_CREDENTIALS is specified as a relative path, prepend Kokoro root directory onto it
 if [[ ! -z "${GOOGLE_APPLICATION_CREDENTIALS}" && "${GOOGLE_APPLICATION_CREDENTIALS}" != /* ]]; then
     export GOOGLE_APPLICATION_CREDENTIALS=$(realpath ${KOKORO_GFILE_DIR}/${GOOGLE_APPLICATION_CREDENTIALS})
 fi
@@ -47,19 +47,21 @@ set +e
 
 case ${JOB_TYPE} in
 test)
-    mvn test -B -Dclirr.skip=true -Denforcer.skip=true
+    echo "SUREFIRE_JVM_OPT: ${SUREFIRE_JVM_OPT}"
+    mvn test -B -ntp -Dclirr.skip=true -Denforcer.skip=true ${SUREFIRE_JVM_OPT}
     RETURN_CODE=$?
     ;;
 lint)
-    mvn com.coveo:fmt-maven-plugin:check
+    mvn com.spotify.fmt:fmt-maven-plugin:check -B -ntp
     RETURN_CODE=$?
     ;;
 javadoc)
-    mvn javadoc:javadoc javadoc:test-javadoc
+    mvn javadoc:javadoc javadoc:test-javadoc -B -ntp
     RETURN_CODE=$?
     ;;
 integration)
     mvn -B ${INTEGRATION_TEST_ARGS} \
+      -pl "!google-cloud-bigquery-jdbc" \
       -ntp \
       -Penable-integration-tests \
       -DtrimStackTrace=false \
@@ -69,6 +71,41 @@ integration)
       verify
     RETURN_CODE=$?
     ;;
+jdbc-integration)
+    mvn -B ${INTEGRATION_TEST_ARGS} \
+      -pl "google-cloud-bigquery-jdbc" \
+      -ntp \
+      -Dtest=ITBigQueryJDBCTest \
+      -DtrimStackTrace=false \
+      -Dclirr.skip=true \
+      -Denforcer.skip=true \
+      -fae \
+      verify
+    RETURN_CODE=$?
+    ;;
+jdbc-nightly-integration)
+    mvn -B ${INTEGRATION_TEST_ARGS} \
+      -pl "google-cloud-bigquery-jdbc" \
+      -ntp \
+      -Dtest=ITNightlyBigQueryTest \
+      -DtrimStackTrace=false \
+      -Dclirr.skip=true \
+      -Denforcer.skip=true \
+      -fae \
+      -DargLine="-Xmx32g" \
+      -e \
+      verify
+    RETURN_CODE=$?
+    ;;
+graalvm)
+    # Run Unit and Integration Tests with Native Image
+    mvn -B ${INTEGRATION_TEST_ARGS} \
+      -pl "!google-cloud-bigquery-jdbc" \
+      -ntp \
+      -Pnative \
+      test
+    RETURN_CODE=$?
+    ;;
 samples)
     SAMPLES_DIR=samples
     # only run ITs in snapshot/ on presubmit PRs. run ITs in all 3 samples/ subdirectories otherwise.
@@ -79,9 +116,13 @@ samples)
 
     if [[ -f ${SAMPLES_DIR}/pom.xml ]]
     then
+        for FILE in ${KOKORO_GFILE_DIR}/secret_manager/*-samples-secrets; do
+          [[ -f "$FILE" ]] || continue
+          source "$FILE"
+        done
+
         pushd ${SAMPLES_DIR}
         mvn -B \
-          -Penable-samples \
           -ntp \
           -DtrimStackTrace=false \
           -Dclirr.skip=true \
@@ -95,7 +136,7 @@ samples)
     fi
     ;;
 clirr)
-    mvn -B -Denforcer.skip=true clirr:check
+    mvn -B -ntp -Denforcer.skip=true clirr:check
     RETURN_CODE=$?
     ;;
 *)
@@ -110,7 +151,7 @@ fi
 # fix output location of logs
 bash .kokoro/coerce_logs.sh
 
-if [[ "${ENABLE_BUILD_COP}" == "true" ]]
+if [[ "${ENABLE_FLAKYBOT}" == "true" ]]
 then
     chmod +x ${KOKORO_GFILE_DIR}/linux_amd64/flakybot
     ${KOKORO_GFILE_DIR}/linux_amd64/flakybot -repo=googleapis/java-bigquery
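`build.sh` is a single dispatcher keyed on `JOB_TYPE`, so every CI config above is just an environment wrapper around the same script. A rough sketch of running the new JDBC leg locally (the credentials path is an assumption, not something this change provides):

```bash
# Hypothetical local run of the jdbc-integration case in .kokoro/build.sh.
export GOOGLE_APPLICATION_CREDENTIALS="$HOME/keys/jdbc-it-sa.json"
JOB_TYPE=jdbc-integration ./.kokoro/build.sh
```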
diff --git a/.kokoro/coerce_logs.sh b/.kokoro/coerce_logs.sh
index 5cf7ba49e6..46edbf7f2f 100755
--- a/.kokoro/coerce_logs.sh
+++ b/.kokoro/coerce_logs.sh
@@ -28,7 +28,6 @@ job=$(basename ${KOKORO_JOB_NAME})
 echo "coercing sponge logs..."
 for xml in `find . -name *-sponge_log.xml`
 do
-  echo "processing ${xml}"
   class=$(basename ${xml} | cut -d- -f2)
   dir=$(dirname ${xml})/${job}/${class}
   text=$(dirname ${xml})/${class}-sponge_log.txt
diff --git a/.kokoro/common.sh b/.kokoro/common.sh
index ace89f45a9..f8f957af11 100644
--- a/.kokoro/common.sh
+++ b/.kokoro/common.sh
@@ -55,4 +55,6 @@ function retry_with_backoff {
 ## Helper functionss
 function now() { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n'; }
 function msg() { println "$*" >&2; }
-function println() { printf '%s\n' "$(now) $*"; }
\ No newline at end of file
+function println() { printf '%s\n' "$(now) $*"; }
+
+## Helper comment to trigger updated repo dependency release
\ No newline at end of file
diff --git a/.kokoro/continuous/graalvm-native-a.cfg b/.kokoro/continuous/graalvm-native-a.cfg
new file mode 100644
index 0000000000..ddde52219a
--- /dev/null
+++ b/.kokoro/continuous/graalvm-native-a.cfg
@@ -0,0 +1,38 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.57.0"
+}
+
+env_vars: {
+  key: "JOB_TYPE"
+  value: "graalvm"
+}
+
+# TODO: remove this after we've migrated all tests and scripts
+env_vars: {
+  key: "GCLOUD_PROJECT"
+  value: "gcloud-devel"
+}
+
+env_vars: {
+  key: "GOOGLE_CLOUD_PROJECT"
+  value: "gcloud-devel"
+}
+
+env_vars: {
+  key: "GOOGLE_APPLICATION_CREDENTIALS"
+  value: "secret_manager/java-it-service-account"
+}
+
+env_vars: {
+  key: "SECRET_MANAGER_KEYS"
+  value: "java-it-service-account"
+}
+
+env_vars: {
+  key: "ENABLE_FLAKYBOT"
+  value: "false"
+}
\ No newline at end of file
diff --git a/.kokoro/continuous/graalvm-native-b.cfg b/.kokoro/continuous/graalvm-native-b.cfg
new file mode 100644
index 0000000000..b77a9ae767
--- /dev/null
+++ b/.kokoro/continuous/graalvm-native-b.cfg
@@ -0,0 +1,38 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.57.0"
+}
+
+env_vars: {
+  key: "JOB_TYPE"
+  value: "graalvm"
+}
+
+# TODO: remove this after we've migrated all tests and scripts
+env_vars: {
+  key: "GCLOUD_PROJECT"
+  value: "gcloud-devel"
+}
+
+env_vars: {
+  key: "GOOGLE_CLOUD_PROJECT"
+  value: "gcloud-devel"
+}
+
+env_vars: {
+  key: "GOOGLE_APPLICATION_CREDENTIALS"
+  value: "secret_manager/java-it-service-account"
+}
+
+env_vars: {
+  key: "SECRET_MANAGER_KEYS"
+  value: "java-it-service-account"
+}
+
+env_vars: {
+  key: "ENABLE_FLAKYBOT"
+  value: "false"
+}
\ No newline at end of file
diff --git a/.kokoro/continuous/graalvm-native-c.cfg b/.kokoro/continuous/graalvm-native-c.cfg
new file mode 100644
index 0000000000..70864f5274
--- /dev/null
+++ b/.kokoro/continuous/graalvm-native-c.cfg
@@ -0,0 +1,38 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_c:3.57.0"
+}
+
+env_vars: {
+  key: "JOB_TYPE"
+  value: "graalvm"
+}
+
+# TODO: remove this after we've migrated all tests and scripts
+env_vars: {
+  key: "GCLOUD_PROJECT"
+  value: "gcloud-devel"
+}
+
+env_vars: {
+  key: "GOOGLE_CLOUD_PROJECT"
+  value: "gcloud-devel"
+}
+
+env_vars: {
+  key: "GOOGLE_APPLICATION_CREDENTIALS"
+  value: "secret_manager/java-it-service-account"
+}
+
+env_vars: {
+  key: "SECRET_MANAGER_KEYS"
+  value: "java-it-service-account"
+}
+
+env_vars: {
+  key: "ENABLE_FLAKYBOT"
+  value: "false"
+}
\ No newline at end of file
diff --git a/.kokoro/continuous/java11-samples.cfg b/.kokoro/continuous/java11-samples.cfg
deleted file mode 100644
index 0d0a7a3297..0000000000
--- a/.kokoro/continuous/java11-samples.cfg
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Download secrets from Cloud Storage.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/java-docs-samples"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/java11"
-}
-
-# Tell the trampoline which tests to run.
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/java-bigquery/.kokoro/run_samples_tests.sh"
-}
\ No newline at end of file
diff --git a/.kokoro/continuous/java8-samples.cfg b/.kokoro/continuous/java8-samples.cfg
deleted file mode 100644
index f3c1f7aaab..0000000000
--- a/.kokoro/continuous/java8-samples.cfg
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Download secrets from Cloud Storage.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/java-docs-samples"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/java8"
-}
-
-# Tell trampoline which tests to run.
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/java-bigquery/.kokoro/run_samples_tests.sh"
-}
-
-
-
diff --git a/.kokoro/continuous/readme.cfg b/.kokoro/continuous/readme.cfg
deleted file mode 100644
index e99978cc71..0000000000
--- a/.kokoro/continuous/readme.cfg
+++ /dev/null
@@ -1,55 +0,0 @@
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
-}
-
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/java-bigquery/.kokoro/readme.sh"
-}
-
-# Build logs will be here
-action {
-  define_artifacts {
-    regex: "**/*sponge_log.xml"
-    regex: "**/*sponge_log.log"
-  }
-}
-
-# The github token is stored here.
-before_action {
-  fetch_keystore {
-    keystore_resource {
-      keystore_config_id: 73713
-      keyname: "yoshi-automation-github-key"
-      # TODO(theacodes): remove this after secrets have globally propagated
-      backend_type: FASTCONFIGPUSH
-    }
-  }
-}
-
-# Common env vars for all repositories and builds.
-env_vars: {
-  key: "GITHUB_USER"
-  value: "yoshi-automation"
-}
-env_vars: {
-  key: "GITHUB_EMAIL"
-  value: "yoshi-automation@google.com"
-}
diff --git a/.kokoro/dependencies.sh b/.kokoro/dependencies.sh
index 0fb8c8436c..bd8960246f 100755
--- a/.kokoro/dependencies.sh
+++ b/.kokoro/dependencies.sh
@@ -14,6 +14,7 @@
 # limitations under the License.
 
 set -eo pipefail
+shopt -s nullglob
 
 ## Get the directory of the build script
 scriptDir=$(realpath $(dirname "${BASH_SOURCE[0]}"))
@@ -27,63 +28,32 @@ source ${scriptDir}/common.sh
 java -version
 echo $JOB_TYPE
 
-export MAVEN_OPTS="-Xmx1024m -XX:MaxPermSize=128m"
+function determineMavenOpts() {
+  local javaVersion=$(
+    # filter down to the version line, then pull out the version between quotes,
+    # then trim the version number down to its minimal number (removing any
+    # update or suffix number).
+    java -version 2>&1 | grep "version" \
+      | sed -E 's/^.*"(.*?)".*$/\1/g' \
+      | sed -E 's/^(1\.[0-9]\.0).*$/\1/g'
+  )
+
+  if [[ $javaVersion == 17* ]]
+  then
+    # MaxPermSize is no longer supported as of jdk 17
+    echo -n "-Xmx1024m"
+  else
+    echo -n "-Xmx1024m -XX:MaxPermSize=128m"
+  fi
+}
+
+export MAVEN_OPTS=$(determineMavenOpts)
 
 # this should run maven enforcer
 retry_with_backoff 3 10 \
   mvn install -B -V -ntp \
     -DskipTests=true \
+    -Dmaven.javadoc.skip=true \
     -Dclirr.skip=true
 
 mvn -B dependency:analyze -DfailOnWarning=true
-
-echo "****************** DEPENDENCY LIST COMPLETENESS CHECK *******************"
-## Run dependency list completeness check
-function completenessCheck() {
-  # Output dep list with compile scope generated using the original pom
-  # Running mvn dependency:list on Java versions that support modules will also include the module of the dependency.
-  # This is stripped from the output as it is not present in the flattened pom.
-  # Only dependencies with 'compile' or 'runtime' scope are included from original dependency list.
-  msg "Generating dependency list using original pom..."
-  mvn dependency:list -f pom.xml -DincludeScope=runtime -Dsort=true | grep '\[INFO] .*:.*:.*:.*:.*' | sed -e s/\\s--\\smodule.*// >.org-list.txt
-
-  # Output dep list generated using the flattened pom (only 'compile' and 'runtime' scopes)
-  msg "Generating dependency list using flattened pom..."
-  mvn dependency:list -f .flattened-pom.xml -DincludeScope=runtime -Dsort=true | grep '\[INFO] .*:.*:.*:.*:.*' >.new-list.txt
-
-  # Compare two dependency lists
-  msg "Comparing dependency lists..."
-  diff .org-list.txt .new-list.txt >.diff.txt
-  if [[ $? == 0 ]]
-  then
-    msg "Success. No diff!"
-  else
-    msg "Diff found. See below: "
-    msg "You can also check .diff.txt file located in $1."
-    cat .diff.txt
-    return 1
-  fi
-}
-
-# Allow failures to continue running the script
-set +e
-
-error_count=0
-for path in $(find -name ".flattened-pom.xml")
-do
-  # Check flattened pom in each dir that contains it for completeness
-  dir=$(dirname "$path")
-  pushd "$dir"
-  completenessCheck "$dir"
-  error_count=$(($error_count + $?))
-  popd
-done
-
-if [[ $error_count == 0 ]]
-then
-  msg "All checks passed."
-  exit 0
-else
-  msg "Errors found. See log statements above."
-  exit 1
-fi
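`determineMavenOpts` drops `-XX:MaxPermSize` only when the parsed version string starts with 17. An illustration of what the parsing pipeline yields (the version banners are examples, not captured CI output):

```bash
# 'openjdk version "17.0.9" 2023-10-17' -> 17.0.9 -> matches the 17* branch.
echo 'openjdk version "17.0.9" 2023-10-17' \
  | grep "version" \
  | sed -E 's/^.*"(.*?)".*$/\1/g' \
  | sed -E 's/^(1\.[0-9]\.0).*$/\1/g'
# A Java 8 banner such as 'java version "1.8.0_292"' reduces to 1.8.0,
# so it falls through to the MaxPermSize branch.
```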
diff --git a/.kokoro/jdbc-release.sh b/.kokoro/jdbc-release.sh
new file mode 100755
index 0000000000..01cb7b47b9
--- /dev/null
+++ b/.kokoro/jdbc-release.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+# Copyright 2026 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -euo pipefail
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+cd "${DIR}/../google-cloud-bigquery-jdbc"
+
+FOLDER="$(pwd)/release"
+DATE=$(date '+%Y-%m-%d')
+COMMIT=$(git rev-parse --short HEAD)
+PACKAGE="google-cloud-bigquery-jdbc"
+VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)
+BUCKET=${NIGHTLY_RELEASE_GCS_BUCKET}
+NIGHTLY_BUILD_DESTINATION="${BUCKET}/nightly/${VERSION}/${DATE}"
+
+# All dependencies release
+mkdir -p "${FOLDER}"
+make docker-package PACKAGE_DESTINATION="${FOLDER}"
+NAME=${PACKAGE}-${VERSION}-${COMMIT}
+
+gsutil cp -r "${FOLDER}/${PACKAGE}-${VERSION}.zip" "${NIGHTLY_BUILD_DESTINATION}/${NAME}.zip"
+gsutil cp -r "${FOLDER}/${PACKAGE}-${VERSION}-all.jar" "${NIGHTLY_BUILD_DESTINATION}/${NAME}-all.jar"
+
+rm -rf "${FOLDER}"
+
+# Update latest version
+gsutil cp "${NIGHTLY_BUILD_DESTINATION}/${NAME}.zip" "${BUCKET}/${PACKAGE}-latest.zip"
+gsutil cp "${NIGHTLY_BUILD_DESTINATION}/${NAME}-all.jar" "${BUCKET}/${PACKAGE}-latest-all.jar"
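Each nightly run publishes an immutable date- and commit-stamped copy, then overwrites the `latest` aliases. Assuming, purely for illustration, VERSION `1.2.3-SNAPSHOT`, a 2026-01-15 run, and commit `abc1234`, the bucket would end up with objects like:

```bash
# Expected object layout (names are illustrative, derived from the script):
#   ${BUCKET}/nightly/1.2.3-SNAPSHOT/2026-01-15/google-cloud-bigquery-jdbc-1.2.3-SNAPSHOT-abc1234.zip
#   ${BUCKET}/nightly/1.2.3-SNAPSHOT/2026-01-15/google-cloud-bigquery-jdbc-1.2.3-SNAPSHOT-abc1234-all.jar
#   ${BUCKET}/google-cloud-bigquery-jdbc-latest.zip
#   ${BUCKET}/google-cloud-bigquery-jdbc-latest-all.jar
gsutil ls -r "${NIGHTLY_RELEASE_GCS_BUCKET}/nightly/"
```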
diff --git a/.kokoro/nightly/integration.cfg b/.kokoro/nightly/integration.cfg
index 0048c8ece7..24b9cf6345 100644
--- a/.kokoro/nightly/integration.cfg
+++ b/.kokoro/nightly/integration.cfg
@@ -22,7 +22,7 @@ env_vars: {
 }
 
 env_vars: {
-  key: "ENABLE_BUILD_COP"
+  key: "ENABLE_FLAKYBOT"
   value: "true"
 }
 
@@ -35,3 +35,4 @@ env_vars: {
   key: "SECRET_MANAGER_KEYS"
   value: "java-it-service-account"
 }
+
diff --git a/.kokoro/nightly/java11-integration.cfg b/.kokoro/nightly/java11-integration.cfg
new file mode 100644
index 0000000000..6a6ef94eff
--- /dev/null
+++ b/.kokoro/nightly/java11-integration.cfg
@@ -0,0 +1,38 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-public-resources/java11014"
+}
+
+env_vars: {
+  key: "JOB_TYPE"
+  value: "integration"
+}
+# TODO: remove this after we've migrated all tests and scripts
+env_vars: {
+  key: "GCLOUD_PROJECT"
+  value: "gcloud-devel"
+}
+
+env_vars: {
+  key: "GOOGLE_CLOUD_PROJECT"
+  value: "gcloud-devel"
+}
+
+env_vars: {
+  key: "ENABLE_FLAKYBOT"
+  value: "true"
+}
+
+env_vars: {
+  key: "GOOGLE_APPLICATION_CREDENTIALS"
+  value: "secret_manager/java-it-service-account"
+}
+
+env_vars: {
+  key: "SECRET_MANAGER_KEYS"
+  value: "java-it-service-account"
+}
+
diff --git a/.kokoro/nightly/java11-samples.cfg b/.kokoro/nightly/java11-samples.cfg
deleted file mode 100644
index 0d0a7a3297..0000000000
--- a/.kokoro/nightly/java11-samples.cfg
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Download secrets from Cloud Storage.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/java-docs-samples"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/java11"
-}
-
-# Tell the trampoline which tests to run.
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/java-bigquery/.kokoro/run_samples_tests.sh"
-}
\ No newline at end of file
diff --git a/.kokoro/nightly/java8-samples.cfg b/.kokoro/nightly/java8-samples.cfg
deleted file mode 100644
index f3c1f7aaab..0000000000
--- a/.kokoro/nightly/java8-samples.cfg
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Download secrets from Cloud Storage.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/java-docs-samples"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/java8"
-}
-
-# Tell trampoline which tests to run.
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/java-bigquery/.kokoro/run_samples_tests.sh"
-}
-
-
-
diff --git a/.kokoro/nightly/nightly-integration.cfg b/.kokoro/nightly/nightly-integration.cfg
new file mode 100644
index 0000000000..675f799182
--- /dev/null
+++ b/.kokoro/nightly/nightly-integration.cfg
@@ -0,0 +1,37 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-kokoro-resources/java8"
+}
+
+env_vars: {
+  key: "JOB_TYPE"
+  value: "nightly-it"
+}
+# TODO: remove this after we've migrated all tests and scripts
+env_vars: {
+  key: "GCLOUD_PROJECT"
+  value: "java-docs-samples-testing"
+}
+
+env_vars: {
+  key: "GOOGLE_CLOUD_PROJECT"
+  value: "java-docs-samples-testing"
+}
+
+env_vars: {
+  key: "ENABLE_FLAKYBOT"
+  value: "true"
+}
+
+env_vars: {
+  key: "GOOGLE_APPLICATION_CREDENTIALS"
+  value: "secret_manager/java-it-service-account"
+}
+
+env_vars: {
+  key: "SECRET_MANAGER_KEYS"
+  value: "java-it-service-account"
+}
diff --git a/.kokoro/presubmit/graalvm-native-a.cfg b/.kokoro/presubmit/graalvm-native-a.cfg
new file mode 100644
index 0000000000..ddde52219a
--- /dev/null
+++ b/.kokoro/presubmit/graalvm-native-a.cfg
@@ -0,0 +1,38 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_a:3.57.0"
+}
+
+env_vars: {
+  key: "JOB_TYPE"
+  value: "graalvm"
+}
+
+# TODO: remove this after we've migrated all tests and scripts
+env_vars: {
+  key: "GCLOUD_PROJECT"
+  value: "gcloud-devel"
+}
+
+env_vars: {
+  key: "GOOGLE_CLOUD_PROJECT"
+  value: "gcloud-devel"
+}
+
+env_vars: {
+  key: "GOOGLE_APPLICATION_CREDENTIALS"
+  value: "secret_manager/java-it-service-account"
+}
+
+env_vars: {
+  key: "SECRET_MANAGER_KEYS"
+  value: "java-it-service-account"
+}
+
+env_vars: {
+  key: "ENABLE_FLAKYBOT"
+  value: "false"
+}
\ No newline at end of file
diff --git a/.kokoro/presubmit/graalvm-native-b.cfg b/.kokoro/presubmit/graalvm-native-b.cfg
new file mode 100644
index 0000000000..b77a9ae767
--- /dev/null
+++ b/.kokoro/presubmit/graalvm-native-b.cfg
@@ -0,0 +1,38 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_b:3.57.0"
+}
+
+env_vars: {
+  key: "JOB_TYPE"
+  value: "graalvm"
+}
+
+# TODO: remove this after we've migrated all tests and scripts
+env_vars: {
+  key: "GCLOUD_PROJECT"
+  value: "gcloud-devel"
+}
+
+env_vars: {
+  key: "GOOGLE_CLOUD_PROJECT"
+  value: "gcloud-devel"
+}
+
+env_vars: {
+  key: "GOOGLE_APPLICATION_CREDENTIALS"
+  value: "secret_manager/java-it-service-account"
+}
+
+env_vars: {
+  key: "SECRET_MANAGER_KEYS"
+  value: "java-it-service-account"
+}
+
+env_vars: {
+  key: "ENABLE_FLAKYBOT"
+  value: "false"
+}
\ No newline at end of file
diff --git a/.kokoro/presubmit/graalvm-native-c.cfg b/.kokoro/presubmit/graalvm-native-c.cfg
new file mode 100644
index 0000000000..70864f5274
--- /dev/null
+++ b/.kokoro/presubmit/graalvm-native-c.cfg
@@ -0,0 +1,38 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-public-resources/graalvm_sdk_platform_c:3.57.0"
+}
+
+env_vars: {
+  key: "JOB_TYPE"
+  value: "graalvm"
+}
+
+# TODO: remove this after we've migrated all tests and scripts
+env_vars: {
+  key: "GCLOUD_PROJECT"
+  value: "gcloud-devel"
+}
+
+env_vars: {
+  key: "GOOGLE_CLOUD_PROJECT"
+  value: "gcloud-devel"
+}
+
+env_vars: {
+  key: "GOOGLE_APPLICATION_CREDENTIALS"
+  value: "secret_manager/java-it-service-account"
+}
+
+env_vars: {
+  key: "SECRET_MANAGER_KEYS"
+  value: "java-it-service-account"
+}
+
+env_vars: {
+  key: "ENABLE_FLAKYBOT"
+  value: "false"
+}
\ No newline at end of file
diff --git a/.kokoro/presubmit/integration.cfg b/.kokoro/presubmit/integration.cfg
index dded67a9d5..5864c603e5 100644
--- a/.kokoro/presubmit/integration.cfg
+++ b/.kokoro/presubmit/integration.cfg
@@ -31,3 +31,4 @@ env_vars: {
   key: "SECRET_MANAGER_KEYS"
   value: "java-it-service-account"
 }
+
diff --git a/.kokoro/presubmit/java11-samples.cfg b/.kokoro/presubmit/java11-samples.cfg
deleted file mode 100644
index 0d0a7a3297..0000000000
--- a/.kokoro/presubmit/java11-samples.cfg
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Format: //devtools/kokoro/config/proto/build.proto - -# Download secrets from Cloud Storage. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/java-docs-samples" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/java11" -} - -# Tell the trampoline which tests to run. -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/java-bigquery/.kokoro/run_samples_tests.sh" -} \ No newline at end of file diff --git a/.kokoro/presubmit/java8-samples.cfg b/.kokoro/presubmit/java8-samples.cfg deleted file mode 100644 index f3c1f7aaab..0000000000 --- a/.kokoro/presubmit/java8-samples.cfg +++ /dev/null @@ -1,33 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Format: //devtools/kokoro/config/proto/build.proto - -# Download secrets from Cloud Storage. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/java-docs-samples" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/java8" -} - -# Tell trampoline which tests to run. -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/java-bigquery/.kokoro/run_samples_tests.sh" -} - - - diff --git a/.kokoro/presubmit/samples.cfg b/.kokoro/presubmit/samples.cfg index 01e0960047..b14a33856e 100644 --- a/.kokoro/presubmit/samples.cfg +++ b/.kokoro/presubmit/samples.cfg @@ -29,5 +29,5 @@ env_vars: { env_vars: { key: "SECRET_MANAGER_KEYS" - value: "java-docs-samples-service-account" + value: "java-docs-samples-service-account, java-bigquery-samples-secrets" } \ No newline at end of file diff --git a/.kokoro/release/bump_snapshot.cfg b/.kokoro/release/bump_snapshot.cfg deleted file mode 100644 index a9df23a26c..0000000000 --- a/.kokoro/release/bump_snapshot.cfg +++ /dev/null @@ -1,53 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Build logs will be here -action { - define_artifacts { - regex: "**/*sponge_log.xml" - } -} - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "java-bigquery/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/node:10-user" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/java-bigquery/.kokoro/release/bump_snapshot.sh" -} - -# tokens used by release-please to keep an up-to-date release PR. 
-before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "github-magic-proxy-key-release-please" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "github-magic-proxy-token-release-please" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "github-magic-proxy-url-release-please" - } - } -} diff --git a/.kokoro/release/bump_snapshot.sh b/.kokoro/release/bump_snapshot.sh deleted file mode 100755 index cda46bf27a..0000000000 --- a/.kokoro/release/bump_snapshot.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash - -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -export NPM_CONFIG_PREFIX=/home/node/.npm-global - -if [ -f ${KOKORO_KEYSTORE_DIR}/73713_github-magic-proxy-url-release-please ]; then - # Groom the snapshot release PR immediately after publishing a release - npx release-please release-pr --token=${KOKORO_KEYSTORE_DIR}/73713_github-magic-proxy-token-release-please \ - --repo-url=googleapis/java-bigquery \ - --package-name="bigquery" \ - --api-url=${KOKORO_KEYSTORE_DIR}/73713_github-magic-proxy-url-release-please \ - --proxy-key=${KOKORO_KEYSTORE_DIR}/73713_github-magic-proxy-key-release-please \ - --snapshot \ - --release-type=java-auth-yoshi -fi diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg deleted file mode 100644 index 6d3d52351b..0000000000 --- a/.kokoro/release/common.cfg +++ /dev/null @@ -1,49 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# Download trampoline resources. -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" - -# Use the trampoline script to run in docker. -build_file: "java-bigquery/.kokoro/trampoline.sh" - -# Configure the docker image for kokoro-trampoline. -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/java8" -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 70247 - keyname: "maven-gpg-keyring" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 70247 - keyname: "maven-gpg-passphrase" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 70247 - keyname: "maven-gpg-pubkeyring" - } - } -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 70247 - keyname: "sonatype-credentials" - } - } -} diff --git a/.kokoro/release/common.sh b/.kokoro/release/common.sh deleted file mode 100755 index 6e3f65999b..0000000000 --- a/.kokoro/release/common.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash -# Copyright 2018 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# Get secrets from keystore and set as environment variables -setup_environment_secrets() { - export GPG_PASSPHRASE=$(cat ${KOKORO_KEYSTORE_DIR}/70247_maven-gpg-passphrase) - export GPG_TTY=$(tty) - export GPG_HOMEDIR=/gpg - mkdir $GPG_HOMEDIR - mv ${KOKORO_KEYSTORE_DIR}/70247_maven-gpg-pubkeyring $GPG_HOMEDIR/pubring.gpg - mv ${KOKORO_KEYSTORE_DIR}/70247_maven-gpg-keyring $GPG_HOMEDIR/secring.gpg - export SONATYPE_USERNAME=$(cat ${KOKORO_KEYSTORE_DIR}/70247_sonatype-credentials | cut -f1 -d'|') - export SONATYPE_PASSWORD=$(cat ${KOKORO_KEYSTORE_DIR}/70247_sonatype-credentials | cut -f2 -d'|') -} - -create_settings_xml_file() { - echo "<settings> - <servers> - <server> - <id>ossrh</id> - <username>${SONATYPE_USERNAME}</username> - <password>${SONATYPE_PASSWORD}</password> - </server> - <server> - <id>sonatype-nexus-staging</id> - <username>${SONATYPE_USERNAME}</username> - <password>${SONATYPE_PASSWORD}</password> - </server> - <server> - <id>sonatype-nexus-snapshots</id> - <username>${SONATYPE_USERNAME}</username> - <password>${SONATYPE_PASSWORD}</password> - </server> - </servers> - </settings>" > $1 -} \ No newline at end of file diff --git a/.kokoro/release/drop.cfg b/.kokoro/release/drop.cfg deleted file mode 100644 index 0040a6262e..0000000000 --- a/.kokoro/release/drop.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/java-bigquery/.kokoro/release/drop.sh" -} diff --git a/.kokoro/release/drop.sh b/.kokoro/release/drop.sh deleted file mode 100755 index 5c4551efa2..0000000000 --- a/.kokoro/release/drop.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash -# Copyright 2018 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# STAGING_REPOSITORY_ID must be set -if [ -z "${STAGING_REPOSITORY_ID}" ]; then - echo "Missing STAGING_REPOSITORY_ID environment variable" - exit 1 -fi - -source $(dirname "$0")/common.sh -pushd $(dirname "$0")/../../ - -setup_environment_secrets -create_settings_xml_file "settings.xml" - -mvn nexus-staging:drop -B \ - --settings=settings.xml \ - -DstagingRepositoryId=${STAGING_REPOSITORY_ID} diff --git a/.kokoro/release/promote.cfg b/.kokoro/release/promote.cfg deleted file mode 100644 index 5bc0209218..0000000000 --- a/.kokoro/release/promote.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/java-bigquery/.kokoro/release/promote.sh" -} diff --git a/.kokoro/release/promote.sh b/.kokoro/release/promote.sh deleted file mode 100755 index 1fa95fa537..0000000000 --- a/.kokoro/release/promote.sh +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash -# Copyright 2018 Google Inc.
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -# STAGING_REPOSITORY_ID must be set -if [ -z "${STAGING_REPOSITORY_ID}" ]; then - echo "Missing STAGING_REPOSITORY_ID environment variable" - exit 1 -fi - -source $(dirname "$0")/common.sh - -pushd $(dirname "$0")/../../ - -setup_environment_secrets -create_settings_xml_file "settings.xml" - -mvn nexus-staging:release -B \ - -DperformRelease=true \ - --settings=settings.xml \ - -DstagingRepositoryId=${STAGING_REPOSITORY_ID} diff --git a/.kokoro/release/publish_javadoc.cfg b/.kokoro/release/publish_javadoc.cfg deleted file mode 100644 index 31a5528191..0000000000 --- a/.kokoro/release/publish_javadoc.cfg +++ /dev/null @@ -1,29 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/doc-templates/" - -env_vars: { - key: "STAGING_BUCKET" - value: "docs-staging" -} - -# cloud-rad staging -env_vars: { - key: "STAGING_BUCKET_V2" - value: "docs-staging-v2-staging" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/java-bigquery/.kokoro/release/publish_javadoc.sh" -} - - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} \ No newline at end of file diff --git a/.kokoro/release/publish_javadoc.sh b/.kokoro/release/publish_javadoc.sh deleted file mode 100755 index f0be47d224..0000000000 --- a/.kokoro/release/publish_javadoc.sh +++ /dev/null @@ -1,77 +0,0 @@ -#!/bin/bash -# Copyright 2019 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -if [[ -z "${CREDENTIALS}" ]]; then - CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account -fi - -if [[ -z "${STAGING_BUCKET}" ]]; then - echo "Need to set STAGING_BUCKET environment variable" - exit 1 -fi - -if [[ -z "${STAGING_BUCKET_V2}" ]]; then - echo "Need to set STAGING_BUCKET_V2 environment variable" - exit 1 -fi - -# work from the git root directory -pushd $(dirname "$0")/../../ - -# install docuploader package -python3 -m pip install gcp-docuploader - -# compile all packages -mvn clean install -B -q -DskipTests=true - -export NAME=google-cloud-bigquery -export VERSION=$(grep ${NAME}: versions.txt | cut -d: -f3) - -# build the docs -mvn site -B -q - -pushd target/site/apidocs - -# create metadata -python3 -m docuploader create-metadata \ - --name ${NAME} \ - --version ${VERSION} \ - --language java - -# upload docs -python3 -m docuploader upload . 
\ - --credentials ${CREDENTIALS} \ - --staging-bucket ${STAGING_BUCKET} - -popd - -# V2: due to problems w/ the released javadoc plugin doclava, Java 8 is required. Beware of accidental updates. - -mvn clean site -B -q -Ddevsite.template="${KOKORO_GFILE_DIR}/java/" - -pushd target/devsite/reference - -# create metadata -python3 -m docuploader create-metadata \ - --name ${NAME} \ - --version ${VERSION} \ - --language java - -# upload docs to staging bucket -python3 -m docuploader upload . \ - --credentials ${CREDENTIALS} \ - --staging-bucket ${STAGING_BUCKET_V2} diff --git a/.kokoro/release/publish_javadoc11.cfg b/.kokoro/release/publish_javadoc11.cfg deleted file mode 100644 index 60a846c10a..0000000000 --- a/.kokoro/release/publish_javadoc11.cfg +++ /dev/null @@ -1,30 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -# cloud-rad production -env_vars: { - key: "STAGING_BUCKET_V2" - value: "docs-staging-v2" -} - -# Configure the docker image for kokoro-trampoline -env_vars: { - key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/java11" -} - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/java-bigquery/.kokoro/release/publish_javadoc11.sh" -} - -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "docuploader_service_account" - } - } -} - -# Downloads docfx doclet resource. This will be in ${KOKORO_GFILE_DIR}/ -gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/docfx" diff --git a/.kokoro/release/publish_javadoc11.sh b/.kokoro/release/publish_javadoc11.sh deleted file mode 100755 index 7e59c0f9ca..0000000000 --- a/.kokoro/release/publish_javadoc11.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash -# Copyright 2021 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -if [[ -z "${CREDENTIALS}" ]]; then - CREDENTIALS=${KOKORO_KEYSTORE_DIR}/73713_docuploader_service_account -fi - -if [[ -z "${STAGING_BUCKET_V2}" ]]; then - echo "Need to set STAGING_BUCKET_V2 environment variable" - exit 1 -fi - -# work from the git root directory -pushd $(dirname "$0")/../../ - -# install docuploader package -python3 -m pip install gcp-docuploader - -# compile all packages -mvn clean install -B -q -DskipTests=true - -export NAME=google-cloud-bigquery -export VERSION=$(grep ${NAME}: versions.txt | cut -d: -f3) - -# V3 generates docfx yml from javadoc -# generate yml -mvn clean site -B -q -P docFX - -pushd target/docfx-yml - -# create metadata -python3 -m docuploader create-metadata \ - --name ${NAME} \ - --version ${VERSION} \ - --language java - -# upload yml to production bucket -python3 -m docuploader upload .
\ - --credentials ${CREDENTIALS} \ - --staging-bucket ${STAGING_BUCKET_V2} \ - --destination-prefix docfx- diff --git a/.kokoro/release/snapshot.cfg b/.kokoro/release/snapshot.cfg deleted file mode 100644 index effcaa3127..0000000000 --- a/.kokoro/release/snapshot.cfg +++ /dev/null @@ -1,6 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/java-bigquery/.kokoro/release/snapshot.sh" -} \ No newline at end of file diff --git a/.kokoro/release/snapshot.sh b/.kokoro/release/snapshot.sh deleted file mode 100755 index 098168a737..0000000000 --- a/.kokoro/release/snapshot.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -set -eo pipefail - -source $(dirname "$0")/common.sh -MAVEN_SETTINGS_FILE=$(realpath $(dirname "$0")/../../)/settings.xml -pushd $(dirname "$0")/../../ - -# ensure we're trying to push a snapshot (no-result returns non-zero exit code) -grep SNAPSHOT versions.txt - -setup_environment_secrets -create_settings_xml_file "settings.xml" - -mvn clean install deploy -B \ - --settings ${MAVEN_SETTINGS_FILE} \ - -DperformRelease=true \ - -Dgpg.executable=gpg \ - -Dgpg.passphrase=${GPG_PASSPHRASE} \ - -Dgpg.homedir=${GPG_HOMEDIR} diff --git a/.kokoro/release/stage.cfg b/.kokoro/release/stage.cfg deleted file mode 100644 index 8cd8055a99..0000000000 --- a/.kokoro/release/stage.cfg +++ /dev/null @@ -1,19 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto - -env_vars: { - key: "TRAMPOLINE_BUILD_FILE" - value: "github/java-bigquery/.kokoro/release/stage.sh" -} - -# Need to save the properties file -action { - define_artifacts { - regex: "github/java-bigquery/target/nexus-staging/staging/*.properties" - strip_prefix: "github/java-bigquery" - } -} - -env_vars: { - key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" -} diff --git a/.kokoro/release/stage.sh b/.kokoro/release/stage.sh deleted file mode 100755 index 16126d2eb5..0000000000 --- a/.kokoro/release/stage.sh +++ /dev/null @@ -1,45 +0,0 @@ -#!/bin/bash -# Copyright 2018 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -set -eo pipefail - -# Start the releasetool reporter -python3 -m pip install gcp-releasetool -python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script - -source $(dirname "$0")/common.sh -source $(dirname "$0")/../common.sh -MAVEN_SETTINGS_FILE=$(realpath $(dirname "$0")/../../)/settings.xml -pushd $(dirname "$0")/../../ - -setup_environment_secrets -create_settings_xml_file "settings.xml" - -# attempt to stage 3 times with exponential backoff (starting with 10 seconds) -retry_with_backoff 3 10 \ - mvn clean install deploy -B \ - --settings ${MAVEN_SETTINGS_FILE} \ - -DskipTests=true \ - -DperformRelease=true \ - -Dgpg.executable=gpg \ - -Dgpg.passphrase=${GPG_PASSPHRASE} \ - -Dgpg.homedir=${GPG_HOMEDIR} - -if [[ -n "${AUTORELEASE_PR}" ]] -then - mvn nexus-staging:release -B \ - -DperformRelease=true \ - --settings=settings.xml -fi \ No newline at end of file diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in new file mode 100644 index 0000000000..2092cc741d --- /dev/null +++ b/.kokoro/requirements.in @@ -0,0 +1,6 @@ +gcp-docuploader +gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x +wheel +setuptools +typing-extensions +click<8.1.0 \ No newline at end of file diff --git a/.kokoro/run_samples_resource_cleanup.sh b/.kokoro/run_samples_resource_cleanup.sh index e04daf3c64..4f89485f8b 100755 --- a/.kokoro/run_samples_resource_cleanup.sh +++ b/.kokoro/run_samples_resource_cleanup.sh @@ -30,6 +30,10 @@ source ${scriptDir}/common.sh # Setup required env variables source ${KOKORO_GFILE_DIR}/secret_manager/java-bigquery-samples-secrets +# if GOOGLE_APPLICATION_CREDENTIALS is specified as a relative path, prepend Kokoro root directory onto it +if [[ ! -z "${GOOGLE_APPLICATION_CREDENTIALS}" && "${GOOGLE_APPLICATION_CREDENTIALS}" != /* ]]; then + export GOOGLE_APPLICATION_CREDENTIALS=$(realpath ${KOKORO_GFILE_DIR}/${GOOGLE_APPLICATION_CREDENTIALS}) +fi echo "********** Successfully Set All Environment Variables **********" # Move into the samples directory diff --git a/.kokoro/run_samples_tests.sh b/.kokoro/run_samples_tests.sh deleted file mode 100755 index 02038d8f3c..0000000000 --- a/.kokoro/run_samples_tests.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash -# Copyright 2019 Google Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# `-e` enables the script to automatically fail when a command fails -# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status -set -eo pipefail - -echo "********** MAVEN INFO ***********" -mvn -v - -# Get the directory of the build script -scriptDir=$(realpath $(dirname "${BASH_SOURCE[0]}")) -## cd to the parent directory, i.e. the root of the git repo -cd ${scriptDir}/..
- -# include common functions -source ${scriptDir}/common.sh - -# Setup required env variables -source ${KOKORO_GFILE_DIR}/bigquery_secrets.txt -echo "********** Successfully Set All Environment Variables **********" - -# Attempt to install 3 times with exponential backoff (starting with 10 seconds) -retry_with_backoff 3 10 \ - mvn install -B -V \ - -DskipTests=true \ - -Dclirr.skip=true \ - -Denforcer.skip=true \ - -Dmaven.javadoc.skip=true \ - -Dgcloud.download.skip=true \ - -T 1C - -# Activate service account -gcloud auth activate-service-account \ - --key-file="$GOOGLE_APPLICATION_CREDENTIALS" \ - --project="$GOOGLE_CLOUD_PROJECT" - -# Move into the samples directory -cd samples/ - -echo -e "\n******************** RUNNING SAMPLE TESTS ********************" - -mvn --fail-at-end clean verify \ No newline at end of file diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index 9da0f83987..8b69b793c9 100644 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2018 Google Inc. +# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.repo-metadata.json b/.repo-metadata.json index cc60147c9d..bb8ab745c1 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -1,16 +1,18 @@ { - "name": "bigquery", + "api_shortname": "bigquery", "name_pretty": "Cloud BigQuery", "product_documentation": "https://cloud.google.com/bigquery", - "client_documentation": "https://googleapis.dev/java/google-cloud-bigquery/latest", + "client_documentation": "https://cloud.google.com/java/docs/reference/google-cloud-bigquery/latest/history", "api_description": "is a fully managed, NoOps, low cost data analytics service.\nData can be streamed into BigQuery at millions of rows per second to enable real-time analysis.\nWith BigQuery you can easily deploy Petabyte-scale Databases.", "issue_tracker": "https://issuetracker.google.com/savedsearches/559654", - "release_level": "ga", + "release_level": "stable", "language": "java", "repo": "googleapis/java-bigquery", "repo_short": "java-bigquery", "distribution_name": "com.google.cloud:google-cloud-bigquery", - "codeowner_team": "@googleapis/api-bigquery", + "codeowner_team": "@googleapis/bigquery-team", "api_id": "bigquery.googleapis.com", - "requires_billing": true + "library_type": "GAPIC_MANUAL", + "requires_billing": true, + "recommended_package": "com.google.cloud.bigquery" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 6088cfe2e9..632a1ced71 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,2568 @@ # Changelog +## [2.60.0](https://github.com/googleapis/java-bigquery/compare/v2.59.0...v2.60.0) (2026-02-26) + + +### Features + +* **jdbc:** Add retry mechanism for storage api ([#4100](https://github.com/googleapis/java-bigquery/issues/4100)) ([776caa1](https://github.com/googleapis/java-bigquery/commit/776caa19d5a4f54f35984a7d39375dd751d272cf)) +* **jdbc:** Enforce strict JDBC URL parsing and sync `DataSource` properties ([#4107](https://github.com/googleapis/java-bigquery/issues/4107)) ([0b0c1ce](https://github.com/googleapis/java-bigquery/commit/0b0c1ceb309a19d51f385264f2d4722b8b6339d7)) + + +### Bug Fixes + +* Improve getErrorDescFromResponse() performance ([#4110](https://github.com/googleapis/java-bigquery/issues/4110)) ([4e0b409](https://github.com/googleapis/java-bigquery/commit/4e0b4099f6b690ac89f041eb0ff6e18439647e27)) + +## 
[2.59.0](https://github.com/googleapis/java-bigquery/compare/v2.58.0...v2.59.0) (2026-02-12) + + +### Features + +* **jdbc:** Add `HttpTransportOptions` timeout configuration ([#4089](https://github.com/googleapis/java-bigquery/issues/4089)) ([77e0c35](https://github.com/googleapis/java-bigquery/commit/77e0c350e7d8fb5d6491e5ed6f1edfd9fecc59c8)) +* **jdbc:** Add `RequestReason` connection property ([#4094](https://github.com/googleapis/java-bigquery/issues/4094)) ([6feeb50](https://github.com/googleapis/java-bigquery/commit/6feeb50bc1f071b01c4fcfc943db7a9439b44e1f)) +* **jdbc:** Enable gRPC Keep-Alive for Storage Read API ([#4093](https://github.com/googleapis/java-bigquery/issues/4093)) ([1239a68](https://github.com/googleapis/java-bigquery/commit/1239a68d5b07bfa1fe2bbb678540896623bd6049)) + + +### Bug Fixes + +* **jdbc:** Avoid String.format in log calls ([#4096](https://github.com/googleapis/java-bigquery/issues/4096)) ([ad438dc](https://github.com/googleapis/java-bigquery/commit/ad438dc626b69f3bddda47ccf6b7a97d9053c047)) +* **jdbc:** Return null for invalid URLs in `connect()` to comply with JDBC spec ([#4086](https://github.com/googleapis/java-bigquery/issues/4086)) ([0c030d0](https://github.com/googleapis/java-bigquery/commit/0c030d06fe43ee07d510c8f2532ecde7e9de3cde)) + +## [2.58.0](https://github.com/googleapis/java-bigquery/compare/v2.57.2...v2.58.0) (2026-01-26) + + +### Features + +* Update shared config and bigquerystorage ([#4081](https://github.com/googleapis/java-bigquery/issues/4081)) ([ed575f4](https://github.com/googleapis/java-bigquery/commit/ed575f4149badfb3346f5f3657cf254ea268e0fe)) + +## [2.57.2](https://github.com/googleapis/java-bigquery/compare/v2.57.1...v2.57.2) (2026-01-16) + + +### Bug Fixes + +* Gracefully handle thread interruption in ConnectionImpl to preve… ([#4047](https://github.com/googleapis/java-bigquery/issues/4047)) ([031deb0](https://github.com/googleapis/java-bigquery/commit/031deb00f153b47d37655b025fcef298a3db0e0b)) +* Job.isDone() uses Job.Status.State if available ([#4039](https://github.com/googleapis/java-bigquery/issues/4039)) ([fa0a12e](https://github.com/googleapis/java-bigquery/commit/fa0a12e3cf171abab528c318ba3f4260b69a5274)) + + +### Documentation + +* Add specific samples for creating and query timestamps ([#4051](https://github.com/googleapis/java-bigquery/issues/4051)) ([fac16a8](https://github.com/googleapis/java-bigquery/commit/fac16a8eb05a6e13e406feeb9761259cdbf8e674)) + +## [2.57.1](https://github.com/googleapis/java-bigquery/compare/v2.57.0...v2.57.1) (2025-12-12) + + +### Dependencies + +* Update actions/upload-artifact action to v6 ([#4027](https://github.com/googleapis/java-bigquery/issues/4027)) ([5d389cf](https://github.com/googleapis/java-bigquery/commit/5d389cf45b41a0edceb3c5ed98dd2421ba6f2234)) + +## [2.57.0](https://github.com/googleapis/java-bigquery/compare/v2.56.0...v2.57.0) (2025-12-11) + + +### Features + +* Add timestamp_precision to Field ([#4014](https://github.com/googleapis/java-bigquery/issues/4014)) ([57ffe1d](https://github.com/googleapis/java-bigquery/commit/57ffe1d2ba8af3b950438c926d66ac23ca8a3093)) +* Introduce DataFormatOptions to configure the output of BigQuery data types ([#4010](https://github.com/googleapis/java-bigquery/issues/4010)) ([6dcc900](https://github.com/googleapis/java-bigquery/commit/6dcc90053353422ae766e531413b3ecc65b8b155)) +* Relax client-side validation for BigQuery entity IDs ([#4000](https://github.com/googleapis/java-bigquery/issues/4000)) 
([c3548a2](https://github.com/googleapis/java-bigquery/commit/c3548a2f521b19761c844c0b24fc8caab541aba7)) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.54.2 ([#4022](https://github.com/googleapis/java-bigquery/issues/4022)) ([d2f2057](https://github.com/googleapis/java-bigquery/commit/d2f20579fd60efc36fa4239619e0d679a914cd6d)) + +## [2.56.0](https://github.com/googleapis/java-bigquery/compare/v2.55.3...v2.56.0) (2025-11-15) + + +### Features + +* New queryWithTimeout method for customer-side wait ([#3995](https://github.com/googleapis/java-bigquery/issues/3995)) ([9c0df54](https://github.com/googleapis/java-bigquery/commit/9c0df5422c05696f7ce4bedf914a58306150dc21)) + + +### Dependencies + +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20251012-2.0.0 ([#3923](https://github.com/googleapis/java-bigquery/issues/3923)) ([1d8977d](https://github.com/googleapis/java-bigquery/commit/1d8977df3b1451378e5471cce9fd8b067f80fc9a)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.54.1 ([#3994](https://github.com/googleapis/java-bigquery/issues/3994)) ([4e09f6b](https://github.com/googleapis/java-bigquery/commit/4e09f6bc7a25904ad8f61141a0837535d39dbb4e)) + +## [2.55.3](https://github.com/googleapis/java-bigquery/compare/v2.55.2...v2.55.3) (2025-10-21) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.53.0 ([#3980](https://github.com/googleapis/java-bigquery/issues/3980)) ([a961247](https://github.com/googleapis/java-bigquery/commit/a961247e9546a9fce8da1609afd18975142c2379)) + +## [2.55.2](https://github.com/googleapis/java-bigquery/compare/v2.55.1...v2.55.2) (2025-10-08) + + +### Dependencies + +* Fix update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.17.2 ([b25095d](https://github.com/googleapis/java-bigquery/commit/b25095d23279dab178975c33f4de84612612e175)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.52.3 ([#3971](https://github.com/googleapis/java-bigquery/issues/3971)) ([f8cf508](https://github.com/googleapis/java-bigquery/commit/f8cf50833772412c4f15922bffcdf5100792948d)) + +## [2.55.1](https://github.com/googleapis/java-bigquery/compare/v2.55.0...v2.55.1) (2025-09-26) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.52.2 ([#3964](https://github.com/googleapis/java-bigquery/issues/3964)) ([6775fce](https://github.com/googleapis/java-bigquery/commit/6775fce537df9c5f4d0b1488ce28591f6aed195f)) + +## [2.55.0](https://github.com/googleapis/java-bigquery/compare/v2.54.2...v2.55.0) (2025-09-12) + + +### Features + +* **bigquery:** Add custom ExceptionHandler to BigQueryOptions ([#3937](https://github.com/googleapis/java-bigquery/issues/3937)) ([de0914d](https://github.com/googleapis/java-bigquery/commit/de0914ddbccf988294d50faf56a515e58ab3505d)) + + +### Dependencies + +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.17.0 ([#3954](https://github.com/googleapis/java-bigquery/issues/3954)) ([e73deed](https://github.com/googleapis/java-bigquery/commit/e73deed9c68a45023d02b40144c304329d6b5829)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.52.1 ([#3952](https://github.com/googleapis/java-bigquery/issues/3952)) ([79b7557](https://github.com/googleapis/java-bigquery/commit/79b7557501d318fd92b90a681036fe6a1aa1bac4)) + +## [2.54.2](https://github.com/googleapis/java-bigquery/compare/v2.54.1...v2.54.2) (2025-08-26) + + +### Dependencies + +* 
Update dependency com.google.cloud:sdk-platform-java-config to v3.52.0 ([#3939](https://github.com/googleapis/java-bigquery/issues/3939)) ([794bf83](https://github.com/googleapis/java-bigquery/commit/794bf83e84efc0712638bebde5158777b9c89397)) + +## [2.54.1](https://github.com/googleapis/java-bigquery/compare/v2.54.0...v2.54.1) (2025-08-13) + + +### Bug Fixes + +* Adapt graalvm config to arrow update ([#3928](https://github.com/googleapis/java-bigquery/issues/3928)) ([ecfabc4](https://github.com/googleapis/java-bigquery/commit/ecfabc4b70922d0e697699ec5508a7328cadacf8)) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.51.0 ([#3924](https://github.com/googleapis/java-bigquery/issues/3924)) ([cb66be5](https://github.com/googleapis/java-bigquery/commit/cb66be596d1bfd0a5aed75f5a0e36d80269c7f6a)) + +## [2.54.0](https://github.com/googleapis/java-bigquery/compare/v2.53.0...v2.54.0) (2025-07-31) + + +### Features + +* **bigquery:** Add OpenTelemetry Samples ([#3899](https://github.com/googleapis/java-bigquery/issues/3899)) ([e3d9ed9](https://github.com/googleapis/java-bigquery/commit/e3d9ed92ca5d9b58b5747960d74f895ed8733ebf)) +* **bigquery:** Add otel metrics to request headers ([#3900](https://github.com/googleapis/java-bigquery/issues/3900)) ([4071e4c](https://github.com/googleapis/java-bigquery/commit/4071e4cb2547b236183fd4fbb92c73f074cf2fa0)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.70.0 ([#3890](https://github.com/googleapis/java-bigquery/issues/3890)) ([84207e2](https://github.com/googleapis/java-bigquery/commit/84207e297eec75bcb4f1cc1b64423d7c2ddd6c30)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250706-2.0.0 ([#3910](https://github.com/googleapis/java-bigquery/issues/3910)) ([ae5c971](https://github.com/googleapis/java-bigquery/commit/ae5c97146c7076e90c000fd98b797ec8e08a9cd8)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.50.2 ([#3901](https://github.com/googleapis/java-bigquery/issues/3901)) ([8205623](https://github.com/googleapis/java-bigquery/commit/82056237f194a6c99ec4fb3a4315023efdedff1b)) +* Update dependency io.opentelemetry:opentelemetry-api to v1.52.0 ([#3902](https://github.com/googleapis/java-bigquery/issues/3902)) ([772407b](https://github.com/googleapis/java-bigquery/commit/772407b12f4da005f79eafc944d4c53f0eec5c27)) +* Update dependency io.opentelemetry:opentelemetry-bom to v1.52.0 ([#3903](https://github.com/googleapis/java-bigquery/issues/3903)) ([509a6fc](https://github.com/googleapis/java-bigquery/commit/509a6fc0bb7e7a101bf0d4334a3ff1adde2cab09)) +* Update dependency io.opentelemetry:opentelemetry-context to v1.52.0 ([#3904](https://github.com/googleapis/java-bigquery/issues/3904)) ([96c1bae](https://github.com/googleapis/java-bigquery/commit/96c1bae0fcdfdfc2dbb25dcae5007c5d02111a8c)) +* Update dependency io.opentelemetry:opentelemetry-exporter-logging to v1.52.0 ([#3905](https://github.com/googleapis/java-bigquery/issues/3905)) ([28ee4c9](https://github.com/googleapis/java-bigquery/commit/28ee4c941b99b1fe3803aefbe7a8ae57100d76cb)) + +## [2.53.0](https://github.com/googleapis/java-bigquery/compare/v2.52.0...v2.53.0) (2025-07-14) + + +### Features + +* **bigquery:** Add OpenTelemetry support to BQ rpcs ([#3860](https://github.com/googleapis/java-bigquery/issues/3860)) ([e2d23c1](https://github.com/googleapis/java-bigquery/commit/e2d23c1b15f2c48a4113f82b920f5c29c4b5dfea)) +* **bigquery:** Add support for 
custom timezones and timestamps ([#3859](https://github.com/googleapis/java-bigquery/issues/3859)) ([e5467c9](https://github.com/googleapis/java-bigquery/commit/e5467c917c63ac066edcbcd902cc2093a39971a3)) +* Next release from main branch is 2.53.0 ([#3879](https://github.com/googleapis/java-bigquery/issues/3879)) ([c47a062](https://github.com/googleapis/java-bigquery/commit/c47a062136fea4de91190cafb1f11bac6abbbe3a)) + + +### Bug Fixes + +* Load jobs preserve ascii control characters configuration ([#3876](https://github.com/googleapis/java-bigquery/issues/3876)) ([5cfdf85](https://github.com/googleapis/java-bigquery/commit/5cfdf855fa0cf206660fd89743cbaabf3afa75a3)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.69.0 ([#3870](https://github.com/googleapis/java-bigquery/issues/3870)) ([a7f1007](https://github.com/googleapis/java-bigquery/commit/a7f1007b5242da2c0adebbb309a908d7d4db5974)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250615-2.0.0 ([#3872](https://github.com/googleapis/java-bigquery/issues/3872)) ([f081589](https://github.com/googleapis/java-bigquery/commit/f08158955b7fec3c2ced6332b6e4d76cc13f2e90)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.50.1 ([#3878](https://github.com/googleapis/java-bigquery/issues/3878)) ([0e971b8](https://github.com/googleapis/java-bigquery/commit/0e971b8ace013caa31b8a02a21038e94bebae2a5)) + + +### Documentation + +* Update maven format command ([#3877](https://github.com/googleapis/java-bigquery/issues/3877)) ([d2918da](https://github.com/googleapis/java-bigquery/commit/d2918da844cd20ca1602c6fcf9fa1df685f261fc)) + +## [2.52.0](https://github.com/googleapis/java-bigquery/compare/v2.51.0...v2.52.0) (2025-06-25) + + +### Features + +* **bigquery:** Integrate Otel in client lib ([#3747](https://github.com/googleapis/java-bigquery/issues/3747)) ([6e3e07a](https://github.com/googleapis/java-bigquery/commit/6e3e07a22b8397e1e9d5b567589e44abc55961f2)) +* **bigquery:** Integrate Otel into retries, jobs, and more ([#3842](https://github.com/googleapis/java-bigquery/issues/3842)) ([4b28c47](https://github.com/googleapis/java-bigquery/commit/4b28c479c1bc22326c8d2501354fb86ec2ce1744)) + + +### Bug Fixes + +* **bigquery:** Add MY_VIEW_DATASET_NAME_TEST_ to resource clean up sample ([#3838](https://github.com/googleapis/java-bigquery/issues/3838)) ([b1962a7](https://github.com/googleapis/java-bigquery/commit/b1962a7f0084ee4c3e248266b50406cf575cd657)) + + +### Dependencies + +* Remove version declaration of open-telemetry-bom ([#3855](https://github.com/googleapis/java-bigquery/issues/3855)) ([6f9f77d](https://github.com/googleapis/java-bigquery/commit/6f9f77d47596b00b7317c8a0d4a10c3d849ad57b)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.66.0 ([#3835](https://github.com/googleapis/java-bigquery/issues/3835)) ([69be5e7](https://github.com/googleapis/java-bigquery/commit/69be5e7345fb8ca69d633d9dc99cf6c15fa5227b)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.68.0 ([#3858](https://github.com/googleapis/java-bigquery/issues/3858)) ([d4ca353](https://github.com/googleapis/java-bigquery/commit/d4ca3535f54f3282aec133337103bbfa2c9a3653)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.49.2 ([#3853](https://github.com/googleapis/java-bigquery/issues/3853)) 
([cf864df](https://github.com/googleapis/java-bigquery/commit/cf864df739bbb820e99999b7c1592a3635fea4ec)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.50.0 ([#3861](https://github.com/googleapis/java-bigquery/issues/3861)) ([eb26dee](https://github.com/googleapis/java-bigquery/commit/eb26deee37119389aee3962eea5ad67d63f26c70)) +* Update dependency io.opentelemetry:opentelemetry-bom to v1.51.0 ([#3840](https://github.com/googleapis/java-bigquery/issues/3840)) ([51321c2](https://github.com/googleapis/java-bigquery/commit/51321c22778fd41134cc0cdfc70bdc47f05883f1)) +* Update ossf/scorecard-action action to v2.4.2 ([#3810](https://github.com/googleapis/java-bigquery/issues/3810)) ([414f61d](https://github.com/googleapis/java-bigquery/commit/414f61d7efcfa568c1446bd41945d7a8e2450649)) + +## [2.51.0](https://github.com/googleapis/java-bigquery/compare/v2.50.1...v2.51.0) (2025-06-06) + + +### Features + +* **bigquery:** Job creation mode GA ([#3804](https://github.com/googleapis/java-bigquery/issues/3804)) ([a21cde8](https://github.com/googleapis/java-bigquery/commit/a21cde8994e93337326cc4a2deb4bafd1596b77f)) +* **bigquery:** Support Fine Grained ACLs for Datasets ([#3803](https://github.com/googleapis/java-bigquery/issues/3803)) ([bebf1c6](https://github.com/googleapis/java-bigquery/commit/bebf1c610e6d050c49fc05f30d3fa0247b7dfdcb)) + + +### Dependencies + +* Rollback netty.version to v4.1.119.Final ([#3827](https://github.com/googleapis/java-bigquery/issues/3827)) ([94c71a0](https://github.com/googleapis/java-bigquery/commit/94c71a090eab745c81dd9530bcdd3c8c1e734788)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.65.0 ([#3787](https://github.com/googleapis/java-bigquery/issues/3787)) ([0574ecc](https://github.com/googleapis/java-bigquery/commit/0574eccec2975738804be7d0ccb4c973459c82c9)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250511-2.0.0 ([#3794](https://github.com/googleapis/java-bigquery/issues/3794)) ([d3bf724](https://github.com/googleapis/java-bigquery/commit/d3bf724feef91469b44e1e5068738604d2b3cead)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.49.0 ([#3811](https://github.com/googleapis/java-bigquery/issues/3811)) ([2c5ede4](https://github.com/googleapis/java-bigquery/commit/2c5ede4b115cf7cdd078d54d29ce93636c1cedf5)) + +## [2.50.1](https://github.com/googleapis/java-bigquery/compare/v2.50.0...v2.50.1) (2025-05-16) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.48.0 ([#3790](https://github.com/googleapis/java-bigquery/issues/3790)) ([206f06d](https://github.com/googleapis/java-bigquery/commit/206f06de115ead53b26f09a5f4781efd279b5a73)) +* Update netty.version to v4.2.1.final ([#3780](https://github.com/googleapis/java-bigquery/issues/3780)) ([6dcd858](https://github.com/googleapis/java-bigquery/commit/6dcd858eca788a8cb571368e12b4925993e380c4)) + + +### Documentation + +* **bigquery:** Update TableResult.getTotalRows() docstring ([#3785](https://github.com/googleapis/java-bigquery/issues/3785)) ([6483588](https://github.com/googleapis/java-bigquery/commit/6483588a3c5785b95ea841f21aa38f50ecf4226d)) + +## [2.50.0](https://github.com/googleapis/java-bigquery/compare/v2.49.2...v2.50.0) (2025-05-06) + + +### Features + +* Add WRITE_TRUNCATE_DATA as an enum value for write disposition ([#3752](https://github.com/googleapis/java-bigquery/issues/3752)) 
([acea61c](https://github.com/googleapis/java-bigquery/commit/acea61c20b69b44c8612ca22745458ad04bc6be4)) +* **bigquery:** Add support for reservation field in jobs. ([#3768](https://github.com/googleapis/java-bigquery/issues/3768)) ([3e97f7c](https://github.com/googleapis/java-bigquery/commit/3e97f7c0c4676fcdda0862929a69bbabc69926f2)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.63.0 ([#3770](https://github.com/googleapis/java-bigquery/issues/3770)) ([934389e](https://github.com/googleapis/java-bigquery/commit/934389eb114d8fbb10c9c125d21ec26d503dca65)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250404-2.0.0 ([#3754](https://github.com/googleapis/java-bigquery/issues/3754)) ([1381c8f](https://github.com/googleapis/java-bigquery/commit/1381c8fe6c2552eec4519304c71697302733d6c7)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250427-2.0.0 ([#3773](https://github.com/googleapis/java-bigquery/issues/3773)) ([c0795fe](https://github.com/googleapis/java-bigquery/commit/c0795fe948e0ca231dbe8fc47c470603cb48ecc8)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.46.3 ([#3772](https://github.com/googleapis/java-bigquery/issues/3772)) ([ab166b6](https://github.com/googleapis/java-bigquery/commit/ab166b6c33c574b4494368709db0443e055b4863)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.47.0 ([#3779](https://github.com/googleapis/java-bigquery/issues/3779)) ([b27434b](https://github.com/googleapis/java-bigquery/commit/b27434b8a75e74184458e920142f5575fed9ba52)) + +## [2.49.2](https://github.com/googleapis/java-bigquery/compare/v2.49.1...v2.49.2) (2025-04-26) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.46.2 ([#3756](https://github.com/googleapis/java-bigquery/issues/3756)) ([907e39f](https://github.com/googleapis/java-bigquery/commit/907e39fd467f972863deeb86356fc3bfb989a76d)) + +## [2.49.1](https://github.com/googleapis/java-bigquery/compare/v2.49.0...v2.49.1) (2025-04-24) + + +### Bug Fixes + +* Add labels to converter for listTables method ([#3735](https://github.com/googleapis/java-bigquery/issues/3735)) ([#3736](https://github.com/googleapis/java-bigquery/issues/3736)) ([8634822](https://github.com/googleapis/java-bigquery/commit/8634822e1836c5ccc0f8d0263ac57ac561578360)) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.46.0 ([#3753](https://github.com/googleapis/java-bigquery/issues/3753)) ([a335927](https://github.com/googleapis/java-bigquery/commit/a335927e16d0907d62e584f08fa8393daae40354)) +* Update netty.version to v4.2.0.final ([#3745](https://github.com/googleapis/java-bigquery/issues/3745)) ([bb811c0](https://github.com/googleapis/java-bigquery/commit/bb811c068b3efabf04fbe67dbb2979d562c604d9)) + +## [2.49.0](https://github.com/googleapis/java-bigquery/compare/v2.48.1...v2.49.0) (2025-03-20) + + +### Features + +* **bigquery:** Implement getArray in BigQueryResultImpl ([#3693](https://github.com/googleapis/java-bigquery/issues/3693)) ([e2a3f2c](https://github.com/googleapis/java-bigquery/commit/e2a3f2c1a1406bf7bc9a035dce3acfde78f0eaa4)) +* Next release from main branch is 2.49.0 ([#3706](https://github.com/googleapis/java-bigquery/issues/3706)) ([b46a6cc](https://github.com/googleapis/java-bigquery/commit/b46a6ccc959f8defb145279ea18ff2e4f1bac58f)) + + +### Bug Fixes + +* Retry ExceptionHandler not retrying on IOException 
([#3668](https://github.com/googleapis/java-bigquery/issues/3668)) ([83245b9](https://github.com/googleapis/java-bigquery/commit/83245b961950ca9a993694082e533834ee364417)) + + +### Dependencies + +* Exclude io.netty:netty-common from org.apache.arrow:arrow-memor… ([#3715](https://github.com/googleapis/java-bigquery/issues/3715)) ([11b5809](https://github.com/googleapis/java-bigquery/commit/11b580949b910b38732c1c8d64704c54c260214e)) +* Update actions/upload-artifact action to v4.6.2 ([#3724](https://github.com/googleapis/java-bigquery/issues/3724)) ([426a59b](https://github.com/googleapis/java-bigquery/commit/426a59b9b999e836804f84c5cbe11d497128f0a8)) +* Update actions/upload-artifact action to v4.6.2 ([#3724](https://github.com/googleapis/java-bigquery/issues/3724)) ([483f930](https://github.com/googleapis/java-bigquery/commit/483f9305023988b3884329733d0e5fbcb6599eb1)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.61.0 ([#3703](https://github.com/googleapis/java-bigquery/issues/3703)) ([53b07b0](https://github.com/googleapis/java-bigquery/commit/53b07b0e77f6ef57c8518df2b106edace679f79a)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.62.0 ([#3726](https://github.com/googleapis/java-bigquery/issues/3726)) ([38e004b](https://github.com/googleapis/java-bigquery/commit/38e004b58134caf4f7b0d96257456930beb0e599)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250302-2.0.0 ([#3720](https://github.com/googleapis/java-bigquery/issues/3720)) ([c0b3902](https://github.com/googleapis/java-bigquery/commit/c0b39029302c51e65ea31495d837598eefbe94e8)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250313-2.0.0 ([#3723](https://github.com/googleapis/java-bigquery/issues/3723)) ([b8875a8](https://github.com/googleapis/java-bigquery/commit/b8875a895d6d5e267086e24f97d0ed5fec36b9fe)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.65.0 ([#3704](https://github.com/googleapis/java-bigquery/issues/3704)) ([53b68b1](https://github.com/googleapis/java-bigquery/commit/53b68b13a505aa5d38e56032eaeb8c95bf3e9078)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.66.0 ([#3727](https://github.com/googleapis/java-bigquery/issues/3727)) ([7339f94](https://github.com/googleapis/java-bigquery/commit/7339f94cfa53d1c988f8ef051ddd5a2d7668d430)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.45.1 ([#3714](https://github.com/googleapis/java-bigquery/issues/3714)) ([e4512aa](https://github.com/googleapis/java-bigquery/commit/e4512aa5966e7b935fa55a062d940d9db0c834b3)) +* Update dependency com.google.oauth-client:google-oauth-client-java6 to v1.39.0 ([#3710](https://github.com/googleapis/java-bigquery/issues/3710)) ([c0c6352](https://github.com/googleapis/java-bigquery/commit/c0c6352b8d02145fe9513e3e23d316e045360d2d)) +* Update dependency com.google.oauth-client:google-oauth-client-jetty to v1.39.0 ([#3711](https://github.com/googleapis/java-bigquery/issues/3711)) ([43b86e9](https://github.com/googleapis/java-bigquery/commit/43b86e91a664dd9d3edaea7b31b46ac635fb22b0)) +* Update dependency node to v22 ([#3713](https://github.com/googleapis/java-bigquery/issues/3713)) ([251def5](https://github.com/googleapis/java-bigquery/commit/251def5659d2648dff0833ba967a65435e11b643)) +* Update netty.version to v4.1.119.final ([#3717](https://github.com/googleapis/java-bigquery/issues/3717)) 
([08a290a](https://github.com/googleapis/java-bigquery/commit/08a290adcfa7551ee27a58da0eaf5ac00a759b90)) + + +### Documentation + +* Update error handling comment to be more precise in samples ([#3712](https://github.com/googleapis/java-bigquery/issues/3712)) ([9eb555f](https://github.com/googleapis/java-bigquery/commit/9eb555ff61bef42a3bdfe197da8423b7bf14f493)) + +## [2.48.1](https://github.com/googleapis/java-bigquery/compare/v2.48.0...v2.48.1) (2025-02-26) + + +### Dependencies + +* Update actions/upload-artifact action to v4.6.1 ([#3691](https://github.com/googleapis/java-bigquery/issues/3691)) ([9c0edea](https://github.com/googleapis/java-bigquery/commit/9c0edea7c00b3ffbe6b6a404e4161f768acb34f2)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.60.0 ([#3680](https://github.com/googleapis/java-bigquery/issues/3680)) ([6d9a40d](https://github.com/googleapis/java-bigquery/commit/6d9a40d55a6bbcbff7df39723d33f0af2b24f66e)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250216-2.0.0 ([#3688](https://github.com/googleapis/java-bigquery/issues/3688)) ([e3beb6f](https://github.com/googleapis/java-bigquery/commit/e3beb6ffe433db8ad4087d0f27a8f0d23e7c9322)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.64.0 ([#3681](https://github.com/googleapis/java-bigquery/issues/3681)) ([9e4e261](https://github.com/googleapis/java-bigquery/commit/9e4e26116226d17cc42ae030eed284bd6674b74b)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.44.0 ([#3694](https://github.com/googleapis/java-bigquery/issues/3694)) ([f69fbd3](https://github.com/googleapis/java-bigquery/commit/f69fbd371f18da6ddc43d4f32f532e684026fe16)) +* Update dependency com.google.oauth-client:google-oauth-client-java6 to v1.38.0 ([#3685](https://github.com/googleapis/java-bigquery/issues/3685)) ([53bd7af](https://github.com/googleapis/java-bigquery/commit/53bd7af47783674a3accbadb1172edbcf628ab2b)) +* Update dependency com.google.oauth-client:google-oauth-client-jetty to v1.38.0 ([#3686](https://github.com/googleapis/java-bigquery/issues/3686)) ([d71b2a3](https://github.com/googleapis/java-bigquery/commit/d71b2a34a728fb6ee1c88cdc895b87959e230b7a)) +* Update ossf/scorecard-action action to v2.4.1 ([#3690](https://github.com/googleapis/java-bigquery/issues/3690)) ([cdb61fe](https://github.com/googleapis/java-bigquery/commit/cdb61febcb1a64f6ddd3c0e3c29fa7995f1d3fa5)) + +## [2.48.0](https://github.com/googleapis/java-bigquery/compare/v2.47.0...v2.48.0) (2025-02-13) + + +### Features + +* Implement wasNull for BigQueryResultSet ([#3650](https://github.com/googleapis/java-bigquery/issues/3650)) ([c7ef94b](https://github.com/googleapis/java-bigquery/commit/c7ef94be115cd572df589385f9be801033d72d6d)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.59.0 ([#3660](https://github.com/googleapis/java-bigquery/issues/3660)) ([3a6228b](https://github.com/googleapis/java-bigquery/commit/3a6228b4adc638759d3b2725c612e97e1a3b9cec)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250128-2.0.0 ([#3667](https://github.com/googleapis/java-bigquery/issues/3667)) ([0b92af6](https://github.com/googleapis/java-bigquery/commit/0b92af6eba4a633bb514089c24b7dd19cf286789)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.63.0 ([#3661](https://github.com/googleapis/java-bigquery/issues/3661)) 
([9bc8c01](https://github.com/googleapis/java-bigquery/commit/9bc8c0115dc16fb950567cd85cc7dfaa9df50d7d)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.43.0 ([#3669](https://github.com/googleapis/java-bigquery/issues/3669)) ([4d9e0ff](https://github.com/googleapis/java-bigquery/commit/4d9e0ff30269127f47484910e71fa7a21a735492)) + + +### Documentation + +* Update CONTRIBUTING.md for users without branch permissions ([#3670](https://github.com/googleapis/java-bigquery/issues/3670)) ([009b9a2](https://github.com/googleapis/java-bigquery/commit/009b9a2b3940ab66220e68ddd565710b8552cc45)) + +## [2.47.0](https://github.com/googleapis/java-bigquery/compare/v2.46.0...v2.47.0) (2025-01-29) + + +### Features + +* **bigquery:** Support resource tags for datasets in java client ([#3647](https://github.com/googleapis/java-bigquery/issues/3647)) ([01e0b74](https://github.com/googleapis/java-bigquery/commit/01e0b742b9ffeafaa89b080a39d8a66c12c1fd3b)) + + +### Bug Fixes + +* **bigquery:** Remove ReadAPI bypass in executeSelect() ([#3624](https://github.com/googleapis/java-bigquery/issues/3624)) ([fadd992](https://github.com/googleapis/java-bigquery/commit/fadd992a63fd1bc87c99cc689ed103f05de49a99)) +* Close bq read client ([#3644](https://github.com/googleapis/java-bigquery/issues/3644)) ([8833c97](https://github.com/googleapis/java-bigquery/commit/8833c97d73e3ba8e6a2061bbc55a6254b9e6668e)) + + +### Dependencies + +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20250112-2.0.0 ([#3651](https://github.com/googleapis/java-bigquery/issues/3651)) ([fd06100](https://github.com/googleapis/java-bigquery/commit/fd06100c4c18b0416d384ec1f6bdfc796b70ad9f)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.42.0 ([#3653](https://github.com/googleapis/java-bigquery/issues/3653)) ([1a14342](https://github.com/googleapis/java-bigquery/commit/1a143428c7f584db3dd6e827c2ee8fe980afe18c)) +* Update github/codeql-action action to v2.28.1 ([#3637](https://github.com/googleapis/java-bigquery/issues/3637)) ([858e517](https://github.com/googleapis/java-bigquery/commit/858e51792d98276f10fd780ef6edd0bb4a1b4f54)) + +## [2.46.0](https://github.com/googleapis/java-bigquery/compare/v2.45.0...v2.46.0) (2025-01-11) + + +### Features + +* **bigquery:** Support IAM conditions in datasets in Java client. 
([#3602](https://github.com/googleapis/java-bigquery/issues/3602)) ([6696a9c](https://github.com/googleapis/java-bigquery/commit/6696a9c7d42970e3c24bda4da713a855dbe40ce5)) + + +### Bug Fixes + +* NPE when reading BigQueryResultSet from empty tables ([#3627](https://github.com/googleapis/java-bigquery/issues/3627)) ([9a0b05a](https://github.com/googleapis/java-bigquery/commit/9a0b05a3b57797b7cdd8ca9739699fc018dbd868)) +* **test:** Force usage of ReadAPI ([#3625](https://github.com/googleapis/java-bigquery/issues/3625)) ([5ca7d4a](https://github.com/googleapis/java-bigquery/commit/5ca7d4acbbc40d6ef337732464b3bbd130c86430)) + + +### Dependencies + +* Update actions/upload-artifact action to v4.5.0 ([#3620](https://github.com/googleapis/java-bigquery/issues/3620)) ([cc25099](https://github.com/googleapis/java-bigquery/commit/cc25099f81cbf94e9e2ee9db03a7d9ecd913c176)) +* Update actions/upload-artifact action to v4.6.0 ([#3633](https://github.com/googleapis/java-bigquery/issues/3633)) ([ca20aa4](https://github.com/googleapis/java-bigquery/commit/ca20aa47ea7826594975ab6aeb8498e2377f8553)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.57.0 ([#3617](https://github.com/googleapis/java-bigquery/issues/3617)) ([51370a9](https://github.com/googleapis/java-bigquery/commit/51370a92e7ab29dfce91199666f23576d2d1b64a)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.58.0 ([#3631](https://github.com/googleapis/java-bigquery/issues/3631)) ([b0ea0d5](https://github.com/googleapis/java-bigquery/commit/b0ea0d5bc4ac730b0e2eaf47e8a7441dc113686b)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20241222-2.0.0 ([#3623](https://github.com/googleapis/java-bigquery/issues/3623)) ([4061922](https://github.com/googleapis/java-bigquery/commit/4061922e46135d673bfa48c00bbf284efa46e065)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.61.0 ([#3618](https://github.com/googleapis/java-bigquery/issues/3618)) ([6cba626](https://github.com/googleapis/java-bigquery/commit/6cba626ff14cebbc04fa4f6058b273de0c5dd96e)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.62.0 ([#3632](https://github.com/googleapis/java-bigquery/issues/3632)) ([e9ff265](https://github.com/googleapis/java-bigquery/commit/e9ff265041f6771a71c8c378ed3ff5fdec6e837b)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.41.1 ([#3628](https://github.com/googleapis/java-bigquery/issues/3628)) ([442d217](https://github.com/googleapis/java-bigquery/commit/442d217606b7d93d26887344a7a4a01303b18b8c)) +* Update dependency com.google.oauth-client:google-oauth-client-java6 to v1.37.0 ([#3614](https://github.com/googleapis/java-bigquery/issues/3614)) ([f5faa69](https://github.com/googleapis/java-bigquery/commit/f5faa69bc5b6fdae137724df5693f8aecf27d609)) +* Update dependency com.google.oauth-client:google-oauth-client-jetty to v1.37.0 ([#3615](https://github.com/googleapis/java-bigquery/issues/3615)) ([a6c7944](https://github.com/googleapis/java-bigquery/commit/a6c79443a5e675a01ecb91e362e261a6f6ecc055)) +* Update github/codeql-action action to v2.27.9 ([#3608](https://github.com/googleapis/java-bigquery/issues/3608)) ([567ce01](https://github.com/googleapis/java-bigquery/commit/567ce01ed77d44760ddcd872a0d61abdd6a09832)) +* Update github/codeql-action action to v2.28.0 ([#3621](https://github.com/googleapis/java-bigquery/issues/3621)) 
([e0e09ec](https://github.com/googleapis/java-bigquery/commit/e0e09ec4954f5b5e2f094e4c67600f38353f453c)) + +## [2.45.0](https://github.com/googleapis/java-bigquery/compare/v2.44.0...v2.45.0) (2024-12-13) + + +### Features + +* Enable Lossless Timestamps in BQ java client lib ([#3589](https://github.com/googleapis/java-bigquery/issues/3589)) ([c0b874a](https://github.com/googleapis/java-bigquery/commit/c0b874aa0150e63908450b13d019864b8cbfbfe3)) +* Introduce `java.time` methods and variables ([#3586](https://github.com/googleapis/java-bigquery/issues/3586)) ([31fb15f](https://github.com/googleapis/java-bigquery/commit/31fb15fb963c18e4c29391e9fe56dfde31577511)) + + +### Bug Fixes + +* **test:** Update schema for broken ConnImplBenchmark test ([#3574](https://github.com/googleapis/java-bigquery/issues/3574)) ([8cf4387](https://github.com/googleapis/java-bigquery/commit/8cf4387fae22c81d40635b470b216fa4c126d681)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.56.0 ([#3582](https://github.com/googleapis/java-bigquery/issues/3582)) ([616ee2a](https://github.com/googleapis/java-bigquery/commit/616ee2aa8ccf3d2975274b256252f2f249775960)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20241111-2.0.0 ([#3591](https://github.com/googleapis/java-bigquery/issues/3591)) ([3eef3a9](https://github.com/googleapis/java-bigquery/commit/3eef3a9959bcfdb76c26fdf9069d9acf89f93a7a)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20241115-2.0.0 ([#3601](https://github.com/googleapis/java-bigquery/issues/3601)) ([41f9adb](https://github.com/googleapis/java-bigquery/commit/41f9adbe4235329fa2bbfd0930f4113e63f72e05)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.60.0 ([#3583](https://github.com/googleapis/java-bigquery/issues/3583)) ([34dd8bc](https://github.com/googleapis/java-bigquery/commit/34dd8bc22c8188f2b61dc9939b24a8d820548e2b)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.41.0 ([#3607](https://github.com/googleapis/java-bigquery/issues/3607)) ([11499d1](https://github.com/googleapis/java-bigquery/commit/11499d16727934fd3dfa5c18226e6f20471a11ac)) +* Update github/codeql-action action to v2.27.5 ([#3588](https://github.com/googleapis/java-bigquery/issues/3588)) ([3f94075](https://github.com/googleapis/java-bigquery/commit/3f9407570fea5317aaf212b058ca1da05985eda9)) +* Update github/codeql-action action to v2.27.6 ([#3597](https://github.com/googleapis/java-bigquery/issues/3597)) ([bc1f3b9](https://github.com/googleapis/java-bigquery/commit/bc1f3b97a0c8ccc6e93a07b2f0ebcf8e05da9b48)) +* Update github/codeql-action action to v2.27.7 ([#3603](https://github.com/googleapis/java-bigquery/issues/3603)) ([528426b](https://github.com/googleapis/java-bigquery/commit/528426bf9b7801b1b9b45758b560f14a4c9bbc57)) + + +### Documentation + +* **bigquery:** Add javadoc description of timestamp() parameter. 
([#3604](https://github.com/googleapis/java-bigquery/issues/3604)) ([6ee0c10](https://github.com/googleapis/java-bigquery/commit/6ee0c103771ef678f66cc7a584bdce27e21f29c4)) + +## [2.44.0](https://github.com/googleapis/java-bigquery/compare/v2.43.3...v2.44.0) (2024-11-17) + + +### Features + +* Enable maxTimeTravelHours in BigQuery java client library ([#3555](https://github.com/googleapis/java-bigquery/issues/3555)) ([bd24fd8](https://github.com/googleapis/java-bigquery/commit/bd24fd8c550bfbd1207b194ed5c863a4a9924d48)) + + +### Bug Fixes + +* Update experimental methods documentation to [@internalapi](https://github.com/internalapi) ([#3552](https://github.com/googleapis/java-bigquery/issues/3552)) ([20826f1](https://github.com/googleapis/java-bigquery/commit/20826f1b08a3cc5bdcce5637b7ea21d467b2bce2)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.55.0 ([#3559](https://github.com/googleapis/java-bigquery/issues/3559)) ([950ad0c](https://github.com/googleapis/java-bigquery/commit/950ad0cce6370e332a568d3b2e9ef3911503d206)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20241027-2.0.0 ([#3568](https://github.com/googleapis/java-bigquery/issues/3568)) ([b5ccfcc](https://github.com/googleapis/java-bigquery/commit/b5ccfccb552e731ccb09be923715849a4282d44d)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.59.0 ([#3561](https://github.com/googleapis/java-bigquery/issues/3561)) ([1bd24a1](https://github.com/googleapis/java-bigquery/commit/1bd24a1ad28d168587b7cba95ec348cb1308a803)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.40.0 ([#3576](https://github.com/googleapis/java-bigquery/issues/3576)) ([d5fa951](https://github.com/googleapis/java-bigquery/commit/d5fa951b8255ec1bcbcdf9bb3c29f247e38a0c7e)) +* Update github/codeql-action action to v2.27.1 ([#3567](https://github.com/googleapis/java-bigquery/issues/3567)) ([e154ee3](https://github.com/googleapis/java-bigquery/commit/e154ee300485dc9d900343a8b5ceb7f6633bc3ff)) +* Update github/codeql-action action to v2.27.3 ([#3569](https://github.com/googleapis/java-bigquery/issues/3569)) ([3707a40](https://github.com/googleapis/java-bigquery/commit/3707a402039365c49e1976a388593f621231dc02)) +* Update github/codeql-action action to v2.27.4 ([#3572](https://github.com/googleapis/java-bigquery/issues/3572)) ([2c7b4f7](https://github.com/googleapis/java-bigquery/commit/2c7b4f750f4c8bf03c0ba74402d745341382a209)) + + +### Documentation + +* Fix BigQuery documentation formating ([#3565](https://github.com/googleapis/java-bigquery/issues/3565)) ([552f491](https://github.com/googleapis/java-bigquery/commit/552f49132af370f66aa1ccdde86e6280f638da22)) + +## [2.43.3](https://github.com/googleapis/java-bigquery/compare/v2.43.2...v2.43.3) (2024-10-29) + + +### Dependencies + +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.10.2 ([19fc184](https://github.com/googleapis/java-bigquery/commit/19fc1843f7db8ab6fb361bf7f8119014033bc1c6)) + +## [2.43.2](https://github.com/googleapis/java-bigquery/compare/v2.43.1...v2.43.2) (2024-10-27) + + +### Dependencies + +* Update actions/checkout action to v4.2.2 ([#3541](https://github.com/googleapis/java-bigquery/issues/3541)) ([c36c123](https://github.com/googleapis/java-bigquery/commit/c36c123f5cd298b1481c9073ac9f5e634b0e1e68)) +* Update actions/upload-artifact action to v4.4.2 ([#3524](https://github.com/googleapis/java-bigquery/issues/3524)) 
([776a554](https://github.com/googleapis/java-bigquery/commit/776a5541cc94e8ffb1f5e5c6969ae06585571b45)) +* Update actions/upload-artifact action to v4.4.3 ([#3530](https://github.com/googleapis/java-bigquery/issues/3530)) ([2f87fd9](https://github.com/googleapis/java-bigquery/commit/2f87fd9d777175cb5a8e5b0dc55f07546351e504)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.54.0 ([#3532](https://github.com/googleapis/java-bigquery/issues/3532)) ([25be311](https://github.com/googleapis/java-bigquery/commit/25be311c1477db0993a5825a2b839a295170790f)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20241013-2.0.0 ([#3544](https://github.com/googleapis/java-bigquery/issues/3544)) ([0c42092](https://github.com/googleapis/java-bigquery/commit/0c42092e34912d21a4d13f041577056faadf914a)) +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.10.0 ([0bd3c86](https://github.com/googleapis/java-bigquery/commit/0bd3c862636271c5a851fcd229b4cf6878a8c5d4)) +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.10.1 ([c03a63a](https://github.com/googleapis/java-bigquery/commit/c03a63a0da4f4915e9761dc1ca7429c46748688c)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.58.0 ([#3533](https://github.com/googleapis/java-bigquery/issues/3533)) ([cad2643](https://github.com/googleapis/java-bigquery/commit/cad26430f21a37eec2b87ea417f0cf67dcf9c97a)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.38.0 ([#3542](https://github.com/googleapis/java-bigquery/issues/3542)) ([16448ee](https://github.com/googleapis/java-bigquery/commit/16448eec7c7f00a113c923a0fcde463c8ac91f9b)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.39.0 ([#3548](https://github.com/googleapis/java-bigquery/issues/3548)) ([616b2f6](https://github.com/googleapis/java-bigquery/commit/616b2f611f313994bf0ec2889daea3b569a84baf)) +* Update github/codeql-action action to v2.26.13 ([#3536](https://github.com/googleapis/java-bigquery/issues/3536)) ([844744f](https://github.com/googleapis/java-bigquery/commit/844744f3dea804a31abc806592f557a26cffbab4)) +* Update github/codeql-action action to v2.27.0 ([#3540](https://github.com/googleapis/java-bigquery/issues/3540)) ([1616a0f](https://github.com/googleapis/java-bigquery/commit/1616a0f6057916e21f3b4a6d418d1431d8d1fa16)) + + +### Documentation + +* Reformat javadoc ([#3545](https://github.com/googleapis/java-bigquery/issues/3545)) ([4763f73](https://github.com/googleapis/java-bigquery/commit/4763f73ad854ca4bfdddbbdc0bb43fe639238665)) +* Update SimpleApp to explicitly set project id ([#3534](https://github.com/googleapis/java-bigquery/issues/3534)) ([903a0f7](https://github.com/googleapis/java-bigquery/commit/903a0f7db0926f3d166eebada1710413056fb4a2)) + +## [2.43.1](https://github.com/googleapis/java-bigquery/compare/v2.43.0...v2.43.1) (2024-10-09) + + +### Dependencies + +* Update actions/checkout action to v4.2.1 ([#3520](https://github.com/googleapis/java-bigquery/issues/3520)) ([ad8175a](https://github.com/googleapis/java-bigquery/commit/ad8175af06d5308a9366f8109055d61c115a4852)) +* Update actions/upload-artifact action to v4.4.1 ([#3521](https://github.com/googleapis/java-bigquery/issues/3521)) ([dc21975](https://github.com/googleapis/java-bigquery/commit/dc21975cc6f3597d8f789f12a58feaa5b9b94da0)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240919-2.0.0 
([#3514](https://github.com/googleapis/java-bigquery/issues/3514)) ([9fe3829](https://github.com/googleapis/java-bigquery/commit/9fe382927ff4718252e22ac20c4e012f490e6b0e)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.37.0 ([bf4d37a](https://github.com/googleapis/java-bigquery/commit/bf4d37a15f13ada3cf0045b2d45355193d2c2f34)) +* Update github/codeql-action action to v2.26.11 ([#3517](https://github.com/googleapis/java-bigquery/issues/3517)) ([ac736bb](https://github.com/googleapis/java-bigquery/commit/ac736bb50bf4b2e629dcbfe7de90b846e07038e4)) +* Update github/codeql-action action to v2.26.12 ([#3522](https://github.com/googleapis/java-bigquery/issues/3522)) ([fdf8dc4](https://github.com/googleapis/java-bigquery/commit/fdf8dc4b7cb4e26939da10002e47c810d71bad6c)) + +## [2.43.0](https://github.com/googleapis/java-bigquery/compare/v2.42.4...v2.43.0) (2024-10-01) + + +### Features + +* Add max staleness to ExternalTableDefinition ([#3499](https://github.com/googleapis/java-bigquery/issues/3499)) ([f1ebd5b](https://github.com/googleapis/java-bigquery/commit/f1ebd5be5877a68f76efafc30e3b5b0763f343c5)) + +## [2.42.4](https://github.com/googleapis/java-bigquery/compare/v2.42.3...v2.42.4) (2024-09-30) + + +### Dependencies + +* Update actions/checkout action to v4.2.0 ([#3495](https://github.com/googleapis/java-bigquery/issues/3495)) ([b57fefb](https://github.com/googleapis/java-bigquery/commit/b57fefbdfee7b8dacdb12502d1df72af21323b51)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.51.0 ([#3480](https://github.com/googleapis/java-bigquery/issues/3480)) ([986b036](https://github.com/googleapis/java-bigquery/commit/986b036a022c8f68db59dd9d5944f3b724777533)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.53.0 ([#3504](https://github.com/googleapis/java-bigquery/issues/3504)) ([57ce901](https://github.com/googleapis/java-bigquery/commit/57ce9018448ebf4f09d3ecf9760054ebd117bc36)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240905-2.0.0 ([#3483](https://github.com/googleapis/java-bigquery/issues/3483)) ([a6508a2](https://github.com/googleapis/java-bigquery/commit/a6508a29f81b6729e41e827096e90f1d1bf07f4d)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.55.0 ([#3481](https://github.com/googleapis/java-bigquery/issues/3481)) ([8908cfd](https://github.com/googleapis/java-bigquery/commit/8908cfd82332d09997a5538113fbe8e382f52c4a)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.57.0 ([#3505](https://github.com/googleapis/java-bigquery/issues/3505)) ([6e78f56](https://github.com/googleapis/java-bigquery/commit/6e78f56d17bb0d30b361220c86b1c66f21e9bd48)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.36.0 ([#3490](https://github.com/googleapis/java-bigquery/issues/3490)) ([a72c582](https://github.com/googleapis/java-bigquery/commit/a72c5825c93f359d295fb78e0e541752f535876b)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.36.1 ([#3496](https://github.com/googleapis/java-bigquery/issues/3496)) ([8f2e5c5](https://github.com/googleapis/java-bigquery/commit/8f2e5c542760ecd7c217c36c80cb3b5aebee6a73)) +* Update dependency ubuntu to v24 ([#3498](https://github.com/googleapis/java-bigquery/issues/3498)) ([4f87ade](https://github.com/googleapis/java-bigquery/commit/4f87adec6c010b572675f98b651f88d14323e2e2)) +* Update github/codeql-action action to v2.26.10 
([#3506](https://github.com/googleapis/java-bigquery/issues/3506)) ([ca71294](https://github.com/googleapis/java-bigquery/commit/ca712948b1adfb26bb1f9ef2250be10fe45d3424)) +* Update github/codeql-action action to v2.26.7 ([#3482](https://github.com/googleapis/java-bigquery/issues/3482)) ([e2c94b6](https://github.com/googleapis/java-bigquery/commit/e2c94b601781ebe236c25cd3f40059e7543ba387)) +* Update github/codeql-action action to v2.26.8 ([#3488](https://github.com/googleapis/java-bigquery/issues/3488)) ([a6d75de](https://github.com/googleapis/java-bigquery/commit/a6d75de60b822dcc5433afab55b5d392e6a6caf5)) +* Update github/codeql-action action to v2.26.9 ([#3494](https://github.com/googleapis/java-bigquery/issues/3494)) ([8154043](https://github.com/googleapis/java-bigquery/commit/815404319a43a8a14d1d8aaa8ab22dd924b48175)) + +## [2.42.3](https://github.com/googleapis/java-bigquery/compare/v2.42.2...v2.42.3) (2024-09-12) + + +### Dependencies + +* Update actions/upload-artifact action to v4.4.0 ([#3467](https://github.com/googleapis/java-bigquery/issues/3467)) ([08b28c5](https://github.com/googleapis/java-bigquery/commit/08b28c510a2280119a03da3caa385ec31e0c944c)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.35.0 ([#3472](https://github.com/googleapis/java-bigquery/issues/3472)) ([fa9ac5d](https://github.com/googleapis/java-bigquery/commit/fa9ac5d73ec4f21ab7d12949e413b4ee9d11aa6d)) + +## [2.42.2](https://github.com/googleapis/java-bigquery/compare/v2.42.1...v2.42.2) (2024-08-29) + + +### Bug Fixes + +* ExecuteSelect now use provided credentials instead of GOOGLE_APP… ([#3465](https://github.com/googleapis/java-bigquery/issues/3465)) ([cd82235](https://github.com/googleapis/java-bigquery/commit/cd82235475310cacf1f607a412418be97c83559f)) + + +### Dependencies + +* Update actions/upload-artifact action to v4.3.5 ([#3456](https://github.com/googleapis/java-bigquery/issues/3456)) ([f00977c](https://github.com/googleapis/java-bigquery/commit/f00977ccf60227bf1415795da5b6e0a208f21b2c)) +* Update actions/upload-artifact action to v4.3.5 ([#3462](https://github.com/googleapis/java-bigquery/issues/3462)) ([e1c6e92](https://github.com/googleapis/java-bigquery/commit/e1c6e92813c739fcd861e0622413b74c638cb547)) +* Update actions/upload-artifact action to v4.3.6 ([#3463](https://github.com/googleapis/java-bigquery/issues/3463)) ([ba91227](https://github.com/googleapis/java-bigquery/commit/ba91227b972acb1d0796d5a9470ba790dfb8d5b0)) +* Update github/codeql-action action to v2.26.6 ([#3464](https://github.com/googleapis/java-bigquery/issues/3464)) ([2aeb44d](https://github.com/googleapis/java-bigquery/commit/2aeb44d8b2ff5fa264cb14a8fc31e9494d77cb6b)) + +## [2.42.1](https://github.com/googleapis/java-bigquery/compare/v2.42.0...v2.42.1) (2024-08-27) + + +### Bug Fixes + +* NPE for executeSelect nonFast path with empty result ([#3445](https://github.com/googleapis/java-bigquery/issues/3445)) ([d0d758a](https://github.com/googleapis/java-bigquery/commit/d0d758a6e5e90502491eefa64e3a7409bdcea6a9)) + + +### Dependencies + +* Update actions/upload-artifact action to v4.3.5 ([#3420](https://github.com/googleapis/java-bigquery/issues/3420)) ([d5ec87d](https://github.com/googleapis/java-bigquery/commit/d5ec87d16f64c231c8bfd87635952cb1a04f5e25)) +* Update actions/upload-artifact action to v4.3.5 ([#3422](https://github.com/googleapis/java-bigquery/issues/3422)) ([c7d07b3](https://github.com/googleapis/java-bigquery/commit/c7d07b3f1d6fa2c2259fa7315b284bcaf48ee5f2)) +* Update actions/upload-artifact 
action to v4.3.5 ([#3424](https://github.com/googleapis/java-bigquery/issues/3424)) ([a9d6869](https://github.com/googleapis/java-bigquery/commit/a9d6869251fa3df80d639c6998b62992468d6625)) +* Update actions/upload-artifact action to v4.3.5 ([#3427](https://github.com/googleapis/java-bigquery/issues/3427)) ([022eb57](https://github.com/googleapis/java-bigquery/commit/022eb578ae0b6f02e943662c8d4e453590f7c209)) +* Update actions/upload-artifact action to v4.3.5 ([#3430](https://github.com/googleapis/java-bigquery/issues/3430)) ([c7aacba](https://github.com/googleapis/java-bigquery/commit/c7aacbaeddc4809e283c6dfcdedd9610eac7730f)) +* Update actions/upload-artifact action to v4.3.5 ([#3432](https://github.com/googleapis/java-bigquery/issues/3432)) ([b7e8244](https://github.com/googleapis/java-bigquery/commit/b7e8244cffdef926465e2d2700766b98ad687247)) +* Update actions/upload-artifact action to v4.3.5 ([#3436](https://github.com/googleapis/java-bigquery/issues/3436)) ([ccefd6e](https://github.com/googleapis/java-bigquery/commit/ccefd6e755042b1e4c2aaec10228abb05779ed87)) +* Update actions/upload-artifact action to v4.3.5 ([#3440](https://github.com/googleapis/java-bigquery/issues/3440)) ([916fe9a](https://github.com/googleapis/java-bigquery/commit/916fe9ad67e5162a9f24852a96e40a2051ebffbd)) +* Update actions/upload-artifact action to v4.3.5 ([#3443](https://github.com/googleapis/java-bigquery/issues/3443)) ([187f099](https://github.com/googleapis/java-bigquery/commit/187f099edbf785e3ef50ae28fce6ae194d44dfb3)) +* Update actions/upload-artifact action to v4.3.5 ([#3444](https://github.com/googleapis/java-bigquery/issues/3444)) ([04aea5e](https://github.com/googleapis/java-bigquery/commit/04aea5e1d0eeab02f8ea92ff3467c64507dc05c9)) +* Update actions/upload-artifact action to v4.3.5 ([#3449](https://github.com/googleapis/java-bigquery/issues/3449)) ([c6e93cd](https://github.com/googleapis/java-bigquery/commit/c6e93cd1996f2feca3c79bf5ec4a079bd821c0f6)) +* Update actions/upload-artifact action to v4.3.5 ([#3455](https://github.com/googleapis/java-bigquery/issues/3455)) ([fbfc106](https://github.com/googleapis/java-bigquery/commit/fbfc1064688ba594a0d232c413e6f8b54558590f)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.49.0 ([#3417](https://github.com/googleapis/java-bigquery/issues/3417)) ([66336a8](https://github.com/googleapis/java-bigquery/commit/66336a8989681a7c5c3d901c11c7fc6cef0b9fef)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.50.0 ([#3448](https://github.com/googleapis/java-bigquery/issues/3448)) ([2c12839](https://github.com/googleapis/java-bigquery/commit/2c128398b04c28ccd0844d028e2f8c467f8723f0)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240714-2.0.0 ([#3412](https://github.com/googleapis/java-bigquery/issues/3412)) ([8a48fd1](https://github.com/googleapis/java-bigquery/commit/8a48fd1eb6762e42bbdc49d1aa4ebab36c3e8e26)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240727-2.0.0 ([#3421](https://github.com/googleapis/java-bigquery/issues/3421)) ([91d780b](https://github.com/googleapis/java-bigquery/commit/91d780b0db2b9b05923b60621cf80251293be184)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240727-2.0.0 ([#3423](https://github.com/googleapis/java-bigquery/issues/3423)) ([16f350c](https://github.com/googleapis/java-bigquery/commit/16f350c28ec60dc4011b77cbda6416c9de45d431)) +* Update dependency 
com.google.apis:google-api-services-bigquery to v2-rev20240727-2.0.0 ([#3428](https://github.com/googleapis/java-bigquery/issues/3428)) ([9ae6eca](https://github.com/googleapis/java-bigquery/commit/9ae6ecac3337eb19bced14b9fcd7ce74580d7326)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240803-2.0.0 ([#3435](https://github.com/googleapis/java-bigquery/issues/3435)) ([b4e20db](https://github.com/googleapis/java-bigquery/commit/b4e20db60b30dac9039407d724b8f7c816301e5c)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240815-2.0.0 ([#3454](https://github.com/googleapis/java-bigquery/issues/3454)) ([8796aee](https://github.com/googleapis/java-bigquery/commit/8796aee5f669414169dc8baf88f9121697f4cd04)) +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.9.0 ([c4afbef](https://github.com/googleapis/java-bigquery/commit/c4afbef9d4df03c798241d56d8988adb5724d008)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.53.0 ([#3418](https://github.com/googleapis/java-bigquery/issues/3418)) ([6cff7f0](https://github.com/googleapis/java-bigquery/commit/6cff7f0c2241223c529321e2b613f15c84ecbdcc)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.54.0 ([#3450](https://github.com/googleapis/java-bigquery/issues/3450)) ([cc9da95](https://github.com/googleapis/java-bigquery/commit/cc9da9576fa276afe069caff075c50e41e412ce1)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.34.0 ([#3433](https://github.com/googleapis/java-bigquery/issues/3433)) ([801f441](https://github.com/googleapis/java-bigquery/commit/801f44172f7be43e0649a116fb0bb556507fc572)) +* Update github/codeql-action action to v2.26.2 ([#3426](https://github.com/googleapis/java-bigquery/issues/3426)) ([0a6574f](https://github.com/googleapis/java-bigquery/commit/0a6574fa11aa83b5c899f1dcd3b1132aa4f46ebd)) +* Update github/codeql-action action to v2.26.3 ([#3438](https://github.com/googleapis/java-bigquery/issues/3438)) ([390e182](https://github.com/googleapis/java-bigquery/commit/390e1824bffef17e85d0ec142b4fcca6dff80a9c)) +* Update github/codeql-action action to v2.26.5 ([#3446](https://github.com/googleapis/java-bigquery/issues/3446)) ([58aacc5](https://github.com/googleapis/java-bigquery/commit/58aacc5a92e18b790a03c0b9b4a75062928768c2)) + + +### Documentation + +* Update iam policy sample user to be consistent with other languages ([#3429](https://github.com/googleapis/java-bigquery/issues/3429)) ([2fc15b3](https://github.com/googleapis/java-bigquery/commit/2fc15b3e9f89289f0a047bb0a6ae7fb5bb71d253)) + +## [2.42.0](https://github.com/googleapis/java-bigquery/compare/v2.41.0...v2.42.0) (2024-07-28) + + +### Features + +* Add ability to specify RetryOptions and BigQueryRetryConfig when create job and waitFor ([#3398](https://github.com/googleapis/java-bigquery/issues/3398)) ([1f91ae7](https://github.com/googleapis/java-bigquery/commit/1f91ae7fa2100a05f969a7429cb619a2b8b42dee)) +* Add additional parameters to CsvOptions and ParquetOptions ([#3370](https://github.com/googleapis/java-bigquery/issues/3370)) ([34f16fb](https://github.com/googleapis/java-bigquery/commit/34f16fbaad236f5a6db26d693efde2025913d540)) +* Add remaining Statement Types ([#3381](https://github.com/googleapis/java-bigquery/issues/3381)) ([5f39b19](https://github.com/googleapis/java-bigquery/commit/5f39b19e8839f06d956addb8d95cf05e4b60a3f1)) + + +### Bug Fixes + +* Null field mode inconsistency 
([#2863](https://github.com/googleapis/java-bigquery/issues/2863)) ([b9e96e3](https://github.com/googleapis/java-bigquery/commit/b9e96e3aa738a1813ad452cf6141f792f437e8de)) + + +### Dependencies + +* Update actions/upload-artifact action to v4.3.4 ([#3382](https://github.com/googleapis/java-bigquery/issues/3382)) ([efa1aef](https://github.com/googleapis/java-bigquery/commit/efa1aef0a579baa379adbfbd2ee12f4ee5f3d987)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.48.0 ([#3374](https://github.com/googleapis/java-bigquery/issues/3374)) ([45b7f20](https://github.com/googleapis/java-bigquery/commit/45b7f20e1b324d9b77183c0f8bb5ae14724d6aef)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240616-2.0.0 ([#3368](https://github.com/googleapis/java-bigquery/issues/3368)) ([ceb270c](https://github.com/googleapis/java-bigquery/commit/ceb270c5cc2af4d69948ac89af1d72990fe1a7ee)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240623-2.0.0 ([#3384](https://github.com/googleapis/java-bigquery/issues/3384)) ([e1de34f](https://github.com/googleapis/java-bigquery/commit/e1de34f0c4c67d75bcf15f35fe86c411b61d04ac)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240629-2.0.0 ([#3392](https://github.com/googleapis/java-bigquery/issues/3392)) ([352562d](https://github.com/googleapis/java-bigquery/commit/352562da445e35a8207bcf77442130867f32e52d)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.52.0 ([#3375](https://github.com/googleapis/java-bigquery/issues/3375)) ([2115c04](https://github.com/googleapis/java-bigquery/commit/2115c0448b242ddd887f2bac3d68c45847273c3d)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.33.0 ([#3405](https://github.com/googleapis/java-bigquery/issues/3405)) ([a4a9999](https://github.com/googleapis/java-bigquery/commit/a4a9999def9805b8fecbc1820cc9f6f6c1997991)) +* Update dependency org.junit.vintage:junit-vintage-engine to v5.10.3 ([#3371](https://github.com/googleapis/java-bigquery/issues/3371)) ([2e804c5](https://github.com/googleapis/java-bigquery/commit/2e804c56eeef5009cc46c7544fe9b04bfdd65d7a)) +* Update github/codeql-action action to v2.25.11 ([#3376](https://github.com/googleapis/java-bigquery/issues/3376)) ([f1e0014](https://github.com/googleapis/java-bigquery/commit/f1e0014dca5ca04522796b44ff313696d2b41176)) +* Update github/codeql-action action to v2.25.12 ([#3387](https://github.com/googleapis/java-bigquery/issues/3387)) ([af60b30](https://github.com/googleapis/java-bigquery/commit/af60b30cd774992c5d82063106471926dc6aaa6e)) +* Update github/codeql-action action to v2.25.13 ([#3395](https://github.com/googleapis/java-bigquery/issues/3395)) ([95c8d6f](https://github.com/googleapis/java-bigquery/commit/95c8d6f65c5c5355fc52a0a2b54002d8f9cdb1ef)) +* Update github/codeql-action action to v2.25.15 ([#3402](https://github.com/googleapis/java-bigquery/issues/3402)) ([a61ce7d](https://github.com/googleapis/java-bigquery/commit/a61ce7d710e2e8b000ee25ec9d295abbc2b63dd1)) +* Update ossf/scorecard-action action to v2.4.0 ([#3408](https://github.com/googleapis/java-bigquery/issues/3408)) ([66777a2](https://github.com/googleapis/java-bigquery/commit/66777a2c3c7b0462330bd1c820e2f04ad4727465)) + + +### Documentation + +* Add short mode query sample ([#3397](https://github.com/googleapis/java-bigquery/issues/3397)) ([6dca6ff](https://github.com/googleapis/java-bigquery/commit/6dca6fffe96937db87713e45f0501d64fd5b544f)) +* Add 
simple query connection read api sample ([#3394](https://github.com/googleapis/java-bigquery/issues/3394)) ([d407baa](https://github.com/googleapis/java-bigquery/commit/d407baa3e95ad894d4028aa46def7ca8efe930c3)) + +## [2.41.0](https://github.com/googleapis/java-bigquery/compare/v2.40.3...v2.41.0) (2024-06-25) + + +### Features + +* Add columnNameCharacterMap to LoadJobConfiguration ([#3356](https://github.com/googleapis/java-bigquery/issues/3356)) ([2f3cbe3](https://github.com/googleapis/java-bigquery/commit/2f3cbe39619bcc93cb7d504417accd84b418dd41)) +* Add MetadataCacheMode to ExternalTableDefinition ([#3351](https://github.com/googleapis/java-bigquery/issues/3351)) ([2814dc4](https://github.com/googleapis/java-bigquery/commit/2814dc49dfdd5671257b6a9933a5dd381d889dd1)) + + +### Bug Fixes + +* Add clustering value to ListTables result ([#3359](https://github.com/googleapis/java-bigquery/issues/3359)) ([5d52bc9](https://github.com/googleapis/java-bigquery/commit/5d52bc9f4ef93f84200335685901c6ac0256b769)) + + +### Dependencies + +* Update actions/checkout action to v4.1.7 ([#3349](https://github.com/googleapis/java-bigquery/issues/3349)) ([0857234](https://github.com/googleapis/java-bigquery/commit/085723491e4aca58d670c313bc18b0c044cfdca8)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240602-2.0.0 ([#3273](https://github.com/googleapis/java-bigquery/issues/3273)) ([7b7e52b](https://github.com/googleapis/java-bigquery/commit/7b7e52b339f57af752c573a222df68196f1808f5)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.32.0 ([#3360](https://github.com/googleapis/java-bigquery/issues/3360)) ([4420996](https://github.com/googleapis/java-bigquery/commit/4420996e89fef49270771bb4f01ffa4e871e7885)) +* Update github/codeql-action action to v2.25.10 ([#3348](https://github.com/googleapis/java-bigquery/issues/3348)) ([8b6feff](https://github.com/googleapis/java-bigquery/commit/8b6feffa0e8add73a7587ce1762989713c2af38b)) + +## [2.40.3](https://github.com/googleapis/java-bigquery/compare/v2.40.2...v2.40.3) (2024-06-12) + + +### Dependencies + +* Update actions/checkout action to v4.1.6 ([#3309](https://github.com/googleapis/java-bigquery/issues/3309)) ([c7d6362](https://github.com/googleapis/java-bigquery/commit/c7d6362d47cb985abf3c08f5c4e89f651480c4c8)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.46.0 ([#3328](https://github.com/googleapis/java-bigquery/issues/3328)) ([a6661ad](https://github.com/googleapis/java-bigquery/commit/a6661ade5e297102ff54d314fa55caac9201ac67)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.47.0 ([#3342](https://github.com/googleapis/java-bigquery/issues/3342)) ([79e34c2](https://github.com/googleapis/java-bigquery/commit/79e34c256ddf99a43d546788535a9e8fa0e97e6d)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.50.0 ([#3330](https://github.com/googleapis/java-bigquery/issues/3330)) ([cabb0ab](https://github.com/googleapis/java-bigquery/commit/cabb0ab1bc09ba10c43a2cf109f1390268441693)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.51.0 ([#3343](https://github.com/googleapis/java-bigquery/issues/3343)) ([e3b934f](https://github.com/googleapis/java-bigquery/commit/e3b934fa133679a2d61baeea6f4de15eed287f7f)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.31.0 ([#3335](https://github.com/googleapis/java-bigquery/issues/3335)) 
([0623455](https://github.com/googleapis/java-bigquery/commit/062345501c392c2a186c3cd82dee8d20ceda2a0a)) +* Update dependency com.google.oauth-client:google-oauth-client-java6 to v1.36.0 ([#3305](https://github.com/googleapis/java-bigquery/issues/3305)) ([d05e554](https://github.com/googleapis/java-bigquery/commit/d05e5547e97f52ccfdcec1d6fe167e6587dd00c6)) +* Update dependency com.google.oauth-client:google-oauth-client-jetty to v1.36.0 ([#3306](https://github.com/googleapis/java-bigquery/issues/3306)) ([0eeed66](https://github.com/googleapis/java-bigquery/commit/0eeed668b5f88f9c59ef6c1b309e7a81f5c1f0e9)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.10.2 ([#3311](https://github.com/googleapis/java-bigquery/issues/3311)) ([3912a92](https://github.com/googleapis/java-bigquery/commit/3912a9232788e09c10fc4e91ef6d65514fc106e4)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.10.2 ([#3312](https://github.com/googleapis/java-bigquery/issues/3312)) ([9737a5d](https://github.com/googleapis/java-bigquery/commit/9737a5d63d545ed197879bbd9dbfd3f1dbc15d93)) +* Update github/codeql-action action to v2.25.6 ([#3307](https://github.com/googleapis/java-bigquery/issues/3307)) ([8999d33](https://github.com/googleapis/java-bigquery/commit/8999d337b92d7030825c5a36686ddd082cadc816)) +* Update github/codeql-action action to v2.25.7 ([#3334](https://github.com/googleapis/java-bigquery/issues/3334)) ([768342d](https://github.com/googleapis/java-bigquery/commit/768342da168921251c34163b51ffc3cddfefc0ce)) +* Update github/codeql-action action to v2.25.8 ([#3338](https://github.com/googleapis/java-bigquery/issues/3338)) ([8673fe5](https://github.com/googleapis/java-bigquery/commit/8673fe55e6d33e50c32a520a848cddc25eb6088e)) + +## [2.40.2](https://github.com/googleapis/java-bigquery/compare/v2.40.1...v2.40.2) (2024-05-26) + + +### Bug Fixes + +* Fixing NPE bug by adding to if clause ([#3290](https://github.com/googleapis/java-bigquery/issues/3290)) ([127cff9](https://github.com/googleapis/java-bigquery/commit/127cff9f964c5d2d912d26276474822fd137a64b)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.45.0 ([#3295](https://github.com/googleapis/java-bigquery/issues/3295)) ([c659523](https://github.com/googleapis/java-bigquery/commit/c659523a7ca25bc12282f0e28fff18ec9221f48e)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.49.0 ([#3296](https://github.com/googleapis/java-bigquery/issues/3296)) ([7d148d5](https://github.com/googleapis/java-bigquery/commit/7d148d5bb1d6e1e6b0a421749fcbb73a6fbe61e0)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.30.1 ([#3310](https://github.com/googleapis/java-bigquery/issues/3310)) ([641f1a8](https://github.com/googleapis/java-bigquery/commit/641f1a8325f0f43aeffd135654480a721f26e4e7)) +* Update github/codeql-action action to v2.25.4 ([#3291](https://github.com/googleapis/java-bigquery/issues/3291)) ([13bb5aa](https://github.com/googleapis/java-bigquery/commit/13bb5aaa6e4bac7144a369c9fbb5ae8922eb36ee)) +* Update ossf/scorecard-action action to v2.3.3 ([#3304](https://github.com/googleapis/java-bigquery/issues/3304)) ([d096082](https://github.com/googleapis/java-bigquery/commit/d09608211aed5dc49e2b5e51affe7942403ed267)) + +## [2.40.1](https://github.com/googleapis/java-bigquery/compare/v2.40.0...v2.40.1) (2024-05-06) + + +### Dependencies + +* Update actions/checkout action 
([#3286](https://github.com/googleapis/java-bigquery/issues/3286)) ([4d8f3fb](https://github.com/googleapis/java-bigquery/commit/4d8f3fb7fd3d8f6e9484c809d6690f8078ef7a30)) + +## [2.40.0](https://github.com/googleapis/java-bigquery/compare/v2.39.1...v2.40.0) (2024-05-06) + + +### Features + +* Add getStringOrDefault method to FieldValue ([#3255](https://github.com/googleapis/java-bigquery/issues/3255)) ([8bac33a](https://github.com/googleapis/java-bigquery/commit/8bac33a32e0239ffa03715ad0c6440527cb2e01e)) + + +### Dependencies + +* Update dependency com.google.cloud:sdk-platform-java-config to v3.30.0 ([#3279](https://github.com/googleapis/java-bigquery/issues/3279)) ([67f2ea4](https://github.com/googleapis/java-bigquery/commit/67f2ea47f78240b6def27241e21fd298a75920b2)) + +## [2.39.1](https://github.com/googleapis/java-bigquery/compare/v2.39.0...v2.39.1) (2024-04-29) + + +### Bug Fixes + +* @Nullable annotations on builder methods ([#3222](https://github.com/googleapis/java-bigquery/issues/3222)) ([0c5eed1](https://github.com/googleapis/java-bigquery/commit/0c5eed1a18409f120a1243bd5da1db2aa4f9c206)) + + +### Dependencies + +* Update actions/checkout action ([#3267](https://github.com/googleapis/java-bigquery/issues/3267)) ([c297ed2](https://github.com/googleapis/java-bigquery/commit/c297ed2c77e36257451b5c12e4988f3293cdbb88)) +* Update actions/upload-artifact action to v4.3.3 ([#3258](https://github.com/googleapis/java-bigquery/issues/3258)) ([5215235](https://github.com/googleapis/java-bigquery/commit/52152350a2a6218b51ebf3d7dd6beb2699064a3c)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.44.0 ([#3270](https://github.com/googleapis/java-bigquery/issues/3270)) ([ee09ab6](https://github.com/googleapis/java-bigquery/commit/ee09ab68ea2be824aaf4e3d08b67e3bfbab2977f)) +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v3.5.0 ([e7c6201](https://github.com/googleapis/java-bigquery/commit/e7c620119321b673c19b99adb79247cd3c52cd67)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.48.0 ([#3271](https://github.com/googleapis/java-bigquery/issues/3271)) ([3b6e0d5](https://github.com/googleapis/java-bigquery/commit/3b6e0d5e3d26b8e2de412aa926a638d72562d4a0)) +* Update github/codeql-action action to v2.25.2 ([#3260](https://github.com/googleapis/java-bigquery/issues/3260)) ([3302dc4](https://github.com/googleapis/java-bigquery/commit/3302dc46e3e2c6a173798ef7f1642d3d4cb20332)) +* Update github/codeql-action action to v2.25.3 ([#3268](https://github.com/googleapis/java-bigquery/issues/3268)) ([1cf2377](https://github.com/googleapis/java-bigquery/commit/1cf237702e16952029741c306aa57cb3558a663f)) + +## [2.39.0](https://github.com/googleapis/java-bigquery/compare/v2.38.2...v2.39.0) (2024-04-22) + + +### Features + +* Add ExportDataStats to QueryStatistics ([#3244](https://github.com/googleapis/java-bigquery/issues/3244)) ([e91be80](https://github.com/googleapis/java-bigquery/commit/e91be80ebdd39c2448914ff9aa1742f3079d0bb8)) +* Add new fields to copy job statistics ([#3205](https://github.com/googleapis/java-bigquery/issues/3205)) ([64bdda8](https://github.com/googleapis/java-bigquery/commit/64bdda84fe06726042a41f2a89ac5c067f9bc949)) +* Add Range object to allow reading range value ([#3236](https://github.com/googleapis/java-bigquery/issues/3236)) ([2c3399d](https://github.com/googleapis/java-bigquery/commit/2c3399dd10fecc01237158a3cdeee966b38746f2)) +* Add support for inserting Range values 
([#3246](https://github.com/googleapis/java-bigquery/issues/3246)) ([ff1ebc6](https://github.com/googleapis/java-bigquery/commit/ff1ebc66e458519deca37275ba91650133188683)) +* Add support for ObjectMetadata ([#3217](https://github.com/googleapis/java-bigquery/issues/3217)) ([975df05](https://github.com/googleapis/java-bigquery/commit/975df05b95b714c5574155d5e09860885c4b58f2)) +* Add totalSlotMs to JobStatistics ([#3250](https://github.com/googleapis/java-bigquery/issues/3250)) ([75ea095](https://github.com/googleapis/java-bigquery/commit/75ea095b0a194d6be4951795bc3a616ace389ff2)) + + +### Bug Fixes + +* Fix BigQuery#listDatasets to include dataset location in the response ([#3238](https://github.com/googleapis/java-bigquery/issues/3238)) ([c50c17b](https://github.com/googleapis/java-bigquery/commit/c50c17bc4eedd0c34f440b697a8b26a5354c9c4f)) +* Remove @InternalApi from TableResult ([#3257](https://github.com/googleapis/java-bigquery/issues/3257)) ([19d92a1](https://github.com/googleapis/java-bigquery/commit/19d92a144cd4d86fee6dd420e574c3a1a928642c)) + + +### Dependencies + +* Update actions/checkout action ([#3256](https://github.com/googleapis/java-bigquery/issues/3256)) ([6df3a32](https://github.com/googleapis/java-bigquery/commit/6df3a325b7f71ed1eb2054dd0c3a27cfd6cda2f2)) +* Update actions/upload-artifact action to v4.3.2 ([#3248](https://github.com/googleapis/java-bigquery/issues/3248)) ([066b51f](https://github.com/googleapis/java-bigquery/commit/066b51fb088fc67c83a45a219897752876889136)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.40.0 ([#3210](https://github.com/googleapis/java-bigquery/issues/3210)) ([bf7e97e](https://github.com/googleapis/java-bigquery/commit/bf7e97e1c936a419a34529a316c4f538872dd20b)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.41.0 ([#3219](https://github.com/googleapis/java-bigquery/issues/3219)) ([9d71b8b](https://github.com/googleapis/java-bigquery/commit/9d71b8b9a9231ea5d7cfa93c7bcbb533d6a3a900)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.43.0 ([#3225](https://github.com/googleapis/java-bigquery/issues/3225)) ([a897306](https://github.com/googleapis/java-bigquery/commit/a8973067348fa09acd91c5b01f048c43fac93894)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240323-2.0.0 ([#3239](https://github.com/googleapis/java-bigquery/issues/3239)) ([2c0f48f](https://github.com/googleapis/java-bigquery/commit/2c0f48f86d3c4d5a1a682775c494a9122373858d)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.44.0 ([#3211](https://github.com/googleapis/java-bigquery/issues/3211)) ([6993b51](https://github.com/googleapis/java-bigquery/commit/6993b51f8722466b846a7dd3912acbd81e04126c)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.45.0 ([#3220](https://github.com/googleapis/java-bigquery/issues/3220)) ([21ae09c](https://github.com/googleapis/java-bigquery/commit/21ae09ce2c63f790ca77cc5c4c0df16dcb123b59)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.47.0 ([#3226](https://github.com/googleapis/java-bigquery/issues/3226)) ([d45d168](https://github.com/googleapis/java-bigquery/commit/d45d168bf53a8648e2254c8c4305a5d9a390276d)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.28.1 ([#3207](https://github.com/googleapis/java-bigquery/issues/3207)) 
([6204331](https://github.com/googleapis/java-bigquery/commit/6204331953b3922f5ecb1ac0c1868cb6579dd73b)) +* Update dependency org.threeten:threeten-extra to v1.8.0 ([#3242](https://github.com/googleapis/java-bigquery/issues/3242)) ([66d5efd](https://github.com/googleapis/java-bigquery/commit/66d5efded17c42514f98f4af2bc6ba826999a62a)) +* Update github/codeql-action action to v2.24.9 ([#3204](https://github.com/googleapis/java-bigquery/issues/3204)) ([7a24d3e](https://github.com/googleapis/java-bigquery/commit/7a24d3e29f32db58475c1e02ab1c13ee8941c27d)) +* Update github/codeql-action action to v2.25.1 ([#3229](https://github.com/googleapis/java-bigquery/issues/3229)) ([aeedf29](https://github.com/googleapis/java-bigquery/commit/aeedf2960700f1742e38469fd26ea70000967cfa)) + +## [2.38.2](https://github.com/googleapis/java-bigquery/compare/v2.38.1...v2.38.2) (2024-03-21) + + +### Dependencies + +* Update actions/checkout action ([#3190](https://github.com/googleapis/java-bigquery/issues/3190)) ([940e4f6](https://github.com/googleapis/java-bigquery/commit/940e4f6c656a2e0f1d2e4d6e08d42214d14fe125)) +* Update arrow.version to v15.0.1 ([#3189](https://github.com/googleapis/java-bigquery/issues/3189)) ([fb6284e](https://github.com/googleapis/java-bigquery/commit/fb6284e94d4744bb4c8f9501751bf79e04a2429b)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.39.0 ([#3186](https://github.com/googleapis/java-bigquery/issues/3186)) ([9e705a1](https://github.com/googleapis/java-bigquery/commit/9e705a140ac6fc1d1d64674dc985c35955911667)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240229-2.0.0 ([#3188](https://github.com/googleapis/java-bigquery/issues/3188)) ([a018424](https://github.com/googleapis/java-bigquery/commit/a018424ccbf3c2c554d829c97e442f4813b2c764)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.43.0 ([#3187](https://github.com/googleapis/java-bigquery/issues/3187)) ([497ff29](https://github.com/googleapis/java-bigquery/commit/497ff298d84e536161b112c6b1aa176d4d962a49)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.28.1 ([#3196](https://github.com/googleapis/java-bigquery/issues/3196)) ([61f23a3](https://github.com/googleapis/java-bigquery/commit/61f23a35d2b5cbbd66ddf35b93709a5669b5b102)) +* Update github/codeql-action action to v2.24.6 ([#3178](https://github.com/googleapis/java-bigquery/issues/3178)) ([8843cae](https://github.com/googleapis/java-bigquery/commit/8843cae621e1eede6b072b1347f2a68a36304bca)) +* Update github/codeql-action action to v2.24.7 ([#3194](https://github.com/googleapis/java-bigquery/issues/3194)) ([2e2d730](https://github.com/googleapis/java-bigquery/commit/2e2d730de9e4e49f25c20de2cfe1ae38babef830)) +* Update github/codeql-action action to v2.24.8 ([#3198](https://github.com/googleapis/java-bigquery/issues/3198)) ([bd81a56](https://github.com/googleapis/java-bigquery/commit/bd81a56a07c836abb05c1de0d42e9cd397920e99)) + +## [2.38.1](https://github.com/googleapis/java-bigquery/compare/v2.38.0...v2.38.1) (2024-03-07) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.38.0 ([#3159](https://github.com/googleapis/java-bigquery/issues/3159)) ([d6c65ab](https://github.com/googleapis/java-bigquery/commit/d6c65abb844d1cca616907cd6aeb02f2a6042916)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.42.0 ([#3160](https://github.com/googleapis/java-bigquery/issues/3160)) 
([e31b5b7](https://github.com/googleapis/java-bigquery/commit/e31b5b7ea4b91ab0096bf318377dfd66d1364b3c)) +* Update dependency com.google.cloud:sdk-platform-java-config to v3.27.0 ([#3176](https://github.com/googleapis/java-bigquery/issues/3176)) ([b93e62e](https://github.com/googleapis/java-bigquery/commit/b93e62e30808d9df95fa4c268dcd37a5462056e1)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.10.1 ([#3153](https://github.com/googleapis/java-bigquery/issues/3153)) ([436f58c](https://github.com/googleapis/java-bigquery/commit/436f58cbd33546f78ae082d4261ce106f9f77a66)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.10.1 ([#3154](https://github.com/googleapis/java-bigquery/issues/3154)) ([b68ab42](https://github.com/googleapis/java-bigquery/commit/b68ab427b157a40c7e7d415b02a01f1988080e08)) +* Update github/codeql-action action to v2.24.5 ([#3165](https://github.com/googleapis/java-bigquery/issues/3165)) ([8ac7722](https://github.com/googleapis/java-bigquery/commit/8ac7722977e453d272710153180f458be6427aa4)) + +## [2.38.0](https://github.com/googleapis/java-bigquery/compare/v2.37.2...v2.38.0) (2024-02-22) + + +### Features + +* Add MetadataCacheStatistics to Job QueryStatistics ([#3133](https://github.com/googleapis/java-bigquery/issues/3133)) ([f3f387b](https://github.com/googleapis/java-bigquery/commit/f3f387b2265d527f3b5bf567c1eaf7ecdad6e096)) + + +### Dependencies + +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240211-2.0.0 ([#3152](https://github.com/googleapis/java-bigquery/issues/3152)) ([e5d6888](https://github.com/googleapis/java-bigquery/commit/e5d688872e4c125a68ed6f666bffd0a41efc3f30)) +* Update github/codeql-action action to v2.24.3 ([#3148](https://github.com/googleapis/java-bigquery/issues/3148)) ([a0a7b01](https://github.com/googleapis/java-bigquery/commit/a0a7b0186ae47fcfcf75fe4f35cce50044c6926c)) +* Update github/codeql-action action to v2.24.3 ([#3150](https://github.com/googleapis/java-bigquery/issues/3150)) ([042fcf0](https://github.com/googleapis/java-bigquery/commit/042fcf0aca46d349103211c3d04ae4b49868933c)) +* Update github/codeql-action action to v2.24.4 ([#3161](https://github.com/googleapis/java-bigquery/issues/3161)) ([531b1a0](https://github.com/googleapis/java-bigquery/commit/531b1a0b93ee19a7479a006207c30f7399869773)) + +## [2.37.2](https://github.com/googleapis/java-bigquery/compare/v2.37.1...v2.37.2) (2024-02-14) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.37.0 ([#3132](https://github.com/googleapis/java-bigquery/issues/3132)) ([3a1efc2](https://github.com/googleapis/java-bigquery/commit/3a1efc2ede4eb1de5e3cf7703dc6bdef51b263f1)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240203-2.0.0 ([#3126](https://github.com/googleapis/java-bigquery/issues/3126)) ([5e28419](https://github.com/googleapis/java-bigquery/commit/5e2841988b223bc8ac775ed2b4d38e2c26b2815b)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.41.0 ([#3135](https://github.com/googleapis/java-bigquery/issues/3135)) ([9ab79ec](https://github.com/googleapis/java-bigquery/commit/9ab79ec14d661e79f152568ff667bef3482315e0)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.25.0 ([#3140](https://github.com/googleapis/java-bigquery/issues/3140)) ([e61a7bc](https://github.com/googleapis/java-bigquery/commit/e61a7bc5735822308db3baab327474b6319e4d93)) +* Update github/codeql-action action 
to v2.24.1 ([#3139](https://github.com/googleapis/java-bigquery/issues/3139)) ([4b3a429](https://github.com/googleapis/java-bigquery/commit/4b3a42991836c3bb23972616cb4d20756dc83488)) + +## [2.37.1](https://github.com/googleapis/java-bigquery/compare/v2.37.0...v2.37.1) (2024-02-06) + + +### Features + +* Add queryId to TableResult ([#3106](https://github.com/googleapis/java-bigquery/issues/3106)) ([2156f02](https://github.com/googleapis/java-bigquery/commit/2156f023b4ab95bc7ec669545b5709317555fdac)) +* Update universe domain exception error code/message ([#3113](https://github.com/googleapis/java-bigquery/issues/3113)) ([5a82c85](https://github.com/googleapis/java-bigquery/commit/5a82c854b6549c82c905eba4905378b59cc88af4)) + + +### Dependencies + +* Update actions/upload-artifact action to v4.3.1 ([#3121](https://github.com/googleapis/java-bigquery/issues/3121)) ([3abdc70](https://github.com/googleapis/java-bigquery/commit/3abdc70890c3f5c396055d7d34fc4fe18aef7371)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240124-2.0.0 ([#3104](https://github.com/googleapis/java-bigquery/issues/3104)) ([6eff68e](https://github.com/googleapis/java-bigquery/commit/6eff68eb8c55162ca1cd2e915f3f60a87f584b35)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.24.0 ([#3109](https://github.com/googleapis/java-bigquery/issues/3109)) ([5ad778c](https://github.com/googleapis/java-bigquery/commit/5ad778c18130c33e7532d0bd3193053518ca047a)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.10.0 ([#3110](https://github.com/googleapis/java-bigquery/issues/3110)) ([3f8e8d1](https://github.com/googleapis/java-bigquery/commit/3f8e8d1f9477aed3a14bccf021d7ff982463022c)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.10.0 ([#3111](https://github.com/googleapis/java-bigquery/issues/3111)) ([2858e96](https://github.com/googleapis/java-bigquery/commit/2858e96807190d995d9d682e056d90821da11b7a)) +* Update dependency org.junit.vintage:junit-vintage-engine to v5.10.2 ([#3119](https://github.com/googleapis/java-bigquery/issues/3119)) ([4b4fdd8](https://github.com/googleapis/java-bigquery/commit/4b4fdd8ebc324d2bc48a9f78347247f4f6c2e424)) +* Update github/codeql-action action to v2.23.2 ([#3102](https://github.com/googleapis/java-bigquery/issues/3102)) ([2cc545e](https://github.com/googleapis/java-bigquery/commit/2cc545ec1c0267fd9c33f3f1566f29d9a30f514e)) +* Update github/codeql-action action to v2.24.0 ([#3114](https://github.com/googleapis/java-bigquery/issues/3114)) ([01f0405](https://github.com/googleapis/java-bigquery/commit/01f04059ab119da759536426938c1069906c8be4)) + +## [2.37.0](https://github.com/googleapis/java-bigquery/compare/v2.36.0...v2.37.0) (2024-01-25) + + +### Features + +* Add support for Table resource tags ([#3046](https://github.com/googleapis/java-bigquery/issues/3046)) ([7d61111](https://github.com/googleapis/java-bigquery/commit/7d61111d23282c7e2478ac31ba0d4e423330ec92)) +* Add universe domain ([#3090](https://github.com/googleapis/java-bigquery/issues/3090)) ([b2814a2](https://github.com/googleapis/java-bigquery/commit/b2814a2f8e6601347d0489058e563878af40f301)) + + +### Dependencies + +* Update actions/upload-artifact action to v4.1.0 ([#3071](https://github.com/googleapis/java-bigquery/issues/3071)) ([3fbb2bb](https://github.com/googleapis/java-bigquery/commit/3fbb2bba7ad7bca245a8ca5eb59d999aead29ebd)) +* Update actions/upload-artifact action to v4.2.0 
([#3081](https://github.com/googleapis/java-bigquery/issues/3081)) ([af81354](https://github.com/googleapis/java-bigquery/commit/af81354c342cdb2a790cb008fc9fe3460e62265b)) +* Update actions/upload-artifact action to v4.3.0 ([#3091](https://github.com/googleapis/java-bigquery/issues/3091)) ([f4411b0](https://github.com/googleapis/java-bigquery/commit/f4411b09c4b681d2f3fb250ef133b291649f2865)) +* Update arrow.version to v15 ([#3084](https://github.com/googleapis/java-bigquery/issues/3084)) ([4d4cbae](https://github.com/googleapis/java-bigquery/commit/4d4cbae00028f84c071f51e88d7ee976efdf04d5)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.35.0 ([#3066](https://github.com/googleapis/java-bigquery/issues/3066)) ([48cdaa8](https://github.com/googleapis/java-bigquery/commit/48cdaa8a77935062cfe9ed8fb66f52f774bdd673)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.36.0 ([#3093](https://github.com/googleapis/java-bigquery/issues/3093)) ([24456a3](https://github.com/googleapis/java-bigquery/commit/24456a361a39550e962ac68a79de3c7a9e912884)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20240105-2.0.0 ([#3073](https://github.com/googleapis/java-bigquery/issues/3073)) ([f371d67](https://github.com/googleapis/java-bigquery/commit/f371d6709109acf07224ee0b2615de400fd90838)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.39.0 ([#3067](https://github.com/googleapis/java-bigquery/issues/3067)) ([6ff4f04](https://github.com/googleapis/java-bigquery/commit/6ff4f043905a6fe67bc3ed5cbbacc0f9eddd3172)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.40.0 ([#3094](https://github.com/googleapis/java-bigquery/issues/3094)) ([110bcc5](https://github.com/googleapis/java-bigquery/commit/110bcc506272b372fdd90ee718fad298c8ab7e19)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.22.0 ([#3080](https://github.com/googleapis/java-bigquery/issues/3080)) ([a5b119c](https://github.com/googleapis/java-bigquery/commit/a5b119cdf6f8ce9d180a2d51a3a7a9aad50b1ea4)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.23.0 ([#3096](https://github.com/googleapis/java-bigquery/issues/3096)) ([0933b34](https://github.com/googleapis/java-bigquery/commit/0933b34f30ade216dcf61767a771509ca07f294b)) +* Update dependency com.google.oauth-client:google-oauth-client-java6 to v1.35.0 ([#3078](https://github.com/googleapis/java-bigquery/issues/3078)) ([2614df2](https://github.com/googleapis/java-bigquery/commit/2614df203b1f3c9800b8c8c23d32e7f22ec76253)) +* Update dependency com.google.oauth-client:google-oauth-client-jetty to v1.35.0 ([#3079](https://github.com/googleapis/java-bigquery/issues/3079)) ([f03c4fc](https://github.com/googleapis/java-bigquery/commit/f03c4fc957ae5665fe0f98c0f06cc80eea7cec59)) +* Update github/codeql-action action to v2.23.0 ([#3061](https://github.com/googleapis/java-bigquery/issues/3061)) ([0fbdfba](https://github.com/googleapis/java-bigquery/commit/0fbdfba1aecf18567fae95aea133b6504f050bd5)) +* Update github/codeql-action action to v2.23.1 ([#3077](https://github.com/googleapis/java-bigquery/issues/3077)) ([e3f417c](https://github.com/googleapis/java-bigquery/commit/e3f417cec3bdd81040baac3f054d0270dde9d9f8)) + +## [2.36.0](https://github.com/googleapis/java-bigquery/compare/v2.35.0...v2.36.0) (2024-01-10) + + +### Features + +* Support RANGE schema 
([#3043](https://github.com/googleapis/java-bigquery/issues/3043)) ([febfc1f](https://github.com/googleapis/java-bigquery/commit/febfc1fdb026a0d07ab24159437633cfb7f44c5d)) +* Use location in BigQueryOptions as the default for query ([#3047](https://github.com/googleapis/java-bigquery/issues/3047)) ([270f866](https://github.com/googleapis/java-bigquery/commit/270f8665a4973be6091697bc8101bee76e26ff1d)) + + +### Bug Fixes + +* BigQuery.create NullPointerException when job already exists ([#3035](https://github.com/googleapis/java-bigquery/issues/3035)) ([38191b1](https://github.com/googleapis/java-bigquery/commit/38191b10a3c2f3ed89351e095c9fc1983bb301ea)) + + +### Dependencies + +* Update actions/upload-artifact action to v4 ([#3055](https://github.com/googleapis/java-bigquery/issues/3055)) ([7d76100](https://github.com/googleapis/java-bigquery/commit/7d761006b4a7f549bf254b03f67989c9b41cd7b1)) +* Update arrow.version to v14.0.2 ([#3050](https://github.com/googleapis/java-bigquery/issues/3050)) ([b0dc33a](https://github.com/googleapis/java-bigquery/commit/b0dc33ad4e004edcfbee131edb5745159f9e6af3)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.34.0 ([#3033](https://github.com/googleapis/java-bigquery/issues/3033)) ([a710632](https://github.com/googleapis/java-bigquery/commit/a7106325b0f688fc36b1b93ecb7001e45b54a454)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.21.0 ([#3060](https://github.com/googleapis/java-bigquery/issues/3060)) ([78995c4](https://github.com/googleapis/java-bigquery/commit/78995c47ba33019e1007f98d152016ffc3184bd4)) +* Update github/codeql-action action to v2.22.11 ([#3002](https://github.com/googleapis/java-bigquery/issues/3002)) ([52d5e97](https://github.com/googleapis/java-bigquery/commit/52d5e97bdb9fa58c9fbee06cc1b41d58feeae379)) + +## [2.35.0](https://github.com/googleapis/java-bigquery/compare/v2.34.2...v2.35.0) (2023-12-01) + + +### Features + +* Add InputBytes to extract job statistics ([#2998](https://github.com/googleapis/java-bigquery/issues/2998)) ([19b7c3a](https://github.com/googleapis/java-bigquery/commit/19b7c3ad842a566d4e3e93e48625e0281504de80)) +* Add Routine DataGovernanceType ([#3006](https://github.com/googleapis/java-bigquery/issues/3006)) ([ecb567b](https://github.com/googleapis/java-bigquery/commit/ecb567b75849cd0665ac4ab315a5af3bdf934f48)) + + +### Bug Fixes + +* Update TableInsertRows.java ([#2999](https://github.com/googleapis/java-bigquery/issues/2999)) ([ff4a086](https://github.com/googleapis/java-bigquery/commit/ff4a086f0aa3c0401b62489ea8a0b9e2fd6cb3fe)) + + +### Dependencies + +* Update actions/github-script action to v7 ([#3001](https://github.com/googleapis/java-bigquery/issues/3001)) ([d1bdeab](https://github.com/googleapis/java-bigquery/commit/d1bdeab242ea2e6374d1b6d0bbd9eadf638cbcb2)) +* Update actions/setup-java action to v4 ([#3018](https://github.com/googleapis/java-bigquery/issues/3018)) ([14ed571](https://github.com/googleapis/java-bigquery/commit/14ed571365f600b188b7d6716e2549c5b81868ad)) +* Update arrow.version to v14 ([#3023](https://github.com/googleapis/java-bigquery/issues/3023)) ([759fd64](https://github.com/googleapis/java-bigquery/commit/759fd64f73a437e4b9847a807b5b716069b4d20e)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20231111-2.0.0 ([#3020](https://github.com/googleapis/java-bigquery/issues/3020)) ([ef48002](https://github.com/googleapis/java-bigquery/commit/ef480029c9e1958c1b6a6f2241b110fb3cfe036d)) +* Update
dependency com.google.cloud:google-cloud-shared-dependencies to v3.20.0 ([#3019](https://github.com/googleapis/java-bigquery/issues/3019)) ([0293edb](https://github.com/googleapis/java-bigquery/commit/0293edb74e02f44803faacbce400df20da53f66c)) + +## [2.34.2](https://github.com/googleapis/java-bigquery/compare/v2.34.1...v2.34.2) (2023-11-07) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.32.0 ([#2989](https://github.com/googleapis/java-bigquery/issues/2989)) ([47a61a7](https://github.com/googleapis/java-bigquery/commit/47a61a7c2cb5fed88937670bca7b15e38529dfaf)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.36.0 ([#2990](https://github.com/googleapis/java-bigquery/issues/2990)) ([81c0727](https://github.com/googleapis/java-bigquery/commit/81c07275809eadc4e7146bd080475e4775102339)) + +## [2.34.1](https://github.com/googleapis/java-bigquery/compare/v2.34.0...v2.34.1) (2023-11-06) + + +### Dependencies + +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.19.0 ([#2986](https://github.com/googleapis/java-bigquery/issues/2986)) ([0d400da](https://github.com/googleapis/java-bigquery/commit/0d400da7f73ee44ab5053ef51b1d45a9d29f0ebb)) +* Update dependency org.checkerframework:checker-compat-qual to v2.5.6 ([#2982](https://github.com/googleapis/java-bigquery/issues/2982)) ([c137f1f](https://github.com/googleapis/java-bigquery/commit/c137f1f17f192f4f0a3c4d33e1d27677dbf4556b)) +* Update dependency org.junit.vintage:junit-vintage-engine to v5.10.1 ([#2984](https://github.com/googleapis/java-bigquery/issues/2984)) ([a64b91c](https://github.com/googleapis/java-bigquery/commit/a64b91c03b0291452d53cc407d9c841b3567fe23)) +* Update github/codeql-action action to v2.22.5 ([#2975](https://github.com/googleapis/java-bigquery/issues/2975)) ([0b88846](https://github.com/googleapis/java-bigquery/commit/0b8884634f8bd21615a9263bb1344cb162adfa47)) + +## [2.34.0](https://github.com/googleapis/java-bigquery/compare/v2.33.2...v2.34.0) (2023-10-26) + + +### Features + +* Add BigLakeConfiguration Property in StandardTableDefinition.java ([#2916](https://github.com/googleapis/java-bigquery/issues/2916)) ([1d660fa](https://github.com/googleapis/java-bigquery/commit/1d660fa19f0d82c2b6ec2ea9590881e513274c25)) +* Add support for Dataset property storageBillingModel ([#2913](https://github.com/googleapis/java-bigquery/issues/2913)) ([f452cf4](https://github.com/googleapis/java-bigquery/commit/f452cf4e100b6cc211681a840ddbd0be5108d01e)) +* Add support for preview features ([#2923](https://github.com/googleapis/java-bigquery/issues/2923)) ([113b8f2](https://github.com/googleapis/java-bigquery/commit/113b8f27419365c7277c6a300c5f07cea954cca1)) + + +### Dependencies + +* Update actions/checkout action to v4.1.1 ([#2950](https://github.com/googleapis/java-bigquery/issues/2950)) ([c556c18](https://github.com/googleapis/java-bigquery/commit/c556c1837baf0d53245452d6a152910df7883262)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.30.0 ([#2942](https://github.com/googleapis/java-bigquery/issues/2942)) ([e760fca](https://github.com/googleapis/java-bigquery/commit/e760fcae98b23ff4e7fc3ae25f2437be220e9df9)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.31.0 ([#2967](https://github.com/googleapis/java-bigquery/issues/2967)) ([7ed55b5](https://github.com/googleapis/java-bigquery/commit/7ed55b5c075dbac827c6201d0398ff87d8240b38)) +* Update dependency 
com.google.apis:google-api-services-bigquery to v2-rev20231008-2.0.0 ([#2946](https://github.com/googleapis/java-bigquery/issues/2946)) ([3d0da5b](https://github.com/googleapis/java-bigquery/commit/3d0da5b5a20f49721477afbed10ea3fff43652bb)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.34.0 ([#2943](https://github.com/googleapis/java-bigquery/issues/2943)) ([18162c3](https://github.com/googleapis/java-bigquery/commit/18162c37c97eff6387e0f58d211f2c1725a9c8d3)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.35.0 ([#2968](https://github.com/googleapis/java-bigquery/issues/2968)) ([219db2c](https://github.com/googleapis/java-bigquery/commit/219db2c0023610d2adcba4889a9b785df2113893)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.18.0 ([#2955](https://github.com/googleapis/java-bigquery/issues/2955)) ([1ee18eb](https://github.com/googleapis/java-bigquery/commit/1ee18ebeb90adeb371ef04cbfc7b18be2c24d1e8)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.9.28 ([#2956](https://github.com/googleapis/java-bigquery/issues/2956)) ([b03effd](https://github.com/googleapis/java-bigquery/commit/b03effd3b5f5fd6365de9a6267a1a8ace46d7718)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.28 ([#2957](https://github.com/googleapis/java-bigquery/issues/2957)) ([6465e41](https://github.com/googleapis/java-bigquery/commit/6465e413c93e01069f86c80fc424715d46f9067b)) +* Update github/codeql-action action to v2.22.2 ([#2944](https://github.com/googleapis/java-bigquery/issues/2944)) ([f584e59](https://github.com/googleapis/java-bigquery/commit/f584e59571f0c7918d2d83a19b00d49bd5b558c4)) +* Update github/codeql-action action to v2.22.3 ([#2954](https://github.com/googleapis/java-bigquery/issues/2954)) ([1b2bc18](https://github.com/googleapis/java-bigquery/commit/1b2bc18bf49d06e1ccd29745be649108dd28cfa5)) +* Update github/codeql-action action to v2.22.4 ([#2958](https://github.com/googleapis/java-bigquery/issues/2958)) ([de9bcee](https://github.com/googleapis/java-bigquery/commit/de9bcee50ba682ffa93aae063191a8880741507d)) +* Update ossf/scorecard-action action to v2.3.1 ([#2960](https://github.com/googleapis/java-bigquery/issues/2960)) ([855e698](https://github.com/googleapis/java-bigquery/commit/855e69889f68592608c8a56070ffdafdf8365f57)) + +## [2.33.2](https://github.com/googleapis/java-bigquery/compare/v2.33.1...v2.33.2) (2023-10-11) + + +### Bug Fixes + +* GetDouble in read API path ([#2919](https://github.com/googleapis/java-bigquery/issues/2919)) ([436ee8e](https://github.com/googleapis/java-bigquery/commit/436ee8ebe9104f6ca721f1a14bd409158c7bdb5a)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.29.0 ([#2911](https://github.com/googleapis/java-bigquery/issues/2911)) ([052f5c2](https://github.com/googleapis/java-bigquery/commit/052f5c2f722243be39c0d93b1f81b81a0db48ef1)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20230925-2.0.0 ([#2921](https://github.com/googleapis/java-bigquery/issues/2921)) ([f0fb64f](https://github.com/googleapis/java-bigquery/commit/f0fb64f43817c5aa53adb6c5152afe3fd44b7df1)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.33.0 ([#2912](https://github.com/googleapis/java-bigquery/issues/2912)) ([e053494](https://github.com/googleapis/java-bigquery/commit/e05349476a8d987b2cd24ee6a80b2d9b7b9463ee)) +* Update dependency 
com.google.cloud:google-cloud-shared-dependencies to v3.17.0 ([#2931](https://github.com/googleapis/java-bigquery/issues/2931)) ([25a94f1](https://github.com/googleapis/java-bigquery/commit/25a94f1da840c0cb32bed882e18f7b10f9890d04)) +* Update github/codeql-action action to v2.22.0 ([#2926](https://github.com/googleapis/java-bigquery/issues/2926)) ([33ce4ae](https://github.com/googleapis/java-bigquery/commit/33ce4aee8b10f630212d96901af00f063c43f440)) +* Update github/codeql-action action to v2.22.1 ([#2934](https://github.com/googleapis/java-bigquery/issues/2934)) ([7ae7b99](https://github.com/googleapis/java-bigquery/commit/7ae7b99ab21fcd6e74cbaa95e750da961c09ae80)) +* Update ossf/scorecard-action action to v2.3.0 ([#2927](https://github.com/googleapis/java-bigquery/issues/2927)) ([93bfd8e](https://github.com/googleapis/java-bigquery/commit/93bfd8eb7fe35121e97b06cf8b103a3960fe9535)) + +## [2.33.1](https://github.com/googleapis/java-bigquery/compare/v2.33.0...v2.33.1) (2023-09-28) + + +### Bug Fixes + +* Dry run NPE when there are no query parameters ([#2899](https://github.com/googleapis/java-bigquery/issues/2899)) ([8f85a4d](https://github.com/googleapis/java-bigquery/commit/8f85a4d540623e8b4c83005e62e842ba36f8fb1b)) + +## [2.33.0](https://github.com/googleapis/java-bigquery/compare/v2.32.0...v2.33.0) (2023-09-27) + + +### Features + +* Add support for FileSetSpec ([#2888](https://github.com/googleapis/java-bigquery/issues/2888)) ([3895bd9](https://github.com/googleapis/java-bigquery/commit/3895bd94b283b6ff731cfa94426ea0691e0d54c4)) + + +### Bug Fixes + +* Update samples snippet to write to BYTES instead of ARRAY<BYTES> ([#2876](https://github.com/googleapis/java-bigquery/issues/2876)) ([7e040e9](https://github.com/googleapis/java-bigquery/commit/7e040e97eeec762ab97190dea33b94769d681bf0)) + + +### Dependencies + +* Update actions/checkout action ([#2893](https://github.com/googleapis/java-bigquery/issues/2893)) ([e3655af](https://github.com/googleapis/java-bigquery/commit/e3655af235f002128979ed592c5aade33a4c7596)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.16.1 ([#2892](https://github.com/googleapis/java-bigquery/issues/2892)) ([e1d9871](https://github.com/googleapis/java-bigquery/commit/e1d987199ad8994aa3e9115daf26e0fb27aef911)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.9.27 ([#2885](https://github.com/googleapis/java-bigquery/issues/2885)) ([2237ca2](https://github.com/googleapis/java-bigquery/commit/2237ca2a1dbe9e1dc1d5e6c0dc2bd2fd39e01ef0)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.27 ([#2886](https://github.com/googleapis/java-bigquery/issues/2886)) ([539b4e6](https://github.com/googleapis/java-bigquery/commit/539b4e62f80598fb510fad37429ae0441db04c6f)) +* Update github/codeql-action action to v2.21.4 ([#2829](https://github.com/googleapis/java-bigquery/issues/2829)) ([599e3b3](https://github.com/googleapis/java-bigquery/commit/599e3b3d7e948a0688c6e08d4910f9db5c532f99)) +* Update github/codeql-action action to v2.21.8 - abandoned ([#2897](https://github.com/googleapis/java-bigquery/issues/2897)) ([ab4e1d0](https://github.com/googleapis/java-bigquery/commit/ab4e1d026c34b7d28caaf5b0b1465ac2de62c530)) +* Update github/codeql-action action to v2.21.8 ([#2889](https://github.com/googleapis/java-bigquery/issues/2889)) ([b568026](https://github.com/googleapis/java-bigquery/commit/b568026fe1b8fb7365306b718b5f8540fb13b8dc)) +* Update github/codeql-action action to v2.21.9
([#2901](https://github.com/googleapis/java-bigquery/issues/2901)) ([33a729f](https://github.com/googleapis/java-bigquery/commit/33a729f367ba6d9f04595e1b781c7eb321289380)) + +## [2.32.0](https://github.com/googleapis/java-bigquery/compare/v2.31.2...v2.32.0) (2023-09-14) + + +### Features + +* Add support for converting interval fields to threeten PeriodDuration ([#2838](https://github.com/googleapis/java-bigquery/issues/2838)) ([2294c2f](https://github.com/googleapis/java-bigquery/commit/2294c2ffca62a22a66786a9a4c6c9ef1be898e5d)) +* Add support for ExternalDatasetReference ([#2871](https://github.com/googleapis/java-bigquery/issues/2871)) ([bbb86fd](https://github.com/googleapis/java-bigquery/commit/bbb86fd8488ad253f2e9cf3fb08360330bd860a3)) + + +### Dependencies + +* Update actions/checkout action to v4 ([#2862](https://github.com/googleapis/java-bigquery/issues/2862)) ([902e9b9](https://github.com/googleapis/java-bigquery/commit/902e9b97cd548910354297ff6e605df094a03175)) +* Update actions/upload-artifact action to v3.1.3 ([#2867](https://github.com/googleapis/java-bigquery/issues/2867)) ([cbbf0fb](https://github.com/googleapis/java-bigquery/commit/cbbf0fb8a99c0633335d81cd36a7b53dfe9df20b)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.26.0 ([#2873](https://github.com/googleapis/java-bigquery/issues/2873)) ([6196625](https://github.com/googleapis/java-bigquery/commit/6196625d614ce80641008ffab3b5bf9720651bb9)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.30.0 ([#2874](https://github.com/googleapis/java-bigquery/issues/2874)) ([6cafedf](https://github.com/googleapis/java-bigquery/commit/6cafedf634bc88f41b2b5d3ec1425341b02ac8b6)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.15.0 ([#2870](https://github.com/googleapis/java-bigquery/issues/2870)) ([f24439b](https://github.com/googleapis/java-bigquery/commit/f24439b7adf1f08cee4b65918b4395861fe88517)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.9.26 ([#2868](https://github.com/googleapis/java-bigquery/issues/2868)) ([d01031c](https://github.com/googleapis/java-bigquery/commit/d01031cbc6d50f9aff8c6d49a8d2c54496779451)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.26 ([#2869](https://github.com/googleapis/java-bigquery/issues/2869)) ([edd7141](https://github.com/googleapis/java-bigquery/commit/edd714129b65d73f894591c4d40e1a8e79c36b04)) + +## [2.31.2](https://github.com/googleapis/java-bigquery/compare/v2.31.1...v2.31.2) (2023-09-05) + + +### Bug Fixes + +* Hide TableReference data struct ([#2855](https://github.com/googleapis/java-bigquery/issues/2855)) ([2cbded6](https://github.com/googleapis/java-bigquery/commit/2cbded6600af1de8ec15b04a2496733ad2b50c47)) +* SearchStats IndexUnusedReasons null bug ([#2825](https://github.com/googleapis/java-bigquery/issues/2825)) ([309ea60](https://github.com/googleapis/java-bigquery/commit/309ea607a9ff50e59dc4e1069c689c1da9605ed5)) + +## [2.31.1](https://github.com/googleapis/java-bigquery/compare/v2.31.0...v2.31.1) (2023-08-09) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.25.0 ([#2845](https://github.com/googleapis/java-bigquery/issues/2845)) ([d940f8d](https://github.com/googleapis/java-bigquery/commit/d940f8d7f119d75aaa80eb60babd5406fca76c69)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.29.0 ([#2846](https://github.com/googleapis/java-bigquery/issues/2846)) 
([87a0a10](https://github.com/googleapis/java-bigquery/commit/87a0a10d806fdcbf4bdb1ee1478b9ee6aeb7b287)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.14.0 ([#2834](https://github.com/googleapis/java-bigquery/issues/2834)) ([79fe14c](https://github.com/googleapis/java-bigquery/commit/79fe14c08836b40bf84775a526cba32f63dd8227)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.9.24 ([#2839](https://github.com/googleapis/java-bigquery/issues/2839)) ([ae752bc](https://github.com/googleapis/java-bigquery/commit/ae752bc36c516e1a4172bc0c9cfa7ed6bbcbe0e7)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.24 ([#2840](https://github.com/googleapis/java-bigquery/issues/2840)) ([1ae6cb9](https://github.com/googleapis/java-bigquery/commit/1ae6cb9b20152e00db9a559ff143faca581bf8b1)) +* Update github/codeql-action action to v2.21.1 ([#2824](https://github.com/googleapis/java-bigquery/issues/2824)) ([9978971](https://github.com/googleapis/java-bigquery/commit/997897166ba121256b7fa6f4c63f83daebdc6a54)) +* Update jmh.version to v1.37 ([#2836](https://github.com/googleapis/java-bigquery/issues/2836)) ([4b3a3c2](https://github.com/googleapis/java-bigquery/commit/4b3a3c22985c76f7e861341dc76e96abc970eaec)) + +## [2.31.0](https://github.com/googleapis/java-bigquery/compare/v2.30.1...v2.31.0) (2023-07-25) + + +### Features + +* Add exception handling for the ALREADY EXISTS error ([#2788](https://github.com/googleapis/java-bigquery/issues/2788)) ([67a07ea](https://github.com/googleapis/java-bigquery/commit/67a07ea45c4635a2e9d43220d4bc34780eb512ef)) + + +### Dependencies + +* Update arrow.version to v12.0.1 ([#2750](https://github.com/googleapis/java-bigquery/issues/2750)) ([f92bee5](https://github.com/googleapis/java-bigquery/commit/f92bee558f6de070fb9f525bb47a824dc2c53e07)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.24.0 ([#2811](https://github.com/googleapis/java-bigquery/issues/2811)) ([b660063](https://github.com/googleapis/java-bigquery/commit/b6600635e01f930fa19769cb2594ef4f43226124)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.28.0 ([#2812](https://github.com/googleapis/java-bigquery/issues/2812)) ([921716d](https://github.com/googleapis/java-bigquery/commit/921716d54ea34f1914d16f7774124175be488087)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.13.1 ([#2806](https://github.com/googleapis/java-bigquery/issues/2806)) ([df3cd76](https://github.com/googleapis/java-bigquery/commit/df3cd761b5111bb0f174f691765697aa1bcbebde)) +* Update dependency org.junit.vintage:junit-vintage-engine to v5.10.0 ([#2808](https://github.com/googleapis/java-bigquery/issues/2808)) ([694f711](https://github.com/googleapis/java-bigquery/commit/694f7111d7497126d429c5dda17f44f661279582)) +* Update github/codeql-action action to v2.21.0 ([#2803](https://github.com/googleapis/java-bigquery/issues/2803)) ([c6c536c](https://github.com/googleapis/java-bigquery/commit/c6c536c618908425f288fa0e6f516df2cc6a4b97)) + +## [2.30.1](https://github.com/googleapis/java-bigquery/compare/v2.30.0...v2.30.1) (2023-07-18) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.23.0 ([#2791](https://github.com/googleapis/java-bigquery/issues/2791)) ([940301b](https://github.com/googleapis/java-bigquery/commit/940301b327bf941cfab56d68759b6f1494fda22f)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to
v1.27.0 ([#2792](https://github.com/googleapis/java-bigquery/issues/2792)) ([c791066](https://github.com/googleapis/java-bigquery/commit/c79106678a0ac62b34605f19ca2baea296ea531c)) + +## [2.30.0](https://github.com/googleapis/java-bigquery/compare/v2.29.0...v2.30.0) (2023-07-17) + + +### Features + +* Add missing storage related fields to Table, TableInfo and StandardTableDefinition ([#2673](https://github.com/googleapis/java-bigquery/issues/2673)) ([e3003f4](https://github.com/googleapis/java-bigquery/commit/e3003f48df9cca2bd549d893ffef3bb198a3b2aa)) +* Add support for Search statistics ([#2787](https://github.com/googleapis/java-bigquery/issues/2787)) ([344f695](https://github.com/googleapis/java-bigquery/commit/344f695e319470acf350ebdd56d643c03704ea1f)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.22.0 ([#2777](https://github.com/googleapis/java-bigquery/issues/2777)) ([078f244](https://github.com/googleapis/java-bigquery/commit/078f244572db7484471d2c55a0db4533de0d1dc7)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.26.0 ([#2778](https://github.com/googleapis/java-bigquery/issues/2778)) ([2ee52c9](https://github.com/googleapis/java-bigquery/commit/2ee52c934d253d29c16b25d498ebe8e968cda481)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.13.0 ([#2786](https://github.com/googleapis/java-bigquery/issues/2786)) ([dd14eee](https://github.com/googleapis/java-bigquery/commit/dd14eee126f3cb6be7c943157e65acd5d4a088d4)) +* Update github/codeql-action action to v2.20.1 ([#2766](https://github.com/googleapis/java-bigquery/issues/2766)) ([2014613](https://github.com/googleapis/java-bigquery/commit/201461351ac9813f6d11e6f5c3b9ec4dd01c001b)) +* Update github/codeql-action action to v2.20.4 ([#2784](https://github.com/googleapis/java-bigquery/issues/2784)) ([e886f5f](https://github.com/googleapis/java-bigquery/commit/e886f5fa79aee469fe7b8860b5e87951635b6ce7)) +* Update ossf/scorecard-action action to v2.2.0 ([#2775](https://github.com/googleapis/java-bigquery/issues/2775)) ([688b2a0](https://github.com/googleapis/java-bigquery/commit/688b2a0b16b578dc0784094608b35cb3a68f151b)) + +## [2.29.0](https://github.com/googleapis/java-bigquery/compare/v2.28.0...v2.29.0) (2023-06-23) + + +### Features + +* Increase default Read API timeout to 60s ([#2764](https://github.com/googleapis/java-bigquery/issues/2764)) ([f606d0b](https://github.com/googleapis/java-bigquery/commit/f606d0b28ca8f65654413a99ab698f35e3befce1)) + + +### Dependencies + +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.12.0 ([#2771](https://github.com/googleapis/java-bigquery/issues/2771)) ([7537e0f](https://github.com/googleapis/java-bigquery/commit/7537e0f31d8f4696559ef09c7bd284bf78217280)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.9.23 ([#2759](https://github.com/googleapis/java-bigquery/issues/2759)) ([27ba48a](https://github.com/googleapis/java-bigquery/commit/27ba48a0cab331f2d233ba96fed710c11d31dc53)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.23 ([#2760](https://github.com/googleapis/java-bigquery/issues/2760)) ([8cddf8f](https://github.com/googleapis/java-bigquery/commit/8cddf8fd286f51cd75aba0da6a52cbc12cab7e2a)) + +## [2.28.0](https://github.com/googleapis/java-bigquery/compare/v2.27.1...v2.28.0) (2023-06-19) + + +### Features + +* Add primary key and foreign keys ([#2744](https://github.com/googleapis/java-bigquery/issues/2744)) 
([afb571c](https://github.com/googleapis/java-bigquery/commit/afb571c97edb13f93df9ac140af4516205d27a49)) +* Partial Projection of Table Metadata ([#2756](https://github.com/googleapis/java-bigquery/issues/2756)) ([9207743](https://github.com/googleapis/java-bigquery/commit/92077437d759705151f7778207616ecf024371ba)) +* Return JobID with TableResult ([#2689](https://github.com/googleapis/java-bigquery/issues/2689)) ([aa38428](https://github.com/googleapis/java-bigquery/commit/aa38428ad26b64d2566b33f4b2ca3dcc102c3247)) + +## [2.27.1](https://github.com/googleapis/java-bigquery/compare/v2.27.0...v2.27.1) (2023-06-13) + + +### Dependencies + +* Update actions/checkout action to v3.5.3 ([#2746](https://github.com/googleapis/java-bigquery/issues/2746)) ([17f8438](https://github.com/googleapis/java-bigquery/commit/17f843880f5633b602de5221c26b830e7e304d2b)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.21.0 ([#2741](https://github.com/googleapis/java-bigquery/issues/2741)) ([d665e52](https://github.com/googleapis/java-bigquery/commit/d665e523b2c393c17a734ff4714aeb85f8d61dd7)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.25.0 ([#2743](https://github.com/googleapis/java-bigquery/issues/2743)) ([5d38d23](https://github.com/googleapis/java-bigquery/commit/5d38d2375cedd29e35d75881a206cab3fdcdd6a5)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.11.0 ([#2738](https://github.com/googleapis/java-bigquery/issues/2738)) ([3b56445](https://github.com/googleapis/java-bigquery/commit/3b564458eef9df2173c47e26e2399a6a6cad6eee)) +* Update github/codeql-action action to v2.20.0 ([#2751](https://github.com/googleapis/java-bigquery/issues/2751)) ([42ae181](https://github.com/googleapis/java-bigquery/commit/42ae18134b972c1694a7e012d2f51c916e663c83)) +* Update github/codeql-action action to v2.3.6 ([#2712](https://github.com/googleapis/java-bigquery/issues/2712)) ([f043ed6](https://github.com/googleapis/java-bigquery/commit/f043ed61dacf4ea66eedaf0a6faada06057b7d50)) + +## [2.27.0](https://github.com/googleapis/java-bigquery/compare/v2.26.1...v2.27.0) (2023-05-30) + + +### Features + +* Add support for session id on TableDataWriteChannel ([#2715](https://github.com/googleapis/java-bigquery/issues/2715)) ([42851d8](https://github.com/googleapis/java-bigquery/commit/42851d818ee825d7c4141d40d116e1da43c11f14)) + + +### Bug Fixes + +* Add support for repeated record query parameters ([#2698](https://github.com/googleapis/java-bigquery/issues/2698)) ([51aff50](https://github.com/googleapis/java-bigquery/commit/51aff502215d69bd0151030421cd18646c6ead36)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.20.0 ([#2720](https://github.com/googleapis/java-bigquery/issues/2720)) ([4962cac](https://github.com/googleapis/java-bigquery/commit/4962cac8fb3fe8d77a136eaf1b579cd79304acfb)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20230506-2.0.0 ([#2707](https://github.com/googleapis/java-bigquery/issues/2707)) ([4d2ec07](https://github.com/googleapis/java-bigquery/commit/4d2ec0716287e9624949cbcdf6605c127c209be4)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20230520-2.0.0 ([#2723](https://github.com/googleapis/java-bigquery/issues/2723)) ([5c64797](https://github.com/googleapis/java-bigquery/commit/5c64797c603343408849535b2dbf8080cd11ca32)) +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to 
v2.37.2 ([#2726](https://github.com/googleapis/java-bigquery/issues/2726)) ([052c47a](https://github.com/googleapis/java-bigquery/commit/052c47aa43b0f50414db3031914e8a775ae98925)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.24.0 ([#2721](https://github.com/googleapis/java-bigquery/issues/2721)) ([7c357fb](https://github.com/googleapis/java-bigquery/commit/7c357fb414d45fde734c09c88ee3023d8d8f5822)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.10.1 ([#2713](https://github.com/googleapis/java-bigquery/issues/2713)) ([744e83a](https://github.com/googleapis/java-bigquery/commit/744e83a3da5323bc2cff2bcc6368a3eec39f392e)) + +## [2.26.1](https://github.com/googleapis/java-bigquery/compare/v2.26.0...v2.26.1) (2023-05-16) + + +### Bug Fixes + +* Custom host for resumable uploads ([#2696](https://github.com/googleapis/java-bigquery/issues/2696)) ([2b4eff1](https://github.com/googleapis/java-bigquery/commit/2b4eff1fed8b1ac9bf24bd69440377e904bc66e1)) + +## [2.26.0](https://github.com/googleapis/java-bigquery/compare/v2.25.0...v2.26.0) (2023-05-15) + + +### Features + +* Add field in HivePartitioningOptions ([#2678](https://github.com/googleapis/java-bigquery/issues/2678)) ([4165e55](https://github.com/googleapis/java-bigquery/commit/4165e5549d7a8e8e011d7700bc791e9b470c670d)) +* Allow passing autodetect_schema on table update ([#2661](https://github.com/googleapis/java-bigquery/issues/2661)) ([4c01698](https://github.com/googleapis/java-bigquery/commit/4c01698e571d7adbaf914984cdf65f6c35e4edb8)) + + +### Bug Fixes + +* Move the ratio calculation that decides whether to use the read API, to avoid an NPE with setUseReadAPI(false) ([#2509](https://github.com/googleapis/java-bigquery/issues/2509)) ([e1326c8](https://github.com/googleapis/java-bigquery/commit/e1326c8b615f392f80a09d36a1b4cef79dfea662)) + + +### Dependencies + +* Update arrow.version to v12 (major) ([#2675](https://github.com/googleapis/java-bigquery/issues/2675)) ([7700cf5](https://github.com/googleapis/java-bigquery/commit/7700cf588d4a6d3b3267d8fd51eaf8aed1752506)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.19.0 ([#2691](https://github.com/googleapis/java-bigquery/issues/2691)) ([1939803](https://github.com/googleapis/java-bigquery/commit/193980319cb743e6b6c67648ddb21432e5e69ff8)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.23.0 ([#2692](https://github.com/googleapis/java-bigquery/issues/2692)) ([f56e541](https://github.com/googleapis/java-bigquery/commit/f56e54161894c3be6e975ac102454afca4c9b058)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.9.0 ([#2685](https://github.com/googleapis/java-bigquery/issues/2685)) ([b74da29](https://github.com/googleapis/java-bigquery/commit/b74da296e2d2739adb481cda417d51569d1acc51)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.9.22 ([#2687](https://github.com/googleapis/java-bigquery/issues/2687)) ([cf5d758](https://github.com/googleapis/java-bigquery/commit/cf5d758fe6aad4b374c34940aa93f8060f779505)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.22 ([#2688](https://github.com/googleapis/java-bigquery/issues/2688)) ([32ea8ab](https://github.com/googleapis/java-bigquery/commit/32ea8ab19bff86d8183ddd9d6e6d06303eb9d83f)) +* Update github/codeql-action action to v2.3.3 ([#2658](https://github.com/googleapis/java-bigquery/issues/2658))
([487f207](https://github.com/googleapis/java-bigquery/commit/487f20707c9b320a68100f85b2a1277cad9b37ea)) + +## [2.25.0](https://github.com/googleapis/java-bigquery/compare/v2.24.5...v2.25.0) (2023-04-27) + + +### Features + +* Add ICEBERG format options ([#2662](https://github.com/googleapis/java-bigquery/issues/2662)) ([55048ca](https://github.com/googleapis/java-bigquery/commit/55048caf6b308dca3a0961595a4a735c44d99bbb)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.18.0 ([#2648](https://github.com/googleapis/java-bigquery/issues/2648)) ([29bd415](https://github.com/googleapis/java-bigquery/commit/29bd415c5c3d3f3c433821277fcd831796daa3d2)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20230408-2.0.0 ([#2650](https://github.com/googleapis/java-bigquery/issues/2650)) ([b9c2f60](https://github.com/googleapis/java-bigquery/commit/b9c2f60cb296f488cc4095b54fbcc459dc2f3fa5)) +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v2.36.1 ([fea119b](https://github.com/googleapis/java-bigquery/commit/fea119b79eea54e9f0d221e1e71a2ca77957c657)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.22.0 ([#2649](https://github.com/googleapis/java-bigquery/issues/2649)) ([b6326f3](https://github.com/googleapis/java-bigquery/commit/b6326f3aed15d312987109009af66c7aa5a30a1d)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.8.0 ([#2659](https://github.com/googleapis/java-bigquery/issues/2659)) ([691a47a](https://github.com/googleapis/java-bigquery/commit/691a47abb63bd34b5c5c57439124d4713013b94c)) +* Update dependency org.junit.vintage:junit-vintage-engine to v5.9.3 ([#2660](https://github.com/googleapis/java-bigquery/issues/2660)) ([319f98e](https://github.com/googleapis/java-bigquery/commit/319f98eee611a10cf26582e32c9e6f8e24385565)) + +## [2.24.5](https://github.com/googleapis/java-bigquery/compare/v2.24.4...v2.24.5) (2023-04-14) + + +### Dependencies + +* Update actions/checkout action to v3.5.2 ([#2630](https://github.com/googleapis/java-bigquery/issues/2630)) ([95e49fd](https://github.com/googleapis/java-bigquery/commit/95e49fd47648ff1574e60ed3a1ab8dacb75df654)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.16.0 ([#2625](https://github.com/googleapis/java-bigquery/issues/2625)) ([594a7b4](https://github.com/googleapis/java-bigquery/commit/594a7b4bf150a5963c149f8f5f6edd18adbf99fe)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20230401-2.0.0 ([#2631](https://github.com/googleapis/java-bigquery/issues/2631)) ([5d8d9a6](https://github.com/googleapis/java-bigquery/commit/5d8d9a6dadb901b6b89b992965f0d1af332a6328)) +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v2.35.0 ([8439020](https://github.com/googleapis/java-bigquery/commit/843902051342889e6d6f23a84385441f1f173930)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.20.0 ([#2626](https://github.com/googleapis/java-bigquery/issues/2626)) ([f466b51](https://github.com/googleapis/java-bigquery/commit/f466b514fed248b1193900ac65be3d9a5154a858)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.7.0 ([#2637](https://github.com/googleapis/java-bigquery/issues/2637)) ([e8f07d7](https://github.com/googleapis/java-bigquery/commit/e8f07d70f754bb6a5937af7af436714bf2301af0)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.9.21 
([#2633](https://github.com/googleapis/java-bigquery/issues/2633)) ([3e376b1](https://github.com/googleapis/java-bigquery/commit/3e376b146dd5dfd38ae03dd48fa1e12bc7d12ead)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.21 ([#2634](https://github.com/googleapis/java-bigquery/issues/2634)) ([000f720](https://github.com/googleapis/java-bigquery/commit/000f72026e1464ec698032eff6eeb31afcfa963a)) +* Update github/codeql-action action to v2.2.12 ([#2635](https://github.com/googleapis/java-bigquery/issues/2635)) ([b2f97e9](https://github.com/googleapis/java-bigquery/commit/b2f97e90da2891ddb59fc0291b54ebcd3d945709)) +* Update ossf/scorecard-action action to v2.1.3 ([#2618](https://github.com/googleapis/java-bigquery/issues/2618)) ([d166401](https://github.com/googleapis/java-bigquery/commit/d166401edd01d6d9306027d601d26d66b661de02)) + +## [2.24.4](https://github.com/googleapis/java-bigquery/compare/v2.24.3...v2.24.4) (2023-03-30) + + +### Bug Fixes + +* QueryWithStructsParameters sample mismatch ([#2610](https://github.com/googleapis/java-bigquery/issues/2610)) ([71f9f55](https://github.com/googleapis/java-bigquery/commit/71f9f55225eac6d7d4a3d5f0960b28bf8769d03d)) + + +### Dependencies + +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20230318-2.0.0 ([#2607](https://github.com/googleapis/java-bigquery/issues/2607)) ([a328eb2](https://github.com/googleapis/java-bigquery/commit/a328eb285b6007e0e01b059a03e71a2b5a6e7399)) +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v2.34.2 ([#2619](https://github.com/googleapis/java-bigquery/issues/2619)) ([e4aa0fe](https://github.com/googleapis/java-bigquery/commit/e4aa0fef53f9eebbe09d878318521df5d070fd70)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.6.0 ([#2612](https://github.com/googleapis/java-bigquery/issues/2612)) ([eac97ac](https://github.com/googleapis/java-bigquery/commit/eac97ac03007b52e6c242f427ac21e491bfefb13)) +* Update github/codeql-action action to v2.2.9 ([#2608](https://github.com/googleapis/java-bigquery/issues/2608)) ([24aac14](https://github.com/googleapis/java-bigquery/commit/24aac1489dd99ebd0133e80a21dd7309f2c17494)) + +## [2.24.3](https://github.com/googleapis/java-bigquery/compare/v2.24.2...v2.24.3) (2023-03-24) + + +### Dependencies + +* Update actions/checkout action to v3.5.0 ([#2600](https://github.com/googleapis/java-bigquery/issues/2600)) ([f38d9f1](https://github.com/googleapis/java-bigquery/commit/f38d9f17fdc067392631ee3c2d2a6c658985f68d)) + +## [2.24.2](https://github.com/googleapis/java-bigquery/compare/v2.24.1...v2.24.2) (2023-03-22) + + +### Dependencies + +* Update github/codeql-action action to v2.2.8 ([#2593](https://github.com/googleapis/java-bigquery/issues/2593)) ([d306ad8](https://github.com/googleapis/java-bigquery/commit/d306ad8189a44c066ad5305596c86fae71251055)) + +## [2.24.1](https://github.com/googleapis/java-bigquery/compare/v2.24.0...v2.24.1) (2023-03-21) + + +### Dependencies + +* Update cloud client dependencies ([7b07779](https://github.com/googleapis/java-bigquery/commit/7b0777924103a711f9e3066c0a52adc551f24fca)) +* Update dependency com.google.cloud:google-cloud-bigquery to v2.23.2 ([7b07779](https://github.com/googleapis/java-bigquery/commit/7b0777924103a711f9e3066c0a52adc551f24fca)) +* Update dependency com.google.cloud:google-cloud-bigquery to v2.24.0 ([7b07779](https://github.com/googleapis/java-bigquery/commit/7b0777924103a711f9e3066c0a52adc551f24fca)) +* Update dependency 
com.google.cloud:google-cloud-bigquerystorage-bom to v2.34.1 ([7b07779](https://github.com/googleapis/java-bigquery/commit/7b0777924103a711f9e3066c0a52adc551f24fca)) +* Update dependency com.google.cloud:google-cloud-bigtable to v2.20.1 ([7b07779](https://github.com/googleapis/java-bigquery/commit/7b0777924103a711f9e3066c0a52adc551f24fca)) +* Update dependency com.google.cloud:libraries-bom to v26.10.0 ([7b07779](https://github.com/googleapis/java-bigquery/commit/7b0777924103a711f9e3066c0a52adc551f24fca)) + +## [2.24.0](https://github.com/googleapis/java-bigquery/compare/v2.23.2...v2.24.0) (2023-03-21) + + +### Features + +* Add support for clone ([#2553](https://github.com/googleapis/java-bigquery/issues/2553)) ([2186c64](https://github.com/googleapis/java-bigquery/commit/2186c64e523e030a777eec447bc1f22802f56617)) + + +### Dependencies + +* Update actions/checkout action to v3.4.0 ([#2575](https://github.com/googleapis/java-bigquery/issues/2575)) ([6935a1e](https://github.com/googleapis/java-bigquery/commit/6935a1e353d496e6de656de3431563b3527456e5)) +* Update actions/upload-artifact action to v3.1.2 ([#2571](https://github.com/googleapis/java-bigquery/issues/2571)) ([aa0c70e](https://github.com/googleapis/java-bigquery/commit/aa0c70ec6fe0ae859b0944101373ebabb0bb4600)) +* Update cloud client dependencies ([#2583](https://github.com/googleapis/java-bigquery/issues/2583)) ([dcacc31](https://github.com/googleapis/java-bigquery/commit/dcacc3150b3dbcd9c54038035cec3a7b1946af6c)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.15.0 ([#2577](https://github.com/googleapis/java-bigquery/issues/2577)) ([eaf09d6](https://github.com/googleapis/java-bigquery/commit/eaf09d65c00a742732b918478021dde3b12ac1bd)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20230311-2.0.0 ([#2578](https://github.com/googleapis/java-bigquery/issues/2578)) ([aab037c](https://github.com/googleapis/java-bigquery/commit/aab037c8eb676b42231c99b5890c991095d2f8c2)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.5.0 ([#2580](https://github.com/googleapis/java-bigquery/issues/2580)) ([1764eeb](https://github.com/googleapis/java-bigquery/commit/1764eeb8d56ab3e5bda9b554414b5fe4d022fb72)) +* Update dependency com.google.cloud:google-cloud-storage to v2.20.0 ([#2559](https://github.com/googleapis/java-bigquery/issues/2559)) ([8a854db](https://github.com/googleapis/java-bigquery/commit/8a854dbdcb676e2b2873ddfadf514f2e401fe987)) +* Update github/codeql-action action to v2.2.7 ([#2572](https://github.com/googleapis/java-bigquery/issues/2572)) ([105f5ee](https://github.com/googleapis/java-bigquery/commit/105f5ee8d8882f79688dec685f20f44817bf313a)) + +## [2.23.2](https://github.com/googleapis/java-bigquery/compare/v2.23.1...v2.23.2) (2023-03-07) + + +### Bug Fixes + +* External table definition parquet format options ([#2535](https://github.com/googleapis/java-bigquery/issues/2535)) ([eb45973](https://github.com/googleapis/java-bigquery/commit/eb4597314fad72bbdb666a832f0f15f732f40817)) + + +### Documentation + +* Remove stale snippet comment ([#2555](https://github.com/googleapis/java-bigquery/issues/2555)) ([a71b1b2](https://github.com/googleapis/java-bigquery/commit/a71b1b2abb568bd1ed088f7bd4b77f93a68ec95e)) + +## [2.23.1](https://github.com/googleapis/java-bigquery/compare/v2.23.0...v2.23.1) (2023-03-02) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.14.0 
([#2545](https://github.com/googleapis/java-bigquery/issues/2545)) ([ad78ebb](https://github.com/googleapis/java-bigquery/commit/ad78ebb35a5b6d7d86d59e6c0fa078c68a65a275)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.18.0 ([#2546](https://github.com/googleapis/java-bigquery/issues/2546)) ([60e45e4](https://github.com/googleapis/java-bigquery/commit/60e45e457edfd257e16e37f0c0d5049dd722f0e3)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.4.0 ([#2547](https://github.com/googleapis/java-bigquery/issues/2547)) ([2588582](https://github.com/googleapis/java-bigquery/commit/25885821328ecb72c57d9ebeb548a6710d186381)) + +## [2.23.0](https://github.com/googleapis/java-bigquery/compare/v2.22.0...v2.23.0) (2023-02-22) + + +### Features + +* Add support for session_id in load jobs ([#2519](https://github.com/googleapis/java-bigquery/issues/2519)) ([e431c17](https://github.com/googleapis/java-bigquery/commit/e431c17efe0f69d084f119463ca8bdb25047a7fe)) + + +### Dependencies + +* Update cloud client dependencies ([#2526](https://github.com/googleapis/java-bigquery/issues/2526)) ([4d88ccc](https://github.com/googleapis/java-bigquery/commit/4d88ccc22b86ae83220324c53c2430f7878473bd)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.13.0 ([#2533](https://github.com/googleapis/java-bigquery/issues/2533)) ([ed2cb74](https://github.com/googleapis/java-bigquery/commit/ed2cb7436c2ba38bdc4975abd12ba88d231087db)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20230210-2.0.0 ([#2530](https://github.com/googleapis/java-bigquery/issues/2530)) ([62ff092](https://github.com/googleapis/java-bigquery/commit/62ff092908bbc2cf26e8cb9426bdc0f45d1b2b9e)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.3.0 ([#2534](https://github.com/googleapis/java-bigquery/issues/2534)) ([f1bcc33](https://github.com/googleapis/java-bigquery/commit/f1bcc331d61f966f9c0c29dd3dccb122cafc874d)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.9.20 ([#2527](https://github.com/googleapis/java-bigquery/issues/2527)) ([5fe5e74](https://github.com/googleapis/java-bigquery/commit/5fe5e74afd4af873213455010b73dcf8240e008d)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.20 ([#2528](https://github.com/googleapis/java-bigquery/issues/2528)) ([554e75d](https://github.com/googleapis/java-bigquery/commit/554e75df92aa0dbd16d65d51a6c845c914062059)) + +## [2.22.0](https://github.com/googleapis/java-bigquery/compare/v2.21.0...v2.22.0) (2023-02-08) + + +### Features + +* Add collation for case-sensitive string column ([#2490](https://github.com/googleapis/java-bigquery/issues/2490)) ([3257737](https://github.com/googleapis/java-bigquery/commit/325773757e811172236eb3221926025b82f5db64)) + + +### Dependencies + +* Update arrow.version to v11 (major) ([#2495](https://github.com/googleapis/java-bigquery/issues/2495)) ([94ed060](https://github.com/googleapis/java-bigquery/commit/94ed06089239a0df0ffebf7f8470a38c16be95bf)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.11.0 ([#2482](https://github.com/googleapis/java-bigquery/issues/2482)) ([e6ffb9b](https://github.com/googleapis/java-bigquery/commit/e6ffb9b594e3bb680c4904f822c39653d626c4d3)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.12.0 ([#2512](https://github.com/googleapis/java-bigquery/issues/2512))
([09f280d](https://github.com/googleapis/java-bigquery/commit/09f280d15389838dcc7ff9d8c1f485041559051e)) +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v2.31.0 ([#2499](https://github.com/googleapis/java-bigquery/issues/2499)) ([c0a393c](https://github.com/googleapis/java-bigquery/commit/c0a393ca3aae372dd42477c9b54bd785fc17ab20)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.15.0 ([#2483](https://github.com/googleapis/java-bigquery/issues/2483)) ([5c2bf69](https://github.com/googleapis/java-bigquery/commit/5c2bf69b227ca4efb55b42c06a747426183f8ae5)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.2.0 ([#2513](https://github.com/googleapis/java-bigquery/issues/2513)) ([02832dd](https://github.com/googleapis/java-bigquery/commit/02832ddcb62d4701cd3568eae806586917b359b5)) + +## [2.21.0](https://github.com/googleapis/java-bigquery/compare/v2.20.2...v2.21.0) (2023-01-23) + + +### Features + +* Migrate from google-http-client-jackson2 to google-http-client-gson ([#2471](https://github.com/googleapis/java-bigquery/issues/2471)) ([09a8382](https://github.com/googleapis/java-bigquery/commit/09a8382222e1174c46989e797e0941cc36d387f1)) + + +### Bug Fixes + +* **java:** Skip fixing poms for special modules ([#1744](https://github.com/googleapis/java-bigquery/issues/1744)) ([#2474](https://github.com/googleapis/java-bigquery/issues/2474)) ([4e8bbe0](https://github.com/googleapis/java-bigquery/commit/4e8bbe0808b2aabea889042135e3a388d2f8ecff)) + + +### Dependencies + +* Update com.google.cloud:google-cloud-bigquerystorage-bom to v2.28.3 ([5a20c32](https://github.com/googleapis/java-bigquery/commit/5a20c327d9b863fbde9a8fb10f61f4c706b446a8)) +* Update com.google.cloud:google-cloud-datacatalog-bom to v1.14.0 ([606fc1e](https://github.com/googleapis/java-bigquery/commit/606fc1e26c7c6f407a1938058b07699d84615bc7)) +* Update com.google.cloud:google-cloud-storage to v2.17.1 ([#2465](https://github.com/googleapis/java-bigquery/issues/2465)) ([606fc1e](https://github.com/googleapis/java-bigquery/commit/606fc1e26c7c6f407a1938058b07699d84615bc7)) +* Update com.google.cloud:google-cloud-storage to v2.17.2 ([#2479](https://github.com/googleapis/java-bigquery/issues/2479)) ([5a20c32](https://github.com/googleapis/java-bigquery/commit/5a20c327d9b863fbde9a8fb10f61f4c706b446a8)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.10.0 ([#2463](https://github.com/googleapis/java-bigquery/issues/2463)) ([5bfa7ae](https://github.com/googleapis/java-bigquery/commit/5bfa7aeb8ca48fab438d03b2ba1a19a8673d85a9)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20230114-2.0.0 ([#2477](https://github.com/googleapis/java-bigquery/issues/2477)) ([b6409d5](https://github.com/googleapis/java-bigquery/commit/b6409d52adde116bf073ec71e8968fd68b983e65)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.1.2 ([#2476](https://github.com/googleapis/java-bigquery/issues/2476)) ([688b6a6](https://github.com/googleapis/java-bigquery/commit/688b6a65b890668591451e8f3d01f9f15527c80d)) + +## [2.20.2](https://github.com/googleapis/java-bigquery/compare/v2.20.1...v2.20.2) (2023-01-12) + + +### Dependencies + +* Update com.google.cloud:google-cloud-bigquerystorage-bom to 2.28.1 ([b62391d](https://github.com/googleapis/java-bigquery/commit/b62391d9cbf5fab30ff5df488d2835be45c5b8d8)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20221209-2.0.0 
([#2449](https://github.com/googleapis/java-bigquery/issues/2449)) ([9d0a107](https://github.com/googleapis/java-bigquery/commit/9d0a107da5e1e600dddccae79eb83358fd6940c6)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20221217-2.0.0 ([#2459](https://github.com/googleapis/java-bigquery/issues/2459)) ([08e2927](https://github.com/googleapis/java-bigquery/commit/08e2927ecb05ff3fda03493a99f466a6d2417d20)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.1.1 ([#2455](https://github.com/googleapis/java-bigquery/issues/2455)) ([412710c](https://github.com/googleapis/java-bigquery/commit/412710c25d637efeac5a2242ad841b11a788708a)) +* Update dependency org.junit.vintage:junit-vintage-engine to v5.9.2 ([#2456](https://github.com/googleapis/java-bigquery/issues/2456)) ([04d47ad](https://github.com/googleapis/java-bigquery/commit/04d47ad3b8e9b0078884f81c227eb96e87d21aa2)) +* Update dependency org.threeten:threeten-extra to v1.7.2 ([#2450](https://github.com/googleapis/java-bigquery/issues/2450)) ([485be56](https://github.com/googleapis/java-bigquery/commit/485be563ba50974dd40069072b56d319ad40a70a)) + +## [2.20.1](https://github.com/googleapis/java-bigquery/compare/v2.20.0...v2.20.1) (2023-01-04) + + +### Dependencies + +* Update dependency com.google.cloud:google-cloud-bigquerystorage-bom to v2.28.0 ([#2451](https://github.com/googleapis/java-bigquery/issues/2451)) ([4b760e1](https://github.com/googleapis/java-bigquery/commit/4b760e1049c6ddbfcb17b7de5905f43a53165c3d)) + +## [2.20.0](https://github.com/googleapis/java-bigquery/compare/v2.19.1...v2.20.0) (2022-12-13) + + +### Features + +* Add fast query path support when an empty jobId object is passed ([#2349](https://github.com/googleapis/java-bigquery/issues/2349)) ([42c083a](https://github.com/googleapis/java-bigquery/commit/42c083ac680c657bf3f648fbce81004ecac8be87)) +* Next release from main branch is 2.20.0 ([#2405](https://github.com/googleapis/java-bigquery/issues/2405)) ([9297a43](https://github.com/googleapis/java-bigquery/commit/9297a4359f7b080a60b6bb5873edfd66cd7d2261)) + + +### Dependencies + +* Update arrow.version to v10.0.1 ([#2426](https://github.com/googleapis/java-bigquery/issues/2426)) ([aff9019](https://github.com/googleapis/java-bigquery/commit/aff901904d04a9a35042126a90e2a2826283a3e7)) +* Update cloud client dependencies ([#2444](https://github.com/googleapis/java-bigquery/issues/2444)) ([7255357](https://github.com/googleapis/java-bigquery/commit/7255357fc3bc715ebe15761acbe83c5e33495fdc)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.8.0 ([#2418](https://github.com/googleapis/java-bigquery/issues/2418)) ([1ac1653](https://github.com/googleapis/java-bigquery/commit/1ac1653ed705fa7173c4a83ab37169fdb15422e3)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.9.0 ([#2441](https://github.com/googleapis/java-bigquery/issues/2441)) ([01cc3c3](https://github.com/googleapis/java-bigquery/commit/01cc3c3d64884c2d7b68c5099de7c0959c1846db)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20221127-2.0.0 ([#2437](https://github.com/googleapis/java-bigquery/issues/2437)) ([eb52002](https://github.com/googleapis/java-bigquery/commit/eb52002919843ad9341d3f9b06c10e401637b82a)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.12.0 ([#2419](https://github.com/googleapis/java-bigquery/issues/2419))
([c449031](https://github.com/googleapis/java-bigquery/commit/c4490315b62606371e134f2a9c2fbfabc60bee03)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.1.0 ([#2435](https://github.com/googleapis/java-bigquery/issues/2435)) ([c99b215](https://github.com/googleapis/java-bigquery/commit/c99b21552e30a509b6220de7a491566dbab086db)) +* Update dependency com.google.cloud:google-cloud-storage to v2.15.1 ([#2420](https://github.com/googleapis/java-bigquery/issues/2420)) ([baf337a](https://github.com/googleapis/java-bigquery/commit/baf337a12e89af73db0c2494e61f271f32e44ed0)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.9.18 ([#2424](https://github.com/googleapis/java-bigquery/issues/2424)) ([63b5196](https://github.com/googleapis/java-bigquery/commit/63b51969dc20747d3dd1f127cc0fcb2d27c9c8c0)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.9.19 ([#2432](https://github.com/googleapis/java-bigquery/issues/2432)) ([396c6dc](https://github.com/googleapis/java-bigquery/commit/396c6dc101837a801c7f693f3a3548eb6685feaf)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.18 ([#2425](https://github.com/googleapis/java-bigquery/issues/2425)) ([cd2ae9f](https://github.com/googleapis/java-bigquery/commit/cd2ae9f3d2c19be8a375e6be57a7e6b805b1ac4e)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.19 ([#2433](https://github.com/googleapis/java-bigquery/issues/2433)) ([bf94087](https://github.com/googleapis/java-bigquery/commit/bf940878a7cf602b7f0736335653bc6c479e2df6)) +* Update jmh.version to v1.36 ([#2415](https://github.com/googleapis/java-bigquery/issues/2415)) ([0676586](https://github.com/googleapis/java-bigquery/commit/06765866bf4507f8f6ebbaee28e7dc698f9ba14c)) + +## [2.19.1](https://github.com/googleapis/java-bigquery/compare/v2.19.0...v2.19.1) (2022-11-08) + + +### Dependencies + +* Update dependency com.google.cloud:google-cloud-storage to v2.15.0 ([#2402](https://github.com/googleapis/java-bigquery/issues/2402)) ([aac2711](https://github.com/googleapis/java-bigquery/commit/aac27119142e1411eb2f8b0270c806262f08d391)) + +## [2.19.0](https://github.com/googleapis/java-bigquery/compare/v2.18.2...v2.19.0) (2022-11-07) + + +### Features + +* Add getTimestampInstant() method to FieldValue ([#2350](https://github.com/googleapis/java-bigquery/issues/2350)) ([113303f](https://github.com/googleapis/java-bigquery/commit/113303fb41ec4855bb81a5bd3c7f8984bc70da3e)) + + +### Dependencies + +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20221028-2.0.0 ([#2393](https://github.com/googleapis/java-bigquery/issues/2393)) ([d3f6a6b](https://github.com/googleapis/java-bigquery/commit/d3f6a6bf55697541cfdc3bcdd2c441e8bd21dbc2)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.0.6 ([#2399](https://github.com/googleapis/java-bigquery/issues/2399)) ([9de9aa8](https://github.com/googleapis/java-bigquery/commit/9de9aa8f7eb2e21b88a13df23b65e2aaf6b749cf)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.9.17 ([#2396](https://github.com/googleapis/java-bigquery/issues/2396)) ([87f8cdd](https://github.com/googleapis/java-bigquery/commit/87f8cdd7b0b005430486e51e4c339a6de95b0011)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.17 ([#2397](https://github.com/googleapis/java-bigquery/issues/2397)) ([7927350](https://github.com/googleapis/java-bigquery/commit/7927350f562dbdef8774df82c9c0d528118d0213)) + +## 
[2.18.2](https://github.com/googleapis/java-bigquery/compare/v2.18.1...v2.18.2) (2022-10-28) + + +### Dependencies + +* Remove duplicated dependencies in the pom ([#2383](https://github.com/googleapis/java-bigquery/issues/2383)) ([fe164aa](https://github.com/googleapis/java-bigquery/commit/fe164aad572b74c21de2ce492f0f7c28bd07a7df)) + +## [2.18.1](https://github.com/googleapis/java-bigquery/compare/v2.18.0...v2.18.1) (2022-10-28) + + +### Dependencies + +* Remove version declaration for gson ([#2379](https://github.com/googleapis/java-bigquery/issues/2379)) ([0908652](https://github.com/googleapis/java-bigquery/commit/0908652a2e8baf46b142a8ba31c0967e593986d6)) + +## [2.18.0](https://github.com/googleapis/java-bigquery/compare/v2.17.1...v2.18.0) (2022-10-27) + + +### Features + +* Add executeSelectAsync and refactor ([#2294](https://github.com/googleapis/java-bigquery/issues/2294)) ([80fa478](https://github.com/googleapis/java-bigquery/commit/80fa47834f3ef536f553702dee3ddc80e18981bb)) + + +### Bug Fixes + +* Add --add-opens arg to native-image command ([#2369](https://github.com/googleapis/java-bigquery/issues/2369)) ([8e8b6d7](https://github.com/googleapis/java-bigquery/commit/8e8b6d70e228a63b5dde00b828765110b0222d20)) +* Properly handle external table schema on table update ([#2236](https://github.com/googleapis/java-bigquery/issues/2236)) ([460ef31](https://github.com/googleapis/java-bigquery/commit/460ef318297fe5562a983f64c407b7c0fa5a9a8b)) + + +### Dependencies + +* Update arrow.version to v10 (major) ([#2371](https://github.com/googleapis/java-bigquery/issues/2371)) ([b7873db](https://github.com/googleapis/java-bigquery/commit/b7873db46e174c755657ddcecbb02c0e495c9a1f)) +* Update cloud client dependencies ([#2362](https://github.com/googleapis/java-bigquery/issues/2362)) ([0936699](https://github.com/googleapis/java-bigquery/commit/09366996e281354cc423cbc3ac97a11b0d48eda6)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.6.0 ([#2355](https://github.com/googleapis/java-bigquery/issues/2355)) ([7bc59a7](https://github.com/googleapis/java-bigquery/commit/7bc59a77a6f3821ac19088a8ee864f5d24bdee2e)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.7.0 ([#2366](https://github.com/googleapis/java-bigquery/issues/2366)) ([02102d3](https://github.com/googleapis/java-bigquery/commit/02102d3fb873e68827a8630a4eb34d4bcabd5f9d)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20221015-2.0.0 ([#2370](https://github.com/googleapis/java-bigquery/issues/2370)) ([9b796cf](https://github.com/googleapis/java-bigquery/commit/9b796cf0b14094f9442c7e21d7789a673691b87d)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.10.0 ([#2356](https://github.com/googleapis/java-bigquery/issues/2356)) ([edb2ca0](https://github.com/googleapis/java-bigquery/commit/edb2ca03f2e216d6a1083a9dc2bc7f74bed9d3a5)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.0.5 ([#2361](https://github.com/googleapis/java-bigquery/issues/2361)) ([51b2258](https://github.com/googleapis/java-bigquery/commit/51b2258bbfa542c822668240c8d5f7cc6c63e03c)) +* Update dependency com.google.code.gson:gson to v2.10 ([#2367](https://github.com/googleapis/java-bigquery/issues/2367)) ([82e3de5](https://github.com/googleapis/java-bigquery/commit/82e3de5f76644e3530ac795a5eafd1dac4c3be07)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.9.15
+## [2.17.1](https://github.com/googleapis/java-bigquery/compare/v2.17.0...v2.17.1) (2022-10-10) + + +### Dependencies + +* Update cloud client dependencies ([#2335](https://github.com/googleapis/java-bigquery/issues/2335)) ([f8053d7](https://github.com/googleapis/java-bigquery/commit/f8053d7773d225b29e669976c6123b5d30ccd6a8)) +* Update cloud client dependencies ([#2337](https://github.com/googleapis/java-bigquery/issues/2337)) ([1194eac](https://github.com/googleapis/java-bigquery/commit/1194eacf23d947a0d923a3b3fd3f9460dfc996b3)) +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.5.6 ([#2336](https://github.com/googleapis/java-bigquery/issues/2336)) ([a86c759](https://github.com/googleapis/java-bigquery/commit/a86c7594d0c9e8a480297b028e108c86f4a1e12a)) + +## [2.17.0](https://github.com/googleapis/java-bigquery/compare/v2.16.1...v2.17.0) (2022-10-03) + + +### Features + +* Add remote function options to routine metadata ([#2291](https://github.com/googleapis/java-bigquery/issues/2291)) ([d30670e](https://github.com/googleapis/java-bigquery/commit/d30670ee2edf498b0335f3dfdec3487f5627a9f3)) + + +### Dependencies + +* Update dependency com.google.api.grpc:proto-google-cloud-bigqueryconnection-v1 to v2.5.5 ([#2328](https://github.com/googleapis/java-bigquery/issues/2328)) ([6e48ec2](https://github.com/googleapis/java-bigquery/commit/6e48ec22f98f95cc93a6a0e2a068d8a4d8c822ca)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20220913-2.0.0 ([#2287](https://github.com/googleapis/java-bigquery/issues/2287)) ([fa33184](https://github.com/googleapis/java-bigquery/commit/fa331844dc1862120867d73ad87d87587a388576)) +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20220924-2.0.0 ([#2325](https://github.com/googleapis/java-bigquery/issues/2325)) ([82c2097](https://github.com/googleapis/java-bigquery/commit/82c2097a866804ffb95a871087438fc163e8b77c)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.0.4 ([#2327](https://github.com/googleapis/java-bigquery/issues/2327)) ([188c779](https://github.com/googleapis/java-bigquery/commit/188c77995cad31b328cfbf745df164f4ac70b692)) +* Update dependency gcp-releasetool to v1.8.9 ([#2326](https://github.com/googleapis/java-bigquery/issues/2326)) ([52dfd13](https://github.com/googleapis/java-bigquery/commit/52dfd13a4d311526c784397f50ca5cf45b60f2a5)) +* Update dependency importlib-metadata to v4.13.0 ([#2323](https://github.com/googleapis/java-bigquery/issues/2323)) 
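([4c7e089](https://github.com/googleapis/java-bigquery/commit/4c7e089f281c7147cd468fbdbd19cd7238b49be3)) +* Update dependency importlib-metadata to v5 ([#2324](https://github.com/googleapis/java-bigquery/issues/2324)) ([bd43cf4](https://github.com/googleapis/java-bigquery/commit/bd43cf42443feba02d7970d3dd17c11d1b64872c)) +* Update dependency org.graalvm.buildtools:junit-platform-native to v0.9.14 ([#2288](https://github.com/googleapis/java-bigquery/issues/2288)) ([959519c](https://github.com/googleapis/java-bigquery/commit/959519cd9e5910ba7d93cce00c318ed322dcaf23)) +* Update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.14 ([#2289](https://github.com/googleapis/java-bigquery/issues/2289)) ([3cf7ef8](https://github.com/googleapis/java-bigquery/commit/3cf7ef83d891480bf80fcb1879ca86e9e053304e)) +* Update dependency org.junit.vintage:junit-vintage-engine to v5.9.1 ([#2285](https://github.com/googleapis/java-bigquery/issues/2285)) ([65fac18](https://github.com/googleapis/java-bigquery/commit/65fac188db2514ae620fb5146055591cfe6ac995)) +
+A sketch of reading the remote function options added to routine metadata in 2.17.0 above. The dataset and routine names are placeholders, and the getter names are assumptions based on the feature description:
+
+```java
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.RemoteFunctionOptions;
+import com.google.cloud.bigquery.Routine;
+import com.google.cloud.bigquery.RoutineId;
+
+public class RemoteFunctionOptionsSketch {
+  public static void main(String[] args) {
+    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+    Routine routine = bigquery.getRoutine(RoutineId.of("my_dataset", "my_remote_fn"));
+    // Populated only for routines backed by a remote function.
+    RemoteFunctionOptions options = routine.getRemoteFunctionOptions();
+    if (options != null) {
+      System.out.println("endpoint: " + options.getEndpoint());
+      System.out.println("connection: " + options.getConnection());
+    }
+  }
+}
+```
+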
+## [2.16.1](https://github.com/googleapis/java-bigquery/compare/v2.16.0...v2.16.1) (2022-09-15) + + +### Dependencies + +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.0.3 ([#2274](https://github.com/googleapis/java-bigquery/issues/2274)) ([4c9952b](https://github.com/googleapis/java-bigquery/commit/4c9952b4f8bc81a66f2a43ecbb9fa85774ed8a93)) + +## [2.16.0](https://github.com/googleapis/java-bigquery/compare/v2.15.0...v2.16.0) (2022-09-12) + + +### Features + +* Add preserveAsciiControlCharacters to CsvOptions ([#2143](https://github.com/googleapis/java-bigquery/issues/2143)) ([856893f](https://github.com/googleapis/java-bigquery/commit/856893f4d8f1b419365d8f179ce9f9e571dec718)) +* Add reference file schema option for federated formats ([#2269](https://github.com/googleapis/java-bigquery/issues/2269)) ([8c488e6](https://github.com/googleapis/java-bigquery/commit/8c488e64259bd67716342f48f96d2932c5e57c3e)) + + +### Bug Fixes + +* Socket-timeout at bigquery.it.ITNightlyBigQueryTest: testForTableNotFound ([#2260](https://github.com/googleapis/java-bigquery/issues/2260)) ([a9b5fb2](https://github.com/googleapis/java-bigquery/commit/a9b5fb2c1078788ddb1ac3169c9ce597af228ac0)) + + +### Dependencies + +* Update dependency com.google.apis:google-api-services-bigquery to v2-rev20220827-2.0.0 ([#2261](https://github.com/googleapis/java-bigquery/issues/2261)) ([3c67d21](https://github.com/googleapis/java-bigquery/commit/3c67d21c10f66b3c5313a1733f4e81db42c1b7c3)) +* Update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.9.3 ([#2259](https://github.com/googleapis/java-bigquery/issues/2259)) ([5e30a04](https://github.com/googleapis/java-bigquery/commit/5e30a04e5b14b03e60e587787180b27f605d6abd)) +* Update dependency com.google.cloud:google-cloud-shared-dependencies to v3.0.2 ([#2267](https://github.com/googleapis/java-bigquery/issues/2267)) ([8472fe5](https://github.com/googleapis/java-bigquery/commit/8472fe580a8197aaa3957dd3231fed0a9511fbb5))
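+The `preserveAsciiControlCharacters` flag added to `CsvOptions` in 2.16.0 above keeps raw ASCII control characters in loaded data instead of failing on them. A minimal sketch with a placeholder table and source URI:
+
+```java
+import com.google.cloud.bigquery.CsvOptions;
+import com.google.cloud.bigquery.LoadJobConfiguration;
+import com.google.cloud.bigquery.TableId;
+
+public class CsvControlCharactersSketch {
+  public static void main(String[] args) {
+    CsvOptions csvOptions =
+        CsvOptions.newBuilder()
+            .setSkipLeadingRows(1)
+            // Keep embedded ASCII control characters rather than rejecting the rows.
+            .setPreserveAsciiControlCharacters(true)
+            .build();
+    LoadJobConfiguration configuration =
+        LoadJobConfiguration.newBuilder(
+                TableId.of("my_dataset", "my_table"), "gs://my-bucket/data.csv")
+            .setFormatOptions(csvOptions)
+            .build();
+    System.out.println(configuration);
+  }
+}
+```
+
+## [2.15.0](https://github.com/googleapis/java-bigquery/compare/v2.14.7...v2.15.0) (2022-08-25) + + +### Features + +* add preview support for default values ([#2244](https://github.com/googleapis/java-bigquery/issues/2244)) ([fd3d3c5](https://github.com/googleapis/java-bigquery/commit/fd3d3c57afed84b4d00aab438d79472a6afa001b)) + +## [2.14.7](https://github.com/googleapis/java-bigquery/compare/v2.14.6...v2.14.7) (2022-08-23) + + +### Bug Fixes + +* table-not-found issue with 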
executeSelect while running long queries ([#2222](https://github.com/googleapis/java-bigquery/issues/2222)) ([4876569](https://github.com/googleapis/java-bigquery/commit/487656973fe3e06d838c1b495ac024ab2c6810f6)) + +## [2.14.6](https://github.com/googleapis/java-bigquery/compare/v2.14.5...v2.14.6) (2022-08-12) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.9.2 ([#2221](https://github.com/googleapis/java-bigquery/issues/2221)) ([3292cdd](https://github.com/googleapis/java-bigquery/commit/3292cddeec7c83fa198a96d80a35c13b003a26c8)) + +## [2.14.5](https://github.com/googleapis/java-bigquery/compare/v2.14.4...v2.14.5) (2022-08-12) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20220806-2.0.0 ([#2223](https://github.com/googleapis/java-bigquery/issues/2223)) ([05d1de1](https://github.com/googleapis/java-bigquery/commit/05d1de19488c45ceb202824d9ce2ae0fd290d930)) + +## [2.14.4](https://github.com/googleapis/java-bigquery/compare/v2.14.3...v2.14.4) (2022-08-08) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-storage to v2.11.3 ([#2213](https://github.com/googleapis/java-bigquery/issues/2213)) ([a293ab5](https://github.com/googleapis/java-bigquery/commit/a293ab56c5455cef8b9731784ddd78cc6162dca8)) + +## [2.14.3](https://github.com/googleapis/java-bigquery/compare/v2.14.2...v2.14.3) (2022-08-05) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20220730-2.0.0 ([#2208](https://github.com/googleapis/java-bigquery/issues/2208)) ([5165e2b](https://github.com/googleapis/java-bigquery/commit/5165e2b3d4001d58daa2a60b553926d938848ee6)) +* update dependency com.google.cloud:google-cloud-storage to v2.11.2 ([#2207](https://github.com/googleapis/java-bigquery/issues/2207)) ([da5389d](https://github.com/googleapis/java-bigquery/commit/da5389d78c5136f01c16d23f4f7ec54c6b4f3010)) + +## [2.14.2](https://github.com/googleapis/java-bigquery/compare/v2.14.1...v2.14.2) (2022-08-04) + + +### Dependencies + +* update arrow.version to v9 (major) ([#2201](https://github.com/googleapis/java-bigquery/issues/2201)) ([3ec5ef9](https://github.com/googleapis/java-bigquery/commit/3ec5ef987425315a0dc4d2ab9a4dc162cf000156)) +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20220716-2.0.0 ([#2202](https://github.com/googleapis/java-bigquery/issues/2202)) ([c1ca09e](https://github.com/googleapis/java-bigquery/commit/c1ca09e41bb9d4b070e241437b46d717e66f4944)) +* update dependency com.google.cloud:google-cloud-bigquerystorage-bom to 2.18.0 ([c1ca09e](https://github.com/googleapis/java-bigquery/commit/c1ca09e41bb9d4b070e241437b46d717e66f4944)) +* update dependency com.google.cloud:google-cloud-datacatalog-bom to 1.9.1 ([c1ca09e](https://github.com/googleapis/java-bigquery/commit/c1ca09e41bb9d4b070e241437b46d717e66f4944)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v3 ([c1ca09e](https://github.com/googleapis/java-bigquery/commit/c1ca09e41bb9d4b070e241437b46d717e66f4944)) +* update dependency com.google.cloud:google-cloud-storage to 2.11.0 ([c1ca09e](https://github.com/googleapis/java-bigquery/commit/c1ca09e41bb9d4b070e241437b46d717e66f4944)) +* update dependency com.google.cloud:google-cloud-storage to v2.11.1 ([#2194](https://github.com/googleapis/java-bigquery/issues/2194)) ([45be001](https://github.com/googleapis/java-bigquery/commit/45be00165846010afd43e184d94b81d4254f5cd5)) +* update dependency 
com.google.code.gson:gson to v2.9.1 ([#2190](https://github.com/googleapis/java-bigquery/issues/2190)) ([4bd4539](https://github.com/googleapis/java-bigquery/commit/4bd4539be4aa2ced4eeefde4b48fdbaa5faf5801)) +* update dependency org.threeten:threeten-extra to v1.7.1 ([c1ca09e](https://github.com/googleapis/java-bigquery/commit/c1ca09e41bb9d4b070e241437b46d717e66f4944)) + + +### Documentation + +* **owlbot-java:** explaining why not using formatter in pom.xml ([#1511](https://github.com/googleapis/java-bigquery/issues/1511)) ([#2195](https://github.com/googleapis/java-bigquery/issues/2195)) ([7c45aa5](https://github.com/googleapis/java-bigquery/commit/7c45aa5bf78e2c15534cdd6d3d9af572ea871e57)), closes [#1502](https://github.com/googleapis/java-bigquery/issues/1502) + +## [2.14.1](https://github.com/googleapis/java-bigquery/compare/v2.14.0...v2.14.1) (2022-07-27) + + +### Dependencies + +* update dependency org.junit.vintage:junit-vintage-engine to v5.9.0 ([#2183](https://github.com/googleapis/java-bigquery/issues/2183)) ([f8325cf](https://github.com/googleapis/java-bigquery/commit/f8325cff22af3f087b23d6376ab96e78648efd00)) + +## [2.14.0](https://github.com/googleapis/java-bigquery/compare/v2.13.8...v2.14.0) (2022-07-22) + + +### Features + +* Add decimal target type ([#2166](https://github.com/googleapis/java-bigquery/issues/2166)) ([ebbd8f5](https://github.com/googleapis/java-bigquery/commit/ebbd8f52853d3c0ca918a47d826474cc5825a58a)) +* **bigquery:** enable use of GEOGRAPHY query params ([#2158](https://github.com/googleapis/java-bigquery/issues/2158)) ([b19ad76](https://github.com/googleapis/java-bigquery/commit/b19ad767a53a9bd5d14b4cb36716cbb1c7b44ed6)) + + +### Bug Fixes + +* Add query dryRun logic to get the schema when null schema is returned from the backend ([#2106](https://github.com/googleapis/java-bigquery/issues/2106)) ([c98d22b](https://github.com/googleapis/java-bigquery/commit/c98d22b2b4f45e20d7d0666c5342cdbfadd30bde)) +* enable longpaths support for windows test ([#1485](https://github.com/googleapis/java-bigquery/issues/1485)) ([#2164](https://github.com/googleapis/java-bigquery/issues/2164)) ([e18b9f8](https://github.com/googleapis/java-bigquery/commit/e18b9f8b4d2f194577b1710ad64710fe0f3d88d9)) +* **java:** make field accessible to address Java 17 issue with arrow ([#2165](https://github.com/googleapis/java-bigquery/issues/2165)) ([d605b81](https://github.com/googleapis/java-bigquery/commit/d605b8149954e79c05461630915b674e11793889)) + + +### Dependencies + +* update dependency org.graalvm.buildtools:junit-platform-native to v0.9.13 ([#2160](https://github.com/googleapis/java-bigquery/issues/2160)) ([970135b](https://github.com/googleapis/java-bigquery/commit/970135bec33b831925476855da9a84c34311068d)) +* update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.13 ([#2161](https://github.com/googleapis/java-bigquery/issues/2161)) ([3507bf7](https://github.com/googleapis/java-bigquery/commit/3507bf7c9fc2aef299d06d9771cfcc06e3080b87))
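+With the GEOGRAPHY query parameter support added in 2.14.0 above, WKT text can be bound as a typed named parameter. A minimal sketch:
+
+```java
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.QueryJobConfiguration;
+import com.google.cloud.bigquery.QueryParameterValue;
+import com.google.cloud.bigquery.TableResult;
+
+public class GeographyParameterSketch {
+  public static void main(String[] args) throws InterruptedException {
+    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+    QueryJobConfiguration config =
+        QueryJobConfiguration.newBuilder("SELECT ST_ASTEXT(@point) AS wkt")
+            // The WKT string is sent as a GEOGRAPHY-typed parameter, not plain text.
+            .addNamedParameter("point", QueryParameterValue.geography("POINT(-122.35 47.62)"))
+            .build();
+    TableResult result = bigquery.query(config);
+    result.iterateAll().forEach(row -> System.out.println(row.get("wkt").getStringValue()));
+  }
+}
+```
+
+## [2.13.8](https://github.com/googleapis/java-bigquery/compare/v2.13.7...v2.13.8) (2022-07-01) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-storage to v2.9.0 ([#2149](https://github.com/googleapis/java-bigquery/issues/2149)) ([a07c714](https://github.com/googleapis/java-bigquery/commit/a07c714cb90c7ff62a43f7500abe8d54a5cd0936)) + +## [2.13.7](https://github.com/googleapis/java-bigquery/compare/v2.13.6...v2.13.7) (2022-06-29) + + +### Dependencies + +* update dependency 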
com.google.cloud:google-cloud-datacatalog-bom to v1.8.4 ([#2140](https://github.com/googleapis/java-bigquery/issues/2140)) ([c7ef597](https://github.com/googleapis/java-bigquery/commit/c7ef597832505e6c05adb38ac1db5dd15e32d024)) + +## [2.13.6](https://github.com/googleapis/java-bigquery/compare/v2.13.5...v2.13.6) (2022-06-24) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20220611-1.32.1 ([#2132](https://github.com/googleapis/java-bigquery/issues/2132)) ([bddefcf](https://github.com/googleapis/java-bigquery/commit/bddefcf647d50ee12fffea80c04613b38b8d02d0)) +* update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.8.3 ([#2135](https://github.com/googleapis/java-bigquery/issues/2135)) ([0bd5ddc](https://github.com/googleapis/java-bigquery/commit/0bd5ddc0df0a978692252e50c37c94f41a3c4e1d)) + +## [2.13.5](https://github.com/googleapis/java-bigquery/compare/v2.13.4...v2.13.5) (2022-06-23) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2.13.0 ([#2128](https://github.com/googleapis/java-bigquery/issues/2128)) ([3043533](https://github.com/googleapis/java-bigquery/commit/3043533608c5659be0313f1942d20314d2157fd4)) + +## [2.13.4](https://github.com/googleapis/java-bigquery/compare/v2.13.3...v2.13.4) (2022-06-22) + + +### Dependencies + +* update dependency org.graalvm.buildtools:junit-platform-native to v0.9.12 ([#2124](https://github.com/googleapis/java-bigquery/issues/2124)) ([4542ce9](https://github.com/googleapis/java-bigquery/commit/4542ce9a51d9756a8a06d0e33cf3a40d1e321ade)) +* update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.12 ([#2125](https://github.com/googleapis/java-bigquery/issues/2125)) ([6da965f](https://github.com/googleapis/java-bigquery/commit/6da965f540a2cdb2eaf845301cfbfbf34b9a6866)) + +## [2.13.3](https://github.com/googleapis/java-bigquery/compare/v2.13.2...v2.13.3) (2022-06-16) + + +### Bug Fixes + +* Assertj-core cleanup ([#2102](https://github.com/googleapis/java-bigquery/issues/2102)) ([4630c50](https://github.com/googleapis/java-bigquery/commit/4630c50db7428d888b726297408b7a223b39b28a)) + + +### Documentation + +* **sample:** clean up native image sample README ([#2120](https://github.com/googleapis/java-bigquery/issues/2120)) ([de7b45a](https://github.com/googleapis/java-bigquery/commit/de7b45a52259cec16970e074dd4f526685aa4d09)) + + +### Dependencies + +* update cloud client dependencies ([#2110](https://github.com/googleapis/java-bigquery/issues/2110)) ([30a88f4](https://github.com/googleapis/java-bigquery/commit/30a88f43aea6269e3fbe82544eb2112f25830761)) +* update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.8.2 ([#2101](https://github.com/googleapis/java-bigquery/issues/2101)) ([bdbd3da](https://github.com/googleapis/java-bigquery/commit/bdbd3da4c6c8bb7f2363711691edb31c7711d811)) +* update dependency com.google.oauth-client:google-oauth-client-java6 to v1.34.1 ([#2111](https://github.com/googleapis/java-bigquery/issues/2111)) ([1a0235f](https://github.com/googleapis/java-bigquery/commit/1a0235f9cdea0ae37b2e8b1047ca66395b1af3b0)) +* update dependency com.google.oauth-client:google-oauth-client-jetty to v1.34.1 ([#2112](https://github.com/googleapis/java-bigquery/issues/2112)) ([e52739f](https://github.com/googleapis/java-bigquery/commit/e52739ffcaeb9ca9dc362f07f117f37ecff220c7)) + +## [2.13.2](https://github.com/googleapis/java-bigquery/compare/v2.13.1...v2.13.2) (2022-06-05) + + +### Dependencies + +* update 
dependency com.google.apis:google-api-services-bigquery to v2-rev20220528-1.32.1 ([#2096](https://github.com/googleapis/java-bigquery/issues/2096)) ([9275750](https://github.com/googleapis/java-bigquery/commit/9275750f76938933e4e062d2eae2f8d5cd99c7e1)) + +## [2.13.1](https://github.com/googleapis/java-bigquery/compare/v2.13.0...v2.13.1) (2022-06-02) + + +### Dependencies + +* update dependency com.google.oauth-client:google-oauth-client-java6 to v1.34.0 ([#2088](https://github.com/googleapis/java-bigquery/issues/2088)) ([ed33496](https://github.com/googleapis/java-bigquery/commit/ed33496950bb25bb754a7bb71c74d73d99d25209)) +* update dependency com.google.oauth-client:google-oauth-client-jetty to v1.34.0 ([#2089](https://github.com/googleapis/java-bigquery/issues/2089)) ([117d390](https://github.com/googleapis/java-bigquery/commit/117d3907fcecaf923d200021ff66503a67dec2a1)) + +## [2.13.0](https://github.com/googleapis/java-bigquery/compare/v2.12.0...v2.13.0) (2022-05-31) + + +### Features + +* add destinationExpirationTime to CopyJobConfiguration ([#2031](https://github.com/googleapis/java-bigquery/issues/2031)) ([9e0b351](https://github.com/googleapis/java-bigquery/commit/9e0b35136aed6ed489bff4d4ac86c4d5d83274be)) + + +### Documentation + +* **samples:** update querypagination sample ([#2074](https://github.com/googleapis/java-bigquery/issues/2074)) ([4e153f5](https://github.com/googleapis/java-bigquery/commit/4e153f525cc600cecdfabec600b166560ba62607)) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-datacatalog-bom to v1.8.1 ([#2076](https://github.com/googleapis/java-bigquery/issues/2076)) ([38d6bae](https://github.com/googleapis/java-bigquery/commit/38d6baefeebe0dc2858d38f6c44ad727b6beba92)) +* update dependency com.google.cloud:google-cloud-storage to v2.7.2 ([#2077](https://github.com/googleapis/java-bigquery/issues/2077)) ([eb443df](https://github.com/googleapis/java-bigquery/commit/eb443dfd5fd26e9c424dcbb1b00af5260a525679))
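+A sketch of the `destinationExpirationTime` option added to `CopyJobConfiguration` in 2.13.0 above, assuming it accepts an ISO-8601 timestamp string (table names are placeholders):
+
+```java
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.CopyJobConfiguration;
+import com.google.cloud.bigquery.JobInfo;
+import com.google.cloud.bigquery.TableId;
+
+public class CopyWithExpirationSketch {
+  public static void main(String[] args) {
+    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+    CopyJobConfiguration configuration =
+        CopyJobConfiguration.newBuilder(
+                TableId.of("my_dataset", "my_table_copy"),
+                TableId.of("my_dataset", "my_table"))
+            // The copied table is deleted automatically at this time.
+            .setDestinationExpirationTime("2026-01-01T00:00:00Z")
+            .build();
+    bigquery.create(JobInfo.of(configuration));
+  }
+}
+```
+
+## [2.12.0](https://github.com/googleapis/java-bigquery/compare/v2.11.2...v2.12.0) (2022-05-25) + + +### Features + +* add build scripts for native image testing in Java 17 ([#1440](https://github.com/googleapis/java-bigquery/issues/1440)) ([#2057](https://github.com/googleapis/java-bigquery/issues/2057)) ([065ae78](https://github.com/googleapis/java-bigquery/commit/065ae78ef20052032c245b3fe991808c24ec8077)) + + +### Bug Fixes + +* add more native image configurations for Arrow tests and enable native image tests ([#2053](https://github.com/googleapis/java-bigquery/issues/2053)) ([7f0bfd4](https://github.com/googleapis/java-bigquery/commit/7f0bfd4a42c28f3d2a748474e1ec40740311a734)) +* Flaky testPositionalQueryParameters ([#2059](https://github.com/googleapis/java-bigquery/issues/2059)) ([3764b59](https://github.com/googleapis/java-bigquery/commit/3764b5967c694fa34aef75804333e5a6101d912e)) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v2.7.0 ([#2061](https://github.com/googleapis/java-bigquery/issues/2061)) ([1c7a0ab](https://github.com/googleapis/java-bigquery/commit/1c7a0ab157f79772d8da58bfe15f54a7394124e8)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2.11.0 ([#2055](https://github.com/googleapis/java-bigquery/issues/2055)) ([9667663](https://github.com/googleapis/java-bigquery/commit/9667663fbec20f262c218f07cce1ada0c9a4bce0)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2.12.0 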
([#2063](https://github.com/googleapis/java-bigquery/issues/2063)) ([6d3f4be](https://github.com/googleapis/java-bigquery/commit/6d3f4bead2315703015bd75711fcbf19428fad6e)) +* update dependency com.google.cloud:google-cloud-storage to v2.7.0 ([#2064](https://github.com/googleapis/java-bigquery/issues/2064)) ([fd47710](https://github.com/googleapis/java-bigquery/commit/fd47710afdf32fd535f8e2b430156eb4a659a64d)) +* update dependency com.google.cloud:google-cloud-storage to v2.7.1 ([#2066](https://github.com/googleapis/java-bigquery/issues/2066)) ([89962a5](https://github.com/googleapis/java-bigquery/commit/89962a5e3cec0e5a4334454b1bff83fba3d95d4d)) + +### [2.11.2](https://github.com/googleapis/java-bigquery/compare/v2.11.1...v2.11.2) (2022-05-18) + + +### Bug Fixes + +* Flaky connection close issue ([#2044](https://github.com/googleapis/java-bigquery/issues/2044)) ([9993717](https://github.com/googleapis/java-bigquery/commit/9993717d546c4039cb8c846787fdd131cc0c113f)) +* NPE issue with testMultipleRuns ([#2050](https://github.com/googleapis/java-bigquery/issues/2050)) ([251d468](https://github.com/googleapis/java-bigquery/commit/251d4686d22e0000982bcd891de68491326558fe)) + +### [2.11.1](https://github.com/googleapis/java-bigquery/compare/v2.11.0...v2.11.1) (2022-05-16) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20220507-1.32.1 ([#2042](https://github.com/googleapis/java-bigquery/issues/2042)) ([081888e](https://github.com/googleapis/java-bigquery/commit/081888e9ab9bc2c68e607fb11ff1ee40ac58873a)) + +## [2.11.0](https://github.com/googleapis/java-bigquery/compare/v2.10.10...v2.11.0) (2022-05-10) + + +### Features + +* add Connection interface ([#1374](https://github.com/googleapis/java-bigquery/issues/1374)) ([3804275](https://github.com/googleapis/java-bigquery/commit/380427511b33938209241f3800bff05914957f2b)) +* next release from main branch is 2.10.9 ([#1996](https://github.com/googleapis/java-bigquery/issues/1996)) ([f716427](https://github.com/googleapis/java-bigquery/commit/f716427490d4b87573669ab4338335c0844e82c0)) + + +### Bug Fixes + +* add native image configuration for Arrow ([#2018](https://github.com/googleapis/java-bigquery/issues/2018)) ([06cbe69](https://github.com/googleapis/java-bigquery/commit/06cbe69112e3e0d31e4fb90e43dca6e6dc4f1bdf)) +* fix for flaky connection close issue ([#2034](https://github.com/googleapis/java-bigquery/issues/2034)) ([db3daac](https://github.com/googleapis/java-bigquery/commit/db3daacea8a91ab80b7e923f1480874b01cbad0c)) + + +### Documentation + +* **sample:** remove unused dependency and add setup instructions ([#2010](https://github.com/googleapis/java-bigquery/issues/2010)) ([e2e9113](https://github.com/googleapis/java-bigquery/commit/e2e91133718d41349a94b5d93d66699e84233e64)) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20220422-1.32.1 ([#2017](https://github.com/googleapis/java-bigquery/issues/2017)) ([b9fa786](https://github.com/googleapis/java-bigquery/commit/b9fa786ad1bca2fb763a146fa3bb431ff6860153)) +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20220429-1.32.1 ([#2020](https://github.com/googleapis/java-bigquery/issues/2020)) ([78789a5](https://github.com/googleapis/java-bigquery/commit/78789a5530679349dbee691bd1907ffaefc617a6))
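+The `Connection` interface added in 2.11.0 above provides a JDBC-style, higher-throughput path than `bigquery.query(...)`. A minimal sketch (the buffered-row setting is illustrative):
+
+```java
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.BigQueryResult;
+import com.google.cloud.bigquery.Connection;
+import com.google.cloud.bigquery.ConnectionSettings;
+import java.sql.ResultSet;
+
+public class ConnectionSketch {
+  public static void main(String[] args) throws Exception {
+    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+    ConnectionSettings settings =
+        ConnectionSettings.newBuilder().setNumBufferedRows(10000).build();
+    Connection connection = bigquery.createConnection(settings);
+    // Eligible queries skip job creation and stream rows through a ResultSet.
+    BigQueryResult result = connection.executeSelect("SELECT 1 AS x");
+    ResultSet rs = result.getResultSet();
+    while (rs.next()) {
+      System.out.println(rs.getInt("x"));
+    }
+    connection.close();
+  }
+}
+```
+
+### [2.10.10](https://github.com/googleapis/java-bigquery/compare/v2.10.9...v2.10.10) (2022-04-18) + + +### Dependencies + +* update dependency 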
com.google.cloud:google-cloud-bigtable to v2.6.2 ([#1990](https://github.com/googleapis/java-bigquery/issues/1990)) ([4c1cb4c](https://github.com/googleapis/java-bigquery/commit/4c1cb4c13214556b706f1ff8c50a46f881bf2724)) +* update dependency com.google.cloud:google-cloud-storage to v2.6.1 ([#1991](https://github.com/googleapis/java-bigquery/issues/1991)) ([e02bf31](https://github.com/googleapis/java-bigquery/commit/e02bf315737dba50741c1346af8bde6871cb857a)) + +### [2.10.9](https://github.com/googleapis/java-bigquery/compare/v2.10.8...v2.10.9) (2022-04-16) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2.10.0 ([#1983](https://github.com/googleapis/java-bigquery/issues/1983)) ([50ac31c](https://github.com/googleapis/java-bigquery/commit/50ac31c598ae8c5aa4e1f6dcde80da704db904a5)) +* update dependency com.google.cloud:native-image-support to v0.13.1 ([#1982](https://github.com/googleapis/java-bigquery/issues/1982)) ([04f4679](https://github.com/googleapis/java-bigquery/commit/04f4679fe376e726b21e313115bfd48adc6cbe6b)) + +### [2.10.8](https://github.com/googleapis/java-bigquery/compare/v2.10.7...v2.10.8) (2022-04-14) + + +### Dependencies + +* update dependency com.google.oauth-client:google-oauth-client-java6 to v1.33.3 ([#1977](https://github.com/googleapis/java-bigquery/issues/1977)) ([b084791](https://github.com/googleapis/java-bigquery/commit/b08479180e212bccae166f3a675d46fb658d3ce8)) +* update dependency com.google.oauth-client:google-oauth-client-jetty to v1.33.3 ([#1978](https://github.com/googleapis/java-bigquery/issues/1978)) ([a24ce6e](https://github.com/googleapis/java-bigquery/commit/a24ce6ead65f3fceff6573e24c10c324650d907b)) + +### [2.10.7](https://github.com/googleapis/java-bigquery/compare/v2.10.6...v2.10.7) (2022-04-08) + + +### Dependencies + +* update dependency com.google.oauth-client:google-oauth-client-java6 to v1.33.2 ([#1969](https://github.com/googleapis/java-bigquery/issues/1969)) ([f1c4b84](https://github.com/googleapis/java-bigquery/commit/f1c4b847a271a86bba7f123945515c721c3b9ae1)) +* update dependency com.google.oauth-client:google-oauth-client-jetty to v1.33.2 ([#1970](https://github.com/googleapis/java-bigquery/issues/1970)) ([22fede5](https://github.com/googleapis/java-bigquery/commit/22fede57884f42001b3af91d5152a4da981d4fbc)) + +### [2.10.6](https://github.com/googleapis/java-bigquery/compare/v2.10.5...v2.10.6) (2022-04-07) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20220326-1.32.1 ([#1964](https://github.com/googleapis/java-bigquery/issues/1964)) ([4ff65d6](https://github.com/googleapis/java-bigquery/commit/4ff65d6147c4dc4472a8adfb1840a1d5a3e5a044)) + +### [2.10.5](https://github.com/googleapis/java-bigquery/compare/v2.10.4...v2.10.5) (2022-03-31) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20220322-1.32.1 ([#1951](https://github.com/googleapis/java-bigquery/issues/1951)) ([17ba20c](https://github.com/googleapis/java-bigquery/commit/17ba20c5e243327475c447ff180b9b2eef3f2b79)) +* update dependency com.google.cloud:google-cloud-bigtable to v2.6.1 ([#1952](https://github.com/googleapis/java-bigquery/issues/1952)) ([78a6ead](https://github.com/googleapis/java-bigquery/commit/78a6ead8882036da5abc41a3c442e76c2a30265f)) +* update dependency com.google.cloud:google-cloud-storage to v2.6.0 ([#1956](https://github.com/googleapis/java-bigquery/issues/1956)) 
([ee9dfce](https://github.com/googleapis/java-bigquery/commit/ee9dfce867900d9eba696aee3ca7bc09e68eccd4)) + +### [2.10.4](https://github.com/googleapis/java-bigquery/compare/v2.10.3...v2.10.4) (2022-03-29) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2.9.0 ([#1945](https://github.com/googleapis/java-bigquery/issues/1945)) ([e6a6502](https://github.com/googleapis/java-bigquery/commit/e6a65024498d5c44000fb982953d063710b7c1d5)) +* update dependency com.google.cloud:google-cloud-storage to v2.5.1 ([#1943](https://github.com/googleapis/java-bigquery/issues/1943)) ([c845e80](https://github.com/googleapis/java-bigquery/commit/c845e802a91bb22918913c3fd501eb80b8eb3c4c)) +* update dependency com.google.cloud:native-image-support to v0.12.11 ([#1944](https://github.com/googleapis/java-bigquery/issues/1944)) ([fd54a96](https://github.com/googleapis/java-bigquery/commit/fd54a96f8ccdf7876d914d338cbb2ec7b6cc310a)) +* update jmh.version to v1.35 ([#1938](https://github.com/googleapis/java-bigquery/issues/1938)) ([77b9a20](https://github.com/googleapis/java-bigquery/commit/77b9a20d2aff95a49e6e1e56a50442e3eb04bf3c)) + +### [2.10.3](https://github.com/googleapis/java-bigquery/compare/v2.10.2...v2.10.3) (2022-03-28) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-storage to v2.5.0 ([#1936](https://github.com/googleapis/java-bigquery/issues/1936)) ([57e6807](https://github.com/googleapis/java-bigquery/commit/57e680724a1f6717b183ef48ce039600a6c90b9a)) + +### [2.10.2](https://github.com/googleapis/java-bigquery/compare/v2.10.1...v2.10.2) (2022-03-24) + + +### Dependencies + +* update dependency org.graalvm.buildtools:junit-platform-native to v0.9.11 ([#1931](https://github.com/googleapis/java-bigquery/issues/1931)) ([d6af865](https://github.com/googleapis/java-bigquery/commit/d6af865bcccbdc3294d2c9442272f2f02bad3f8f)) +* update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.11 ([#1932](https://github.com/googleapis/java-bigquery/issues/1932)) ([bc444b2](https://github.com/googleapis/java-bigquery/commit/bc444b2cad99b8185a37a226abd2d315c95c6568)) + +### [2.10.1](https://github.com/googleapis/java-bigquery/compare/v2.10.0...v2.10.1) (2022-03-21) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20220313-1.32.1 ([#1925](https://github.com/googleapis/java-bigquery/issues/1925)) ([0e88b93](https://github.com/googleapis/java-bigquery/commit/0e88b930ef4d2581e0d44ba31a0d4b6a05f6cc53)) + +## [2.10.0](https://github.com/googleapis/java-bigquery/compare/v2.9.4...v2.10.0) (2022-03-14) + + +### Features + +* set Table.Schema for permanent external tables ([#1701](https://github.com/googleapis/java-bigquery/issues/1701)) ([73e829b](https://github.com/googleapis/java-bigquery/commit/73e829bad373279b13fb59a56b1dc60eac0835a0)) + + +### Documentation + +* **sample:** Added AuthorizeDataset Sample ([#1909](https://github.com/googleapis/java-bigquery/issues/1909)) ([a7a196b](https://github.com/googleapis/java-bigquery/commit/a7a196b4ea9cab28448bafe0fdc64f5e3de0412f)) +* **samples:** fix undeleteTable sample IT failure ([#1912](https://github.com/googleapis/java-bigquery/issues/1912)) ([7802f16](https://github.com/googleapis/java-bigquery/commit/7802f16fb24bf29ab93139d8404d4b3c4d80b506)), closes [#1911](https://github.com/googleapis/java-bigquery/issues/1911) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20220307-1.32.1 
([#1921](https://github.com/googleapis/java-bigquery/issues/1921)) ([fcad209](https://github.com/googleapis/java-bigquery/commit/fcad2091f9a24d667ceefc5a6d9aa57542bed702)) +* update dependency com.google.cloud:google-cloud-storage to v2.4.5 ([#1906](https://github.com/googleapis/java-bigquery/issues/1906)) ([d35d689](https://github.com/googleapis/java-bigquery/commit/d35d68963bc6a668d7177ac47d09b65dbefb9b7b)) +* update dependency com.google.cloud:native-image-support to v0.12.10 ([#1919](https://github.com/googleapis/java-bigquery/issues/1919)) ([a59ccf5](https://github.com/googleapis/java-bigquery/commit/a59ccf59c5fb1389fc5c5ed42ec8c41182f2e59d)) +* update dependency com.google.cloud:native-image-support to v0.12.8 ([#1907](https://github.com/googleapis/java-bigquery/issues/1907)) ([fddf593](https://github.com/googleapis/java-bigquery/commit/fddf59346e9635b5f10f94803ca87933337dc337)) +* update dependency com.google.cloud:native-image-support to v0.12.9 ([#1913](https://github.com/googleapis/java-bigquery/issues/1913)) ([830dd50](https://github.com/googleapis/java-bigquery/commit/830dd50ffaf62b398a1325df44e4c92cd0a6ae1b)) + +### [2.9.4](https://github.com/googleapis/java-bigquery/compare/v2.9.3...v2.9.4) (2022-03-08) + + +### Dependencies + +* update dependency com.google.cloud:native-image-support to v0.12.7 ([#1896](https://github.com/googleapis/java-bigquery/issues/1896)) ([5dcb02b](https://github.com/googleapis/java-bigquery/commit/5dcb02b04f9a87ba39e7cfa72229318926262029)) + +### [2.9.3](https://github.com/googleapis/java-bigquery/compare/v2.9.2...v2.9.3) (2022-03-08) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v2.6.0 ([#1892](https://github.com/googleapis/java-bigquery/issues/1892)) ([ce06adb](https://github.com/googleapis/java-bigquery/commit/ce06adb5f95704309eaf0ab4b49d2bdb4ceaeb98)) + +### [2.9.2](https://github.com/googleapis/java-bigquery/compare/v2.9.1...v2.9.2) (2022-03-07) + + +### Bug Fixes + +* add missing equality check for targetTypes in DatasetAclEntity ([#1866](https://github.com/googleapis/java-bigquery/issues/1866)) ([ca28e2d](https://github.com/googleapis/java-bigquery/commit/ca28e2d68901b6c9332f97c7985aaca7f4486e29)) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20220222-1.32.1 ([#1888](https://github.com/googleapis/java-bigquery/issues/1888)) ([c8eb867](https://github.com/googleapis/java-bigquery/commit/c8eb8671e53759e786955dd44fae4867632237e4)) +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20220226-1.32.1 ([#1890](https://github.com/googleapis/java-bigquery/issues/1890)) ([c8c5643](https://github.com/googleapis/java-bigquery/commit/c8c5643d0552f9f28a684514cd192f985e0d711c)) + +### [2.9.1](https://github.com/googleapis/java-bigquery/compare/v2.9.0...v2.9.1) (2022-03-03) + + +### Bug Fixes + +* adjusting retry logic to avoid retrying successful job creation ([#1879](https://github.com/googleapis/java-bigquery/issues/1879)) ([fd07533](https://github.com/googleapis/java-bigquery/commit/fd0753338e15965347683345b0e51838baf5d9f6)) +* **java:** add additional configurations to fix native image tests ([#1859](https://github.com/googleapis/java-bigquery/issues/1859)) ([3e82960](https://github.com/googleapis/java-bigquery/commit/3e82960f75ced489f9f0e72fe45165ab866f1d8b)) + + +### Documentation + +* **sample:** Table exists sample fix ([#1868](https://github.com/googleapis/java-bigquery/issues/1868)) 
([698306e](https://github.com/googleapis/java-bigquery/commit/698306e480b5f3a180c62b6d9ae0d919e05154d3)) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20220220-1.32.1 ([#1872](https://github.com/googleapis/java-bigquery/issues/1872)) ([e67cf65](https://github.com/googleapis/java-bigquery/commit/e67cf65bc044d07ba386f98cf67d2e16144255d0)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2.8.0 ([#1876](https://github.com/googleapis/java-bigquery/issues/1876)) ([a16985f](https://github.com/googleapis/java-bigquery/commit/a16985f79f5e09ee6567caf3eb502d7e88103f97)) +* update dependency com.google.cloud:google-cloud-storage to v2.4.2 ([#1853](https://github.com/googleapis/java-bigquery/issues/1853)) ([ef91109](https://github.com/googleapis/java-bigquery/commit/ef91109821a702a6b55b4f1265e812578ca881d8)) +* update dependency com.google.cloud:google-cloud-storage to v2.4.4 ([#1873](https://github.com/googleapis/java-bigquery/issues/1873)) ([a4deb16](https://github.com/googleapis/java-bigquery/commit/a4deb16ed54edf51608f27b47b0846fb23c553fd)) +* update dependency com.google.cloud:native-image-support to v0.12.4 ([#1855](https://github.com/googleapis/java-bigquery/issues/1855)) ([376738d](https://github.com/googleapis/java-bigquery/commit/376738d5fb7253de6e2e9d574aa99e9d7a9e67ad)) +* update dependency com.google.cloud:native-image-support to v0.12.5 ([#1874](https://github.com/googleapis/java-bigquery/issues/1874)) ([c68c49a](https://github.com/googleapis/java-bigquery/commit/c68c49a26abdcce8468b5e848cf39c458aba4774)) +* update dependency com.google.cloud:native-image-support to v0.12.6 ([#1878](https://github.com/googleapis/java-bigquery/issues/1878)) ([3749921](https://github.com/googleapis/java-bigquery/commit/3749921d6d120ffd79941c9ede64822cea03f1cd)) +* update dependency com.google.code.gson:gson to v2.9.0 ([#1850](https://github.com/googleapis/java-bigquery/issues/1850)) ([627da62](https://github.com/googleapis/java-bigquery/commit/627da62bd02314c673c345bd8eb87e973a805bc7)) +* update dependency org.graalvm.buildtools:junit-platform-native to v0.9.10 ([#1860](https://github.com/googleapis/java-bigquery/issues/1860)) ([b31b44c](https://github.com/googleapis/java-bigquery/commit/b31b44c170b1bc948daaae1a9ae6c469370f986c)) +* update dependency org.graalvm.buildtools:native-maven-plugin to v0.9.10 ([#1861](https://github.com/googleapis/java-bigquery/issues/1861)) ([ae05dfe](https://github.com/googleapis/java-bigquery/commit/ae05dfed0e670826f7674dc092b91bd5f634bf97)) + +## [2.9.0](https://github.com/googleapis/java-bigquery/compare/v2.8.0...v2.9.0) (2022-02-11) + + +### Features + +* add Interval type support ([#1844](https://github.com/googleapis/java-bigquery/issues/1844)) ([fd3751a](https://github.com/googleapis/java-bigquery/commit/fd3751a44be8f6401ea4b13684f862177ee9e976)) + + +### Documentation + +* **sample:** Add sample for native image support in Bigquery ([#1829](https://github.com/googleapis/java-bigquery/issues/1829)) ([7bb6c79](https://github.com/googleapis/java-bigquery/commit/7bb6c79e4839f183dda021ddf81a3961efd752d6)) + + +### Dependencies + +* update actions/github-script action to v6 ([#1847](https://github.com/googleapis/java-bigquery/issues/1847)) ([7ffe963](https://github.com/googleapis/java-bigquery/commit/7ffe963043ae8b243f1e351a5fffd992f3fcbbb5)) +* update dependency com.google.cloud:google-cloud-bigtable to v2.5.3 ([#1840](https://github.com/googleapis/java-bigquery/issues/1840)) 
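([88fc05f](https://github.com/googleapis/java-bigquery/commit/88fc05f3233e4e3a9cdfa73eff9856e4fd6fb1c7)) +* update dependency com.google.cloud:google-cloud-storage to v2.4.0 ([#1828](https://github.com/googleapis/java-bigquery/issues/1828)) ([d628fff](https://github.com/googleapis/java-bigquery/commit/d628fff9b899e13c75aaf26d42bfc553c48a3c4e)) +* update dependency com.google.cloud:google-cloud-storage to v2.4.1 ([#1839](https://github.com/googleapis/java-bigquery/issues/1839)) ([e8ebd5c](https://github.com/googleapis/java-bigquery/commit/e8ebd5c2ed29f26aa004e1bdf59ab2e7afb2963c)) +* update dependency com.google.cloud:native-image-support to v0.12.0 ([#1832](https://github.com/googleapis/java-bigquery/issues/1832)) ([1d27b30](https://github.com/googleapis/java-bigquery/commit/1d27b309e2fa6cdc99fc08234390a065d7ca1098)) +* update dependency com.google.cloud:native-image-support to v0.12.1 ([#1841](https://github.com/googleapis/java-bigquery/issues/1841)) ([15918a1](https://github.com/googleapis/java-bigquery/commit/15918a1fa006734ee265ccc569facb8958a1d0bb)) +* update dependency com.google.cloud:native-image-support to v0.12.2 ([#1843](https://github.com/googleapis/java-bigquery/issues/1843)) ([56e6acf](https://github.com/googleapis/java-bigquery/commit/56e6acf4def66c4c298fa7bb6b38025db9faee68)) +* update dependency com.google.cloud:native-image-support to v0.12.3 ([#1845](https://github.com/googleapis/java-bigquery/issues/1845)) ([b64b441](https://github.com/googleapis/java-bigquery/commit/b64b441bf4d0e79434e556f1fdb9ec0083d5baec)) +* update dependency com.google.oauth-client:google-oauth-client-java6 to v1.33.1 ([#1835](https://github.com/googleapis/java-bigquery/issues/1835)) ([7680714](https://github.com/googleapis/java-bigquery/commit/7680714f4a2d0da798ec3ea613701251cba859ff)) +* update dependency com.google.oauth-client:google-oauth-client-jetty to v1.33.1 ([#1836](https://github.com/googleapis/java-bigquery/issues/1836)) ([950f3cd](https://github.com/googleapis/java-bigquery/commit/950f3cdb3be2571f0519848aa167e67949e06f1e)) +
+A minimal sketch of the INTERVAL type support added in 2.9.0 above, binding an interval as a named query parameter (the "Y-M D H:M:S" literal form is BigQuery's canonical interval format):
+
+```java
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.QueryJobConfiguration;
+import com.google.cloud.bigquery.QueryParameterValue;
+import com.google.cloud.bigquery.TableResult;
+
+public class IntervalParameterSketch {
+  public static void main(String[] args) throws InterruptedException {
+    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+    QueryJobConfiguration config =
+        QueryJobConfiguration.newBuilder("SELECT @span AS span")
+            // One year, two months, three days, 4:05:06.
+            .addNamedParameter("span", QueryParameterValue.interval("1-2 3 4:5:6"))
+            .build();
+    TableResult result = bigquery.query(config);
+    result.iterateAll().forEach(row -> System.out.println(row.get("span").getValue()));
+  }
+}
+```
+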
+## [2.8.0](https://github.com/googleapis/java-bigquery/compare/v2.7.1...v2.8.0) (2022-02-02) + + +### Features + +* add Dataset ACL support ([#1763](https://github.com/googleapis/java-bigquery/issues/1763)) ([18a11e8](https://github.com/googleapis/java-bigquery/commit/18a11e88c0be5c0d5cf89d498439d5f8347e589d)) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20220123-1.32.1 ([#1819](https://github.com/googleapis/java-bigquery/issues/1819)) ([82175f1](https://github.com/googleapis/java-bigquery/commit/82175f19634550f8b16c830362798396cd28e79d)) +* update dependency com.google.cloud:google-cloud-bigtable to v2.5.2 ([#1821](https://github.com/googleapis/java-bigquery/issues/1821)) ([0fe0a78](https://github.com/googleapis/java-bigquery/commit/0fe0a78db110794f9d2797bd74792d361acef96c))
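+A sketch of the Dataset ACL support added in 2.8.0 above, authorizing another dataset via the `Acl.DatasetAclEntity` referenced elsewhere in these notes (names are placeholders; passing the entity straight to `Acl.of` is an assumption):
+
+```java
+import com.google.cloud.bigquery.Acl;
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.Dataset;
+import com.google.cloud.bigquery.DatasetId;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+public class DatasetAclSketch {
+  public static void main(String[] args) {
+    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+    Dataset dataset = bigquery.getDataset("my_dataset");
+    List<Acl> acl = new ArrayList<>(dataset.getAcl());
+    // Authorize views in other_dataset to read this dataset.
+    acl.add(Acl.of(new Acl.DatasetAclEntity(
+        DatasetId.of("my-project", "other_dataset"), Arrays.asList("VIEWS"))));
+    bigquery.update(dataset.toBuilder().setAcl(acl).build());
+  }
+}
+```
+
+### [2.7.1](https://github.com/googleapis/java-bigquery/compare/v2.7.0...v2.7.1) (2022-02-01) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2.7.0 ([#1813](https://github.com/googleapis/java-bigquery/issues/1813)) ([f2cfc8b](https://github.com/googleapis/java-bigquery/commit/f2cfc8bc5f97359a69ac3647919670bd714ac953)) + + +### Documentation + +* **samples:** fix CopyMultipleTables sample IT failure and improve a few other samples ([#1817](https://github.com/googleapis/java-bigquery/issues/1817)) 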
([e12122c](https://github.com/googleapis/java-bigquery/commit/e12122c4472ed4c3d00fc8c7515be210bbf68df3)) +* **samples:** fix GrantViewAccess sample IT failure ([#1816](https://github.com/googleapis/java-bigquery/issues/1816)) ([d48ae41](https://github.com/googleapis/java-bigquery/commit/d48ae41d1437bd9246d973a9f0b56f230a1eea68)) + +## [2.7.0](https://github.com/googleapis/java-bigquery/compare/v2.6.2...v2.7.0) (2022-01-27) + + +### Features + +* add JSON type support ([#1799](https://github.com/googleapis/java-bigquery/issues/1799)) ([73c4a73](https://github.com/googleapis/java-bigquery/commit/73c4a7330b717416fb0c9ce21215460f25faa930)) + + +### Dependencies + +* **java:** update actions/github-script action to v5 ([#1339](https://github.com/googleapis/java-bigquery/issues/1339)) ([#1809](https://github.com/googleapis/java-bigquery/issues/1809)) ([90afea5](https://github.com/googleapis/java-bigquery/commit/90afea5d50218c89d350fbb572072f2d75710072)) +* update actions/github-script action to v5 ([#1808](https://github.com/googleapis/java-bigquery/issues/1808)) ([8e5f585](https://github.com/googleapis/java-bigquery/commit/8e5f58552e83abf309e314bddbfdc9ab3527181e)) +* update dependency com.google.cloud:google-cloud-storage to v2.3.0 ([#1796](https://github.com/googleapis/java-bigquery/issues/1796)) ([8b77d9b](https://github.com/googleapis/java-bigquery/commit/8b77d9b207b96dcbb4afc2e8f06fb9c147ce6a90)) +* update dependency com.google.oauth-client:google-oauth-client-java6 to v1.33.0 ([#1802](https://github.com/googleapis/java-bigquery/issues/1802)) ([c78fc77](https://github.com/googleapis/java-bigquery/commit/c78fc775fb5278e7925a1d473d40e3a801eb4acf)) +* update dependency com.google.oauth-client:google-oauth-client-jetty to v1.33.0 ([#1803](https://github.com/googleapis/java-bigquery/issues/1803)) ([8e34e59](https://github.com/googleapis/java-bigquery/commit/8e34e59f13d289bcc9ea42d954c16db9eed1a423)) +* update dependency org.assertj:assertj-core to v3 ([#1786](https://github.com/googleapis/java-bigquery/issues/1786)) ([69fcabf](https://github.com/googleapis/java-bigquery/commit/69fcabf478c6fab23c4da3fcc516f820cc178a5b))
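+With the JSON type support added in 2.7.0 above, a JSON document can be bound as a typed parameter via `QueryParameterValue.json(...)`. A minimal sketch:
+
+```java
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.QueryJobConfiguration;
+import com.google.cloud.bigquery.QueryParameterValue;
+import com.google.cloud.bigquery.TableResult;
+
+public class JsonParameterSketch {
+  public static void main(String[] args) throws InterruptedException {
+    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+    QueryJobConfiguration config =
+        QueryJobConfiguration.newBuilder("SELECT JSON_VALUE(@doc, '$.name') AS name")
+            .addNamedParameter("doc", QueryParameterValue.json("{\"name\": \"bigquery\"}"))
+            .build();
+    TableResult result = bigquery.query(config);
+    result.iterateAll().forEach(row -> System.out.println(row.get("name").getStringValue()));
+  }
+}
+```
+
+### [2.6.2](https://www.github.com/googleapis/java-bigquery/compare/v2.6.1...v2.6.2) (2022-01-09) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v2.5.1 ([#1780](https://www.github.com/googleapis/java-bigquery/issues/1780)) ([60c4c44](https://www.github.com/googleapis/java-bigquery/commit/60c4c4470d77467f68e876c6d841df1f4e98dc20)) +* update dependency com.google.cloud:google-cloud-storage to v2.2.3 ([#1779](https://www.github.com/googleapis/java-bigquery/issues/1779)) ([925d22f](https://www.github.com/googleapis/java-bigquery/commit/925d22f8e142d7d19d40d229147e777c94b2c293)) + +### [2.6.1](https://www.github.com/googleapis/java-bigquery/compare/v2.6.0...v2.6.1) (2022-01-07) + + +### Bug Fixes + +* **java:** Pass missing integration test flags to native image test commands ([#1309](https://www.github.com/googleapis/java-bigquery/issues/1309)) ([#1766](https://www.github.com/googleapis/java-bigquery/issues/1766)) ([5363981](https://www.github.com/googleapis/java-bigquery/commit/536398115b5567f09b32de00f64f712ce811ae6c)) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v2.5.0 ([#1770](https://www.github.com/googleapis/java-bigquery/issues/1770)) ([d4ae6e7](https://www.github.com/googleapis/java-bigquery/commit/d4ae6e720c5f38bdf71e1bb1ecf949d3a3a5747a)) +* update dependency 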
com.google.cloud:google-cloud-shared-dependencies to v2.6.0 ([#1774](https://www.github.com/googleapis/java-bigquery/issues/1774)) ([53db89d](https://www.github.com/googleapis/java-bigquery/commit/53db89d6d20aa29480b1583393c28749875001f5)) + +## [2.6.0](https://www.github.com/googleapis/java-bigquery/compare/v2.5.1...v2.6.0) (2021-12-27) + + +### Features + +* create Job retry for rate limit exceeded with status code 200 ([#1744](https://www.github.com/googleapis/java-bigquery/issues/1744)) ([97a61dc](https://www.github.com/googleapis/java-bigquery/commit/97a61dc90fb701986a51a12c9c83b7138894307a)) + + +### Bug Fixes + +* **java:** add -ntp flag to native image testing command ([#1299](https://www.github.com/googleapis/java-bigquery/issues/1299)) ([#1738](https://www.github.com/googleapis/java-bigquery/issues/1738)) ([585875e](https://www.github.com/googleapis/java-bigquery/commit/585875e776e17660c58f9f8fe8385f13833bca57)) + + +### Documentation + +* rename alter materialized view to update ([#1754](https://www.github.com/googleapis/java-bigquery/issues/1754)) ([0b7d911](https://www.github.com/googleapis/java-bigquery/commit/0b7d91135222505f0eb01e8b40095156a073b62e)) +* **samples:** update UpdateTableExpirationIT to fix failing IT. ([#1753](https://www.github.com/googleapis/java-bigquery/issues/1753)) ([a62a9f4](https://www.github.com/googleapis/java-bigquery/commit/a62a9f4fdda465b8c9e2f67f111d1b1b4a067903)) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20211129-1.32.1 ([#1737](https://www.github.com/googleapis/java-bigquery/issues/1737)) ([776ff10](https://www.github.com/googleapis/java-bigquery/commit/776ff1004592f62799ff0244a448d6911bcca5be)) +* update dependency com.google.cloud:google-cloud-bigtable to v2.3.1 ([#1741](https://www.github.com/googleapis/java-bigquery/issues/1741)) ([2f31a0a](https://www.github.com/googleapis/java-bigquery/commit/2f31a0a4f491eca25cbd3992e48f94214bfd605b)) +* update dependency com.google.cloud:google-cloud-bigtable to v2.4.0 ([#1746](https://www.github.com/googleapis/java-bigquery/issues/1746)) ([92e5d02](https://www.github.com/googleapis/java-bigquery/commit/92e5d02ff25511233b15f07844bb8b13de2dc72f)) +* update dependency com.google.cloud:google-cloud-storage to v2.2.2 ([#1740](https://www.github.com/googleapis/java-bigquery/issues/1740)) ([2022301](https://www.github.com/googleapis/java-bigquery/commit/2022301b39390f20796b8c5b3d6ee0e82aa127aa)) +* update jmh.version to v1.34 ([#1758](https://www.github.com/googleapis/java-bigquery/issues/1758)) ([5a2bcbc](https://www.github.com/googleapis/java-bigquery/commit/5a2bcbc7197fa75a464ed62d3e3df3bd43652b9d)) + +### [2.5.1](https://www.github.com/googleapis/java-bigquery/compare/v2.5.0...v2.5.1) (2021-12-03) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v2.3.0 ([#1730](https://www.github.com/googleapis/java-bigquery/issues/1730)) ([6d503e8](https://www.github.com/googleapis/java-bigquery/commit/6d503e887d44d76a10fee6c9eaad69ae926b2489)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2.5.1 ([#1731](https://www.github.com/googleapis/java-bigquery/issues/1731)) ([3b4b075](https://www.github.com/googleapis/java-bigquery/commit/3b4b0755eea06f8d1e5c290fc9aae500676e7213)) + +## [2.5.0](https://www.github.com/googleapis/java-bigquery/compare/v2.4.1...v2.5.0) (2021-12-01) + + +### Features + +* add support for BI Engine Statistics ([#1723](https://www.github.com/googleapis/java-bigquery/issues/1723)) 
([13cc6e6](https://www.github.com/googleapis/java-bigquery/commit/13cc6e608fd501067f7c5dcd2f5b9a03c078b065)) + +### [2.4.1](https://www.github.com/googleapis/java-bigquery/compare/v2.4.0...v2.4.1) (2021-11-16) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-storage to v2.2.1 ([#1709](https://www.github.com/googleapis/java-bigquery/issues/1709)) ([3e6ac61](https://www.github.com/googleapis/java-bigquery/commit/3e6ac614a92b492407a920601781ed654b8523c6)) + +## [2.4.0](https://www.github.com/googleapis/java-bigquery/compare/v2.3.3...v2.4.0) (2021-11-15) + + +### Features + +* induce minor version bump for lts ([#1688](https://www.github.com/googleapis/java-bigquery/issues/1688)) ([6cb11db](https://www.github.com/googleapis/java-bigquery/commit/6cb11db5f15e7d617bc5aa4a3ac5fdacbe515b77)) + + +### Bug Fixes + +* **java:** java 17 dependency arguments ([#1683](https://www.github.com/googleapis/java-bigquery/issues/1683)) ([bef2705](https://www.github.com/googleapis/java-bigquery/commit/bef2705208abfc837d16f01758c802d817420dd4)) +* removing a new line character in a property ([#1700](https://www.github.com/googleapis/java-bigquery/issues/1700)) ([5185801](https://www.github.com/googleapis/java-bigquery/commit/5185801797c620dba9de7e72b7dea8ddc600ed58)) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20211106-1.32.1 ([#1703](https://www.github.com/googleapis/java-bigquery/issues/1703)) ([8987086](https://www.github.com/googleapis/java-bigquery/commit/8987086469ff3ce6320332353744b0adfbb2aefd)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2.5.0 ([#1702](https://www.github.com/googleapis/java-bigquery/issues/1702)) ([33ab54f](https://www.github.com/googleapis/java-bigquery/commit/33ab54f1559f903ec78f6d568c0aee666b2ad804)) +* update dependency com.google.cloud:google-cloud-storage to v2.2.0 ([#1691](https://www.github.com/googleapis/java-bigquery/issues/1691)) ([1f46d8d](https://www.github.com/googleapis/java-bigquery/commit/1f46d8dd316f1c8df392f749428986d4d9c7fa07)) + +### [2.3.3](https://www.github.com/googleapis/java-bigquery/compare/v2.3.2...v2.3.3) (2021-10-25) + + +### Bug Fixes + +* allow retry on connection establishing exceptions ([#1666](https://www.github.com/googleapis/java-bigquery/issues/1666)) ([fd06ad2](https://www.github.com/googleapis/java-bigquery/commit/fd06ad2728e52eac2e8570b0ba15830ad79470ad)) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20211017-1.32.1 ([#1679](https://www.github.com/googleapis/java-bigquery/issues/1679)) ([5e46e5c](https://www.github.com/googleapis/java-bigquery/commit/5e46e5c59f58efb996364edb394b149f4ead8428)) + +### [2.3.2](https://www.github.com/googleapis/java-bigquery/compare/v2.3.1...v2.3.2) (2021-10-20) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v2.2.0 ([#1667](https://www.github.com/googleapis/java-bigquery/issues/1667)) ([201852f](https://www.github.com/googleapis/java-bigquery/commit/201852fa3f9947da54bf4c4ec79d1b2630d76f2f)) + +### [2.3.1](https://www.github.com/googleapis/java-bigquery/compare/v2.3.0...v2.3.1) (2021-10-19) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2.4.0 ([#1661](https://www.github.com/googleapis/java-bigquery/issues/1661)) ([a499bbc](https://www.github.com/googleapis/java-bigquery/commit/a499bbc526da6a2e7f289ba2a86d9d206659d88c)) +* update dependency 
com.google.cloud:google-cloud-storage to v2.1.9 ([#1659](https://www.github.com/googleapis/java-bigquery/issues/1659)) ([16c2d22](https://www.github.com/googleapis/java-bigquery/commit/16c2d22550812e908f19969c27bcaf9dd5f861e1)) + +## [2.3.0](https://www.github.com/googleapis/java-bigquery/compare/v2.2.1...v2.3.0) (2021-10-15) + + +### Features + +* add session support ([#1652](https://www.github.com/googleapis/java-bigquery/issues/1652)) ([acc6cb8](https://www.github.com/googleapis/java-bigquery/commit/acc6cb8ad318ae41a9a3a00a5942025c14cbe681)) + + +### Bug Fixes + +* fix timestamp rounding issue ([#1645](https://www.github.com/googleapis/java-bigquery/issues/1645)) ([e60bdff](https://www.github.com/googleapis/java-bigquery/commit/e60bdff9e196a618a59a0544ec93f87b2ec5fc82))
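+A sketch of the session support added in 2.3.0 above: the first job opens a session, and later queries rejoin it by id so they can see the session's temporary objects. The `getSessionInfo()` accessor and `ConnectionProperty.of(...)` factory are assumptions based on the feature description:
+
+```java
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.ConnectionProperty;
+import com.google.cloud.bigquery.Job;
+import com.google.cloud.bigquery.JobInfo;
+import com.google.cloud.bigquery.JobStatistics;
+import com.google.cloud.bigquery.QueryJobConfiguration;
+import java.util.Collections;
+
+public class SessionSketch {
+  public static void main(String[] args) throws InterruptedException {
+    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+    // The first query opens the session.
+    Job job = bigquery.create(JobInfo.of(
+        QueryJobConfiguration.newBuilder("CREATE TEMP TABLE t AS SELECT 1 AS x")
+            .setCreateSession(true)
+            .build()));
+    JobStatistics.QueryStatistics stats = job.waitFor().getStatistics();
+    String sessionId = stats.getSessionInfo().getSessionId();
+    // Subsequent queries rejoin the session through the session_id property.
+    bigquery.query(QueryJobConfiguration.newBuilder("SELECT * FROM t")
+        .setConnectionProperties(
+            Collections.singletonList(ConnectionProperty.of("session_id", sessionId)))
+        .build());
+  }
+}
+```
+
+### [2.2.1](https://www.github.com/googleapis/java-bigquery/compare/v2.2.0...v2.2.1) (2021-10-05) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210927-1.32.1 ([#1634](https://www.github.com/googleapis/java-bigquery/issues/1634)) ([13f4523](https://www.github.com/googleapis/java-bigquery/commit/13f45230a6f8ef888a6afafc02a270fb012e962f)) +* update dependency com.google.cloud:google-cloud-storage to v2.1.7 ([#1635](https://www.github.com/googleapis/java-bigquery/issues/1635)) ([abb64dc](https://www.github.com/googleapis/java-bigquery/commit/abb64dccd569632c2de5cc20b3f8d34642dab98f)) + +## [2.2.0](https://www.github.com/googleapis/java-bigquery/compare/v2.1.13...v2.2.0) (2021-10-01) + + +### Features + +* add support for AvroOptions ([#1630](https://www.github.com/googleapis/java-bigquery/issues/1630)) ([10c1961](https://www.github.com/googleapis/java-bigquery/commit/10c1961f53ab6ba1b71ead9c51a369bf14389c49)) + +### [2.1.13](https://www.github.com/googleapis/java-bigquery/compare/v2.1.12...v2.1.13) (2021-09-29) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-storage to v2.1.6 ([#1621](https://www.github.com/googleapis/java-bigquery/issues/1621)) ([dfa15e5](https://www.github.com/googleapis/java-bigquery/commit/dfa15e5ca08a3227f015a389c4c08732178a73e7)) + +### [2.1.12](https://www.github.com/googleapis/java-bigquery/compare/v2.1.11...v2.1.12) (2021-09-24) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v2.1.4 ([#1616](https://www.github.com/googleapis/java-bigquery/issues/1616)) ([3e3b1da](https://www.github.com/googleapis/java-bigquery/commit/3e3b1da0f9e4006a83ed824f8ed67a9798219bee)) +* update dependency com.google.cloud:google-cloud-storage to v2.1.5 ([#1615](https://www.github.com/googleapis/java-bigquery/issues/1615)) ([00cc068](https://www.github.com/googleapis/java-bigquery/commit/00cc0682b694bcdad927e0ec1770b99896402563)) + +### [2.1.11](https://www.github.com/googleapis/java-bigquery/compare/v2.1.10...v2.1.11) (2021-09-22) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v2.1.3 ([#1609](https://www.github.com/googleapis/java-bigquery/issues/1609)) ([6b04f76](https://www.github.com/googleapis/java-bigquery/commit/6b04f769ad5fb2eaa0eec57a5e937b81413c95e3)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2.3.0 ([#1610](https://www.github.com/googleapis/java-bigquery/issues/1610)) ([fb3895e](https://www.github.com/googleapis/java-bigquery/commit/fb3895e811cdec46b05f2188cd054788fcb03a1d)) + +### [2.1.10](https://www.github.com/googleapis/java-bigquery/compare/v2.1.9...v2.1.10) (2021-09-22) + + +### Bug Fixes + +* add retrySetting in 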
logging ([#1604](https://www.github.com/googleapis/java-bigquery/issues/1604)) ([1a96cb0](https://www.github.com/googleapis/java-bigquery/commit/1a96cb076ded8eae8c34962aa00e914ba197b516)) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210910-1.32.1 ([#1602](https://www.github.com/googleapis/java-bigquery/issues/1602)) ([1ccac9a](https://www.github.com/googleapis/java-bigquery/commit/1ccac9a6ca891fd160911de524e8227414b29e75)) + +### [2.1.9](https://www.github.com/googleapis/java-bigquery/compare/v2.1.8...v2.1.9) (2021-09-21) + + +### Bug Fixes + +* do not throw NPE from BigQueryImpl.testIamPermissions ([#1596](https://www.github.com/googleapis/java-bigquery/issues/1596)) ([4251b19](https://www.github.com/googleapis/java-bigquery/commit/4251b19f5b240b907aa5fc6d0cc64026245714cf)) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-storage to v2.1.4 ([#1597](https://www.github.com/googleapis/java-bigquery/issues/1597)) ([354cf7f](https://www.github.com/googleapis/java-bigquery/commit/354cf7f3a8dd8cf24d9a5f7e3377591a13b60863)) + +### [2.1.8](https://www.github.com/googleapis/java-bigquery/compare/v2.1.7...v2.1.8) (2021-09-16) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210904-1.32.1 ([#1584](https://www.github.com/googleapis/java-bigquery/issues/1584)) ([9b42705](https://www.github.com/googleapis/java-bigquery/commit/9b42705e670e095cfd48b239e814d6608f4748c7)) +* update dependency com.google.cloud:google-cloud-bigtable to v2.1.2 ([#1585](https://www.github.com/googleapis/java-bigquery/issues/1585)) ([114d587](https://www.github.com/googleapis/java-bigquery/commit/114d587e4b6bf25df3a33a7d9bfc2d22cb929812)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2.2.1 ([#1586](https://www.github.com/googleapis/java-bigquery/issues/1586)) ([7478903](https://www.github.com/googleapis/java-bigquery/commit/747890305f503a8b8e2406d3c51b4b7e0fae0741)) +* update dependency com.google.cloud:google-cloud-storage to v2.1.3 ([#1587](https://www.github.com/googleapis/java-bigquery/issues/1587)) ([f006fd0](https://www.github.com/googleapis/java-bigquery/commit/f006fd02d2966e358d53af7da4a86901ad64e4e9)) + +### [2.1.7](https://www.github.com/googleapis/java-bigquery/compare/v2.1.6...v2.1.7) (2021-09-07) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210828-1.32.1 ([#1561](https://www.github.com/googleapis/java-bigquery/issues/1561)) ([cd6b3e2](https://www.github.com/googleapis/java-bigquery/commit/cd6b3e2c663e626fd3769813fd50f66a0a3231df)) +* update dependency com.google.cloud:google-cloud-storage to v2.1.1 ([#1565](https://www.github.com/googleapis/java-bigquery/issues/1565)) ([b0e5d7a](https://www.github.com/googleapis/java-bigquery/commit/b0e5d7ade8a82680cb02050586b758b83b80632e)) + +### [2.1.6](https://www.github.com/googleapis/java-bigquery/compare/v2.1.5...v2.1.6) (2021-09-02) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v2.1.1 ([#1554](https://www.github.com/googleapis/java-bigquery/issues/1554)) ([a88f662](https://www.github.com/googleapis/java-bigquery/commit/a88f662a1fc1c21f59e5847670dbb3f559e754f2)) + +### [2.1.5](https://www.github.com/googleapis/java-bigquery/compare/v2.1.4...v2.1.5) (2021-08-31) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210819-1.32.1 
([#1546](https://www.github.com/googleapis/java-bigquery/issues/1546)) ([c00e6b6](https://www.github.com/googleapis/java-bigquery/commit/c00e6b663b983e6ec030323d9da6829416065be5)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2.2.0 ([#1549](https://www.github.com/googleapis/java-bigquery/issues/1549)) ([69889f0](https://www.github.com/googleapis/java-bigquery/commit/69889f08b830ea47de66410cc45e9be3248c8154)) + +### [2.1.4](https://www.github.com/googleapis/java-bigquery/compare/v2.1.3...v2.1.4) (2021-08-25) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-storage to v2.1.0 ([#1534](https://www.github.com/googleapis/java-bigquery/issues/1534)) ([d998cf6](https://www.github.com/googleapis/java-bigquery/commit/d998cf65c653eff3322547662b7e88b51ec21c15)) + +### [2.1.3](https://www.github.com/googleapis/java-bigquery/compare/v2.1.2...v2.1.3) (2021-08-24) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210813-1.32.1 ([#1520](https://www.github.com/googleapis/java-bigquery/issues/1520)) ([da09e53](https://www.github.com/googleapis/java-bigquery/commit/da09e53bbdd18693c182d7945506b0531bb445e4)) +* update dependency com.google.cloud:google-cloud-bigtable to v2.0.1 ([#1521](https://www.github.com/googleapis/java-bigquery/issues/1521)) ([11da456](https://www.github.com/googleapis/java-bigquery/commit/11da456eb42f8b86d4ec64d192cd580f9e219aeb)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2.1.0 ([#1525](https://www.github.com/googleapis/java-bigquery/issues/1525)) ([cd7e46b](https://www.github.com/googleapis/java-bigquery/commit/cd7e46b6a7e78937a59c0a3aee0c81e51a342ce3)) +* update dependency com.google.cloud:google-cloud-storage to v2.0.2 ([#1522](https://www.github.com/googleapis/java-bigquery/issues/1522)) ([6be4f24](https://www.github.com/googleapis/java-bigquery/commit/6be4f2473ecf1b710e7142da27616b374b3cca12)) +* update dependency com.google.oauth-client:google-oauth-client-java6 to v1.32.1 ([#1526](https://www.github.com/googleapis/java-bigquery/issues/1526)) ([c31f18b](https://www.github.com/googleapis/java-bigquery/commit/c31f18b6dc9e02adcf72dc8ae8526d4ab766797c)) +* update dependency com.google.oauth-client:google-oauth-client-jetty to v1.32.1 ([#1527](https://www.github.com/googleapis/java-bigquery/issues/1527)) ([a36fc45](https://www.github.com/googleapis/java-bigquery/commit/a36fc45329b9a73aa6ee1316b2d348276f30d25c)) + +### [2.1.2](https://www.github.com/googleapis/java-bigquery/compare/v2.1.1...v2.1.2) (2021-08-17) + + +### Bug Fixes + +* add retry logging for BigQueryRetryAlgorithm.java ([#1506](https://www.github.com/googleapis/java-bigquery/issues/1506)) ([f598279](https://www.github.com/googleapis/java-bigquery/commit/f5982794fb5f9c13c3e328efcb8ab852682b4466)) + +### [2.1.1](https://www.github.com/googleapis/java-bigquery/compare/v2.1.0...v2.1.1) (2021-08-12) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2.0.1 ([#1503](https://www.github.com/googleapis/java-bigquery/issues/1503)) ([3f6f6ef](https://www.github.com/googleapis/java-bigquery/commit/3f6f6ef4d67b2ef01dac7f5aa2ed2eea862157e8)) + +## [2.1.0](https://www.github.com/googleapis/java-bigquery/compare/v2.0.1...v2.1.0) (2021-08-11) + + +### Features + +* add support for transactioninfo in query statistics ([#1497](https://www.github.com/googleapis/java-bigquery/issues/1497)) 
([4c3b2de](https://www.github.com/googleapis/java-bigquery/commit/4c3b2de16584b9079dd4afd5a33aa8c00bc75f20)), closes [#1467](https://www.github.com/googleapis/java-bigquery/issues/1467) +* Updated `BigQueryRetryAlgorithm` so that it can retry on RateLimit Errors using RegEx ([#1499](https://www.github.com/googleapis/java-bigquery/issues/1499)) ([ec68c11](https://www.github.com/googleapis/java-bigquery/commit/ec68c1145a89753e7d42458bbea86737cad6090f)) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v2 ([#1495](https://www.github.com/googleapis/java-bigquery/issues/1495)) ([cf26534](https://www.github.com/googleapis/java-bigquery/commit/cf265347e5e650670d19a1122f69497272854ebe)) +* update dependency com.google.cloud:google-cloud-storage to v2 ([#1491](https://www.github.com/googleapis/java-bigquery/issues/1491)) ([b705052](https://www.github.com/googleapis/java-bigquery/commit/b705052475899512c7981d3893b7d630f1488c99)) + +### [2.0.1](https://www.github.com/googleapis/java-bigquery/compare/v2.0.0...v2.0.1) (2021-08-09) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-storage to v1.118.1 ([#1486](https://www.github.com/googleapis/java-bigquery/issues/1486)) ([3ba6e1f](https://www.github.com/googleapis/java-bigquery/commit/3ba6e1f8548b5518076187a65b214c79195d86ac)) +* update jmh.version to v1.33 ([#1487](https://www.github.com/googleapis/java-bigquery/issues/1487)) ([3eedfbe](https://www.github.com/googleapis/java-bigquery/commit/3eedfbe208897c6ee45b480ff81a90a4dba9bdd4)) + +## [2.0.0](https://www.github.com/googleapis/java-bigquery/compare/v1.137.2...v2.0.0) (2021-08-05) + + +### ⚠ BREAKING CHANGES + +* update dependency com.google.cloud:google-cloud-shared-config to v1 (#1466) + +### Features + +* update dependency com.google.cloud:google-cloud-shared-config to v1 ([#1466](https://www.github.com/googleapis/java-bigquery/issues/1466)) ([3db013e](https://www.github.com/googleapis/java-bigquery/commit/3db013eeb01f255e1bc40af21370241761f2b519)) + +### [1.137.2](https://www.github.com/googleapis/java-bigquery/compare/v1.137.1...v1.137.2) (2021-08-05) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210726-1.32.1 ([#1469](https://www.github.com/googleapis/java-bigquery/issues/1469)) ([9df5ac2](https://www.github.com/googleapis/java-bigquery/commit/9df5ac2525e3aa9b52512d59cab0fc8b998f0bc9)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v2 ([#1472](https://www.github.com/googleapis/java-bigquery/issues/1472)) ([227983e](https://www.github.com/googleapis/java-bigquery/commit/227983e2c69dfdd5001f9c951f086daadb937578)) + +### [1.137.1](https://www.github.com/googleapis/java-bigquery/compare/v1.137.0...v1.137.1) (2021-07-20) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v1.27.3 ([#1452](https://www.github.com/googleapis/java-bigquery/issues/1452)) ([fea5eca](https://www.github.com/googleapis/java-bigquery/commit/fea5eca733d501b72b8713e555f54dc86bbde85f)) + +## [1.137.0](https://www.github.com/googleapis/java-bigquery/compare/v1.136.0...v1.137.0) (2021-07-14) + + +### Features + +* Implemented BigQueryRetryAlgorithm to retry on the basis of the configured re-triable error messages ([#1426](https://www.github.com/googleapis/java-bigquery/issues/1426)) ([44d9795](https://www.github.com/googleapis/java-bigquery/commit/44d97953febac11c9167efe6781c779f486355d0)) + + +### Dependencies + +* update dependency 
com.google.cloud:google-cloud-storage to v1.118.0 ([#1445](https://www.github.com/googleapis/java-bigquery/issues/1445)) ([271b861](https://www.github.com/googleapis/java-bigquery/commit/271b8612da64e5216ac6dc5a39493b0593b23320)) + +## [1.136.0](https://www.github.com/googleapis/java-bigquery/compare/v1.135.4...v1.136.0) (2021-07-08) + + +### Features + +* add dmlStatistics support ([#1431](https://www.github.com/googleapis/java-bigquery/issues/1431)) ([9d67e05](https://www.github.com/googleapis/java-bigquery/commit/9d67e05d2a6fab0c9e6017dec33b0d74ad821825)) + +### [1.135.4](https://www.github.com/googleapis/java-bigquery/compare/v1.135.3...v1.135.4) (2021-07-08) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210617-1.32.1 ([#1425](https://www.github.com/googleapis/java-bigquery/issues/1425)) ([82467ed](https://www.github.com/googleapis/java-bigquery/commit/82467ed4d8435e33a6496663a5dd6ae9377d4212)) + +### [1.135.3](https://www.github.com/googleapis/java-bigquery/compare/v1.135.2...v1.135.3) (2021-07-02) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v1.27.1 ([#1414](https://www.github.com/googleapis/java-bigquery/issues/1414)) ([6d6d276](https://www.github.com/googleapis/java-bigquery/commit/6d6d27699cc36984c2138beaf377906f8555d52a)) +* update dependency com.google.cloud:google-cloud-bigtable to v1.27.2 ([#1422](https://www.github.com/googleapis/java-bigquery/issues/1422)) ([53c6cf1](https://www.github.com/googleapis/java-bigquery/commit/53c6cf14b96289ff2291a0dec1327e5b98d32910)) +* update dependency com.google.cloud:google-cloud-storage to v1.117.1 ([#1417](https://www.github.com/googleapis/java-bigquery/issues/1417)) ([a746a16](https://www.github.com/googleapis/java-bigquery/commit/a746a1647db3b39c8f1bb68d97fb71bc477ad678)) + +### [1.135.2](https://www.github.com/googleapis/java-bigquery/compare/v1.135.1...v1.135.2) (2021-06-30) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-shared-dependencies to v1.4.0 ([#1411](https://www.github.com/googleapis/java-bigquery/issues/1411)) ([e626c4a](https://www.github.com/googleapis/java-bigquery/commit/e626c4aec7b7796fe5c888d7f726cd957469b4ad)) + +### [1.135.1](https://www.github.com/googleapis/java-bigquery/compare/v1.135.0...v1.135.1) (2021-06-29) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-storage to v1.117.0 ([#1403](https://www.github.com/googleapis/java-bigquery/issues/1403)) ([0cfcab0](https://www.github.com/googleapis/java-bigquery/commit/0cfcab02e7d80317b19062937677dbe07d3b3bd5)) + +## [1.135.0](https://www.github.com/googleapis/java-bigquery/compare/v1.134.1...v1.135.0) (2021-06-28) + + +### Features + +* add support for table snapshot ([#1320](https://www.github.com/googleapis/java-bigquery/issues/1320)) ([d783292](https://www.github.com/googleapis/java-bigquery/commit/d783292f4bd2d971235d8afa6318976856529cb5)) + +### [1.134.1](https://www.github.com/googleapis/java-bigquery/compare/v1.134.0...v1.134.1) (2021-06-28) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210617-1.31.5 ([#1394](https://www.github.com/googleapis/java-bigquery/issues/1394)) ([580ac64](https://www.github.com/googleapis/java-bigquery/commit/580ac64e98167db7a57ca7e196e792a48822dce1)) + +## [1.134.0](https://www.github.com/googleapis/java-bigquery/compare/v1.133.1...v1.134.0) (2021-06-25) + + +### Features + +* add support for jobs.delete 
([#1387](https://www.github.com/googleapis/java-bigquery/issues/1387)) ([95f1a6c](https://www.github.com/googleapis/java-bigquery/commit/95f1a6c2e3281d61d38660749cf31fa504ddf8e0)) +* add support for parameterized type ([#1390](https://www.github.com/googleapis/java-bigquery/issues/1390)) ([b1fb57c](https://www.github.com/googleapis/java-bigquery/commit/b1fb57c0b43f4bdc9e32b821b32e95bb1ca8df3b)), closes [#1309](https://www.github.com/googleapis/java-bigquery/issues/1309) + + +### Bug Fixes + +* bug fix for get method of Bigquery Dataset ([#1379](https://www.github.com/googleapis/java-bigquery/issues/1379)) ([f034a99](https://www.github.com/googleapis/java-bigquery/commit/f034a99806613fc6abb5587c56362fe38d60bebc)) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210529-1.31.5 ([#1380](https://www.github.com/googleapis/java-bigquery/issues/1380)) ([4a6906a](https://www.github.com/googleapis/java-bigquery/commit/4a6906aa1e3ac139973823bdf142e5c280f6ab68)) +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210611-1.31.5 ([#1383](https://www.github.com/googleapis/java-bigquery/issues/1383)) ([b09f951](https://www.github.com/googleapis/java-bigquery/commit/b09f951d9debe783c5f67c3e9479fb640722a8bd)) +* update dependency com.google.cloud:google-cloud-bigtable to v1.27.0 ([#1384](https://www.github.com/googleapis/java-bigquery/issues/1384)) ([6c4d886](https://www.github.com/googleapis/java-bigquery/commit/6c4d886a3875eaed5d7c10b82f6c064c1995b6ca)) + + +### Documentation + +* **sample:** update UpdateTableExpiration sample ([#1389](https://www.github.com/googleapis/java-bigquery/issues/1389)) ([ac854c4](https://www.github.com/googleapis/java-bigquery/commit/ac854c42fa4339f688c03d121e25df6030eabd3f)), closes [#1371](https://www.github.com/googleapis/java-bigquery/issues/1371) + +### [1.133.1](https://www.github.com/googleapis/java-bigquery/compare/v1.133.0...v1.133.1) (2021-06-16) + + +### Bug Fixes + +* handle specific exceptions ([#1370](https://www.github.com/googleapis/java-bigquery/issues/1370)) ([68808e9](https://www.github.com/googleapis/java-bigquery/commit/68808e9f47c07af598324f3f07dd996bc1e5297b)) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-storage to v1.116.0 ([#1375](https://www.github.com/googleapis/java-bigquery/issues/1375)) ([d064b7b](https://www.github.com/googleapis/java-bigquery/commit/d064b7b363508d1a958b92563c5df1d1c429c69d)) + +## [1.133.0](https://www.github.com/googleapis/java-bigquery/compare/v1.132.1...v1.133.0) (2021-06-08) + + +### Features + +* add support for DecimalTargetTypes ([#1345](https://www.github.com/googleapis/java-bigquery/issues/1345)) ([ba528df](https://www.github.com/googleapis/java-bigquery/commit/ba528df03def71907e2811cf267718f090605d95)) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v1.26.1 ([#1360](https://www.github.com/googleapis/java-bigquery/issues/1360)) ([bf55699](https://www.github.com/googleapis/java-bigquery/commit/bf55699d849de7e873577de04e44fbfe0f078ab1)) + +### [1.132.1](https://www.github.com/googleapis/java-bigquery/compare/v1.132.0...v1.132.1) (2021-06-07) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210529-1.31.0 ([#1355](https://www.github.com/googleapis/java-bigquery/issues/1355)) ([bc7744e](https://www.github.com/googleapis/java-bigquery/commit/bc7744e5570c2990231a680fb9cda9acebb3d7ca)) + +## 
[1.132.0](https://www.github.com/googleapis/java-bigquery/compare/v1.131.1...v1.132.0) (2021-06-04) + + +### Features + +* add `gcf-owl-bot[bot]` to `ignoreAuthors` ([#1331](https://www.github.com/googleapis/java-bigquery/issues/1331)) ([1f19362](https://www.github.com/googleapis/java-bigquery/commit/1f19362ad5527a12e4fadc1df42523857b6e709a)) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210518-1.31.0 ([#1341](https://www.github.com/googleapis/java-bigquery/issues/1341)) ([a37a2f5](https://www.github.com/googleapis/java-bigquery/commit/a37a2f5240bb7b0681b8e5e70801d96b4a5675e5)) +* update dependency com.google.cloud:google-cloud-bigtable to v1.25.0 ([#1334](https://www.github.com/googleapis/java-bigquery/issues/1334)) ([f7be534](https://www.github.com/googleapis/java-bigquery/commit/f7be534a5a7fb79232bcd3d082365033262f6dcb)) +* update dependency com.google.cloud:google-cloud-bigtable to v1.26.0 ([#1347](https://www.github.com/googleapis/java-bigquery/issues/1347)) ([55f3e7b](https://www.github.com/googleapis/java-bigquery/commit/55f3e7b359cc197767e75f026f572110649c2ba7)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v1.3.0 ([#1348](https://www.github.com/googleapis/java-bigquery/issues/1348)) ([ab41045](https://www.github.com/googleapis/java-bigquery/commit/ab41045628a6735de3223bb22285f0affc269c75)) +* update dependency com.google.cloud:google-cloud-storage to v1.115.0 ([#1344](https://www.github.com/googleapis/java-bigquery/issues/1344)) ([0ee09f7](https://www.github.com/googleapis/java-bigquery/commit/0ee09f779c116c000be1cc99c6b1d52a14b33403)) +* update jmh.version to v1.32 ([#1340](https://www.github.com/googleapis/java-bigquery/issues/1340)) ([dfaa49e](https://www.github.com/googleapis/java-bigquery/commit/dfaa49e06715198a012385fad4a37160403b54f4)) + +### [1.131.1](https://www.github.com/googleapis/java-bigquery/compare/v1.131.0...v1.131.1) (2021-05-19) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-shared-dependencies to v1.2.0 ([#1324](https://www.github.com/googleapis/java-bigquery/issues/1324)) ([dd238db](https://www.github.com/googleapis/java-bigquery/commit/dd238db05664e588aca00e4f10e368d3a574d605)) +* update dependency com.google.cloud:google-cloud-storage to v1.114.0 ([#1306](https://www.github.com/googleapis/java-bigquery/issues/1306)) ([a2cb66a](https://www.github.com/googleapis/java-bigquery/commit/a2cb66a60a44748e7aa8d4f4b5386e46191557b7)) + +## [1.131.0](https://www.github.com/googleapis/java-bigquery/compare/v1.130.0...v1.131.0) (2021-05-18) + + +### Features + +* add ParquetOptions support and expose it in LoadJobConfiguration and ExternalTableDefinition classes ([#1318](https://www.github.com/googleapis/java-bigquery/issues/1318)) ([72b1715](https://www.github.com/googleapis/java-bigquery/commit/72b17151c4f3c4a3d298d3791e58c3112a14b4f5)) + +## [1.130.0](https://www.github.com/googleapis/java-bigquery/compare/v1.129.0...v1.130.0) (2021-05-15) + + +### Features + +* add support for partitioning and clustering in MaterializedViewDefinition ([#1301](https://www.github.com/googleapis/java-bigquery/issues/1301)) ([b909754](https://www.github.com/googleapis/java-bigquery/commit/b909754b4c828871c8b360d0ddf69cc488b1d33c)), closes [#1300](https://www.github.com/googleapis/java-bigquery/issues/1300) + + +### Dependencies + +* update dependency com.google.api.grpc:proto-google-cloud-datacatalog-v1 to v1.3.3 
([#1296](https://www.github.com/googleapis/java-bigquery/issues/1296)) ([558cb14](https://www.github.com/googleapis/java-bigquery/commit/558cb14a4091c9e44dea766b967ecb78132a46c5)) +* update dependency com.google.cloud:google-cloud-bigtable to v1.24.1 ([#1305](https://www.github.com/googleapis/java-bigquery/issues/1305)) ([96363a9](https://www.github.com/googleapis/java-bigquery/commit/96363a969f8ac8ffb3627e8a0da69c030dc9f862)) +* update dependency com.google.cloud:google-cloud-datacatalog to v1.3.3 ([#1297](https://www.github.com/googleapis/java-bigquery/issues/1297)) ([c49e697](https://www.github.com/googleapis/java-bigquery/commit/c49e697086f6aaf8271daf799b985b468496c382)) +* update jmh.version to v1.31 ([#1303](https://www.github.com/googleapis/java-bigquery/issues/1303)) ([817cf7b](https://www.github.com/googleapis/java-bigquery/commit/817cf7bbecc920e9866cd85f2b1689926933c4da)) + +## [1.129.0](https://www.github.com/googleapis/java-bigquery/compare/v1.128.3...v1.129.0) (2021-05-11) + + +### Features + +* add support for user defined TVFs ([#1278](https://www.github.com/googleapis/java-bigquery/issues/1278)) ([89958e9](https://www.github.com/googleapis/java-bigquery/commit/89958e9b5e4330b31878aa31b90569d2fd0310f2)) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210422-1.31.0 ([#1275](https://www.github.com/googleapis/java-bigquery/issues/1275)) ([acc88c5](https://www.github.com/googleapis/java-bigquery/commit/acc88c5de445d166055825bf51d56a031e642675)) +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210430-1.31.0 ([#1285](https://www.github.com/googleapis/java-bigquery/issues/1285)) ([e74ae1c](https://www.github.com/googleapis/java-bigquery/commit/e74ae1c180c8ce27c9c8985c1935a894889b6375)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v1.1.0 ([#1290](https://www.github.com/googleapis/java-bigquery/issues/1290)) ([e60f2cd](https://www.github.com/googleapis/java-bigquery/commit/e60f2cd823b1551e43154f87a5157acca51b346c)) +* update jmh.version to v1.30 ([#1281](https://www.github.com/googleapis/java-bigquery/issues/1281)) ([3704a6c](https://www.github.com/googleapis/java-bigquery/commit/3704a6cc46b84ce3b5ba01466ba39333f1b16886)) + +### [1.128.3](https://www.github.com/googleapis/java-bigquery/compare/v1.128.2...v1.128.3) (2021-04-30) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v1.24.0 ([#1269](https://www.github.com/googleapis/java-bigquery/issues/1269)) ([f006fa9](https://www.github.com/googleapis/java-bigquery/commit/f006fa9b4e5029c6eaa0440308c33f7c6b963b50)) + +### [1.128.2](https://www.github.com/googleapis/java-bigquery/compare/v1.128.1...v1.128.2) (2021-04-30) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v1.23.2 ([#1255](https://www.github.com/googleapis/java-bigquery/issues/1255)) ([bc6075b](https://www.github.com/googleapis/java-bigquery/commit/bc6075b1eefaf9a65fc5f4d27ab8a7b76d499d35)) +* update dependency com.google.cloud:google-cloud-storage to v1.113.16 ([#1259](https://www.github.com/googleapis/java-bigquery/issues/1259)) ([8c0b85b](https://www.github.com/googleapis/java-bigquery/commit/8c0b85b5b9a9661b10181587c29e819ee5b6a708)) + +### [1.128.1](https://www.github.com/googleapis/java-bigquery/compare/v1.128.0...v1.128.1) (2021-04-26) + + +### Bug Fixes + +* release scripts from issuing overlapping phases ([#1241](https://www.github.com/googleapis/java-bigquery/issues/1241)) 
([b2bbc90](https://www.github.com/googleapis/java-bigquery/commit/b2bbc90d968573e4e2e7c0785da3b5ae4c1ac0d7)) +* typo ([#1236](https://www.github.com/googleapis/java-bigquery/issues/1236)) ([e2e7f90](https://www.github.com/googleapis/java-bigquery/commit/e2e7f900db11c513818339208eee28cccc5aa76f)) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210410-1.31.0 ([#1245](https://www.github.com/googleapis/java-bigquery/issues/1245)) ([b00ded9](https://www.github.com/googleapis/java-bigquery/commit/b00ded92dfe553f9cf06624606717729f1103d6b)) +* update dependency com.google.cloud:google-cloud-bigtable to v1.23.0 ([#1239](https://www.github.com/googleapis/java-bigquery/issues/1239)) ([18c17ab](https://www.github.com/googleapis/java-bigquery/commit/18c17ab30e893763d4075f40242aceec4c1d14d4)) +* update dependency com.google.cloud:google-cloud-bigtable to v1.23.1 ([#1248](https://www.github.com/googleapis/java-bigquery/issues/1248)) ([5306bf1](https://www.github.com/googleapis/java-bigquery/commit/5306bf1cd86c6e6bc71374f1a33dcd54b8bcd578)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v0.21.1 ([#1246](https://www.github.com/googleapis/java-bigquery/issues/1246)) ([119a378](https://www.github.com/googleapis/java-bigquery/commit/119a378d6619098b4e123475c2ac7657e26c52e3)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v1 ([#1252](https://www.github.com/googleapis/java-bigquery/issues/1252)) ([80e63b4](https://www.github.com/googleapis/java-bigquery/commit/80e63b4bec2eaf3d53f900b8b226c156d095f262)) +* update dependency com.google.cloud:google-cloud-storage to v1.113.15 ([#1238](https://www.github.com/googleapis/java-bigquery/issues/1238)) ([6997434](https://www.github.com/googleapis/java-bigquery/commit/69974342628d1718ae79d810f9a89c264f77b878)) + +## [1.128.0](https://www.github.com/googleapis/java-bigquery/compare/v1.127.12...v1.128.0) (2021-04-09) + + +### Features + +* add support for parameterMode in QueryJobConfiguration to unblock JDBC migration to the Java client library ([#1223](https://www.github.com/googleapis/java-bigquery/issues/1223)) ([3ce4933](https://www.github.com/googleapis/java-bigquery/commit/3ce49334478dc0905cdcb476c739a49d296de922)) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210404-1.31.0 ([#1226](https://www.github.com/googleapis/java-bigquery/issues/1226)) ([8ea26fc](https://www.github.com/googleapis/java-bigquery/commit/8ea26fcedf1f34565d287daab388c2a93d7ac6ea)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v0.21.0 ([#1224](https://www.github.com/googleapis/java-bigquery/issues/1224)) ([d5c5747](https://www.github.com/googleapis/java-bigquery/commit/d5c5747e74d8c2e1ca0901eea0d82fd94460a639)) +* update dependency com.google.oauth-client:google-oauth-client-java6 to v1.31.5 ([#1221](https://www.github.com/googleapis/java-bigquery/issues/1221)) ([f7cdb36](https://www.github.com/googleapis/java-bigquery/commit/f7cdb36c1f9e513e78e390ae5319c70f7c454536)) +* update dependency com.google.oauth-client:google-oauth-client-jetty to v1.31.5 ([#1222](https://www.github.com/googleapis/java-bigquery/issues/1222)) ([0a271cb](https://www.github.com/googleapis/java-bigquery/commit/0a271cb363180a093385ed6c5c68e12c6fde6502)) + +### [1.127.12](https://www.github.com/googleapis/java-bigquery/compare/v1.127.11...v1.127.12) (2021-04-07) + + +### Documentation + +* **samples:** update querypagination sample 
([#1209](https://www.github.com/googleapis/java-bigquery/issues/1209)) ([9b4f3ec](https://www.github.com/googleapis/java-bigquery/commit/9b4f3ecc248fd6fac2b9542b14d6f343bcb8463c)) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210327-1.31.0 ([#1214](https://www.github.com/googleapis/java-bigquery/issues/1214)) ([bf5d444](https://www.github.com/googleapis/java-bigquery/commit/bf5d444103f5e6dc3db1751a9e3fd382cf42cb5d)) +* update dependency com.google.cloud:google-cloud-bigtable to v1.21.3 ([#1211](https://www.github.com/googleapis/java-bigquery/issues/1211)) ([6b85ab3](https://www.github.com/googleapis/java-bigquery/commit/6b85ab3daddeb490ccb2877a0ce22d1e1c403a07)) +* update dependency com.google.cloud:google-cloud-bigtable to v1.22.0 ([#1217](https://www.github.com/googleapis/java-bigquery/issues/1217)) ([503d32b](https://www.github.com/googleapis/java-bigquery/commit/503d32bdaede96207203d79eebbf6964a199b8f8)) + +### [1.127.11](https://www.github.com/googleapis/java-bigquery/compare/v1.127.10...v1.127.11) (2021-03-25) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v1.21.2 ([#1195](https://www.github.com/googleapis/java-bigquery/issues/1195)) ([8e3618e](https://www.github.com/googleapis/java-bigquery/commit/8e3618ec69009712c2616a52efdca4eb0dfbc5d8)) + +### [1.127.10](https://www.github.com/googleapis/java-bigquery/compare/v1.127.9...v1.127.10) (2021-03-19) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210313-1.31.0 ([#1187](https://www.github.com/googleapis/java-bigquery/issues/1187)) ([bba71f2](https://www.github.com/googleapis/java-bigquery/commit/bba71f2f475f6dd8bf2a9d567aaed26377543abe)) + +### [1.127.9](https://www.github.com/googleapis/java-bigquery/compare/v1.127.8...v1.127.9) (2021-03-17) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v1.21.1 ([#1181](https://www.github.com/googleapis/java-bigquery/issues/1181)) ([fbbf96a](https://www.github.com/googleapis/java-bigquery/commit/fbbf96aecd3b49adb1d180652eff02a562449cce)) +* update dependency com.google.cloud:google-cloud-storage to v1.113.14 ([#1176](https://www.github.com/googleapis/java-bigquery/issues/1176)) ([c84fc5c](https://www.github.com/googleapis/java-bigquery/commit/c84fc5c5f7f9b5d30e0dae921542a53cb20b8f37)) + +### [1.127.8](https://www.github.com/googleapis/java-bigquery/compare/v1.127.7...v1.127.8) (2021-03-11) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210303-1.31.0 ([#1171](https://www.github.com/googleapis/java-bigquery/issues/1171)) ([ba27951](https://www.github.com/googleapis/java-bigquery/commit/ba27951225418b49635607a7e9a913f6b2328575)) + +### [1.127.7](https://www.github.com/googleapis/java-bigquery/compare/v1.127.6...v1.127.7) (2021-03-10) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-bigtable to v1.21.0 ([#1159](https://www.github.com/googleapis/java-bigquery/issues/1159)) ([624cefc](https://www.github.com/googleapis/java-bigquery/commit/624cefc8e658f5fc47c4a29d1dd71a76c46edfa9)) +* update dependency com.google.cloud:google-cloud-shared-dependencies to v0.20.1 ([#1166](https://www.github.com/googleapis/java-bigquery/issues/1166)) ([58e2ddf](https://www.github.com/googleapis/java-bigquery/commit/58e2ddf2250d135a8e16afc98bbed9bb62ba38cf)) +* update dependency com.google.cloud:google-cloud-storage to v1.113.13 
([#1164](https://www.github.com/googleapis/java-bigquery/issues/1164)) ([34c6843](https://www.github.com/googleapis/java-bigquery/commit/34c684397cf4ddb1fb2e7b7cac68ef5c12dc8b92)) + +### [1.127.6](https://www.github.com/googleapis/java-bigquery/compare/v1.127.5...v1.127.6) (2021-03-02) + + +### Dependencies + +* update dependency com.google.apis:google-api-services-bigquery to v2-rev20210219-1.31.0 ([#1149](https://www.github.com/googleapis/java-bigquery/issues/1149)) ([5c7e32f](https://www.github.com/googleapis/java-bigquery/commit/5c7e32f890bb1fa5cc25641f8a73afb775eeb0a8)) +* update dependency com.google.cloud:google-cloud-storage to v1.113.12 ([#1153](https://www.github.com/googleapis/java-bigquery/issues/1153)) ([9a058a7](https://www.github.com/googleapis/java-bigquery/commit/9a058a746a27d34829e12e4e3ff4a838560181b1)) +* update jmh.version to v1.28 ([#1151](https://www.github.com/googleapis/java-bigquery/issues/1151)) ([4cafa86](https://www.github.com/googleapis/java-bigquery/commit/4cafa863f2bcc1ae36c493c616aea6c699242015)) + +### [1.127.5](https://www.github.com/googleapis/java-bigquery/compare/v1.127.4...v1.127.5) (2021-02-25) + + +### Dependencies + +* update dependency com.google.cloud:google-cloud-shared-dependencies to v0.20.0 ([#1141](https://www.github.com/googleapis/java-bigquery/issues/1141)) ([b5c90db](https://www.github.com/googleapis/java-bigquery/commit/b5c90db802d19d1d3426cc4228061f7b6aafa28e)) + ### [1.127.4](https://www.github.com/googleapis/java-bigquery/compare/v1.127.3...v1.127.4) (2021-02-23) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f2dbdee06b..5456fad053 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -18,9 +18,13 @@ again. ## Code reviews All submissions, including submissions by project members, require review. We -use GitHub pull requests for this purpose. Consult -[GitHub Help](https://help.github.com/articles/about-pull-requests/) for more -information on using pull requests. +use GitHub pull requests for this purpose. Consult the +[GitHub Help: about pull requests](https://help.github.com/articles/about-pull-requests/) +article for more information on using pull requests. If you do not have +permission to create a branch, fork the repository and submit a pull +request from your fork. Consult the +[GitHub Help: about forks](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/working-with-forks/fork-a-repo#about-forks) +article for more information. ## Community Guidelines @@ -53,12 +57,12 @@ mvn -Penable-integration-tests clean verify ## Code Samples -Code Samples must be bundled in separate Maven modules, and guarded by a -Maven profile with the name `enable-samples`. +All code samples must comply with the [Java sample formatting guide][3]. +Code samples must be bundled in separate Maven modules. The samples must be separate from the primary project for a few reasons: -1. Primary projects have a minimum Java version of Java 7 whereas samples have - a minimum Java version of Java 8. Due to this we need the ability to +1. Primary projects have a minimum Java version of Java 8, whereas samples can have + a minimum Java version of Java 11. Because of this, we need the ability to selectively exclude samples from a build run. 2. Many code samples depend on external GCP services and need credentials to access the service.
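To make the credential requirement in item 2 concrete, here is a minimal sketch of how a sample's client picks up those credentials at runtime. It assumes `GOOGLE_APPLICATION_CREDENTIALS` points at a service-account key file, as in the build commands in the next hunk; the `AdcCheck` class name is illustrative only and not part of the repository:

```java
import com.google.auth.oauth2.GoogleCredentials;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import java.io.IOException;

// Illustrative only: shows how a sample resolves its credentials at runtime.
public class AdcCheck {
  public static void main(String[] args) throws IOException {
    // Application Default Credentials: reads the key file named by
    // GOOGLE_APPLICATION_CREDENTIALS, falling back to gcloud user credentials
    // or the metadata server when the variable is unset.
    GoogleCredentials credentials = GoogleCredentials.getApplicationDefault();

    // The BigQuery client performs the same resolution implicitly; passing
    // the credentials explicitly just makes the dependency visible.
    BigQuery bigquery =
        BigQueryOptions.newBuilder().setCredentials(credentials).build().getService();
    System.out.println("Resolved project: " + bigquery.getOptions().getProjectId());
  }
}
```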
@@ -68,72 +72,25 @@ The samples must be separate from the primary project for a few reasons: ### Building ```bash -mvn -Penable-samples clean verify +mvn clean verify ``` Some samples require access to GCP services and require a service account: ```bash export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service/account.json -mvn -Penable-samples clean verify +mvn clean verify ``` -### Profile Config - -1. To add samples in a profile to your Maven project, add the following to your -`pom.xml` - - ```xml - <profiles> - [...] - <profile> - <id>enable-samples</id> - <modules> - <module>sample</module> - </modules> - </profile> - [...] - </profiles> - ``` - -2. [Activate](#profile-activation) the profile. -3. Define your samples in a normal Maven project in the `samples/` directory. - ### Code Formatting Code in this repo is formatted with [google-java-format](https://github.com/google/google-java-format). To run formatting on your project, you can run: ``` -mvn com.coveo:fmt-maven-plugin:format +mvn com.spotify.fmt:fmt-maven-plugin:format ``` -### Profile Activation - -To include code samples when building and testing the project, enable the -`enable-samples` Maven profile. - -#### Command line - -To activate the Maven profile on the command line add `-Penable-samples` to your -Maven command. - -#### Maven `settings.xml` - -To activate the Maven profile in your `~/.m2/settings.xml` add an entry of -`enable-samples` following the instructions in [Active Profiles][2]. - -This method has the benefit of applying to all projects you build (and is -respected by IntelliJ IDEA) and is recommended if you are going to be -contributing samples to several projects. - -#### IntelliJ IDEA - -To activate the Maven Profile inside IntelliJ IDEA, follow the instructions in -[Activate Maven profiles][3] to activate `enable-samples`. - [1]: https://cloud.google.com/docs/authentication/getting-started#creating_a_service_account [2]: https://maven.apache.org/settings.html#Active_Profiles -[3]: https://www.jetbrains.com/help/idea/work-with-maven-profiles.html#activate_maven_profiles +[3]: https://github.com/GoogleCloudPlatform/java-docs-samples/blob/main/SAMPLE_FORMAT.md \ No newline at end of file diff --git a/README.md b/README.md index 92b0437ea6..297a8826ec 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +**_THIS REPOSITORY IS DEPRECATED. ALL OF ITS CONTENT AND HISTORY HAVE BEEN MOVED TO [GOOGLE-CLOUD-JAVA](https://github.com/googleapis/google-cloud-java/tree/main/java-bigquery)_** + # Google Cloud BigQuery Client for Java Java idiomatic client for [Cloud BigQuery][product-docs]. @@ -8,9 +10,11 @@ Java idiomatic client for [Cloud BigQuery][product-docs].
- [Product Documentation][product-docs] - [Client Library Documentation][javadocs] + ## Quickstart -If you are using Maven with [BOM][libraries-bom], add this to your pom.xml file +If you are using Maven with the [BOM][libraries-bom], add this to your pom.xml file: + ```xml @@ -19,7 +23,7 @@ See https://github.com/GoogleCloudPlatform/cloud-opensource-java/wiki/The-Google <groupId>com.google.cloud</groupId> <artifactId>libraries-bom</artifactId> - <version>18.0.0</version> + <version>26.62.0</version> <type>pom</type> <scope>import</scope> </dependency> @@ -31,40 +35,51 @@ See https://github.com/GoogleCloudPlatform/cloud-opensource-java/wiki/The-Google <groupId>com.google.cloud</groupId> <artifactId>google-cloud-bigquery</artifactId> </dependency> + ``` -If you are using Maven without BOM, add this to your dependencies: +If you are using Maven without the BOM, add this to your dependencies: + + ```xml <dependency> <groupId>com.google.cloud</groupId> <artifactId>google-cloud-bigquery</artifactId> - <version>1.127.4</version> + <version>2.42.2</version> </dependency> ``` -If you are using Gradle 5.x or later, add this to your dependencies +If you are using Gradle 5.x or later, add this to your dependencies: + ```Groovy -implementation platform('com.google.cloud:libraries-bom:18.0.0') +implementation platform('com.google.cloud:libraries-bom:26.62.0') -compile 'com.google.cloud:google-cloud-bigquery' +implementation 'com.google.cloud:google-cloud-bigquery' ``` -If you are using Gradle without BOM, add this to your dependencies +If you are using Gradle without the BOM, add this to your dependencies: + ```Groovy -compile 'com.google.cloud:google-cloud-bigquery:1.127.4' +implementation 'com.google.cloud:google-cloud-bigquery:2.42.2' ``` -If you are using SBT, add this to your dependencies +If you are using SBT, add this to your dependencies: + ```Scala -libraryDependencies += "com.google.cloud" % "google-cloud-bigquery" % "1.127.4" +libraryDependencies += "com.google.cloud" % "google-cloud-bigquery" % "2.42.2" ``` + ## Authentication See the [Authentication][authentication] section in the base directory's README. +## Authorization + +The client application making API calls must be granted the [authorization scopes][auth-scopes] required for the desired Cloud BigQuery APIs, and the authenticated principal must have the [IAM role(s)][predefined-iam-roles] required to access GCP resources through Cloud BigQuery API calls (see the sketch below the samples overview). + ## Getting Started ### Prerequisites @@ -72,7 +87,7 @@ See the [Authentication][authentication] section in the base directory's README. You will need a [Google Cloud Platform Console][developer-console] project with the Cloud BigQuery [API enabled][enable-api]. You will need to [enable billing][enable-billing] to use Google Cloud BigQuery. [Follow these instructions][create-project] to get your project set up. You will also need to set up the local development environment by -[installing the Google Cloud SDK][cloud-sdk] and running the following commands in command line: +[installing the Google Cloud Command Line Interface][cloud-cli] and running the following commands on the command line: `gcloud auth login` and `gcloud config set project [YOUR PROJECT ID]`. ### Installation and setup @@ -96,143 +111,150 @@ use this Cloud BigQuery Client Library. ## Samples -Samples are in the [`samples/`](https://github.com/googleapis/java-bigquery/tree/master/samples) directory. The samples' `README.md` -has instructions for running the samples. +Samples are in the [`samples/`](https://github.com/googleapis/java-bigquery/tree/main/samples) directory.
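As a bridge between the Quickstart, Authorization, and Samples sections above, here is a minimal end-to-end sketch of authorizing a client and running a query. It assumes Application Default Credentials are available and queries the public `bigquery-public-data.usa_names.usa_1910_2013` table; the class name and query are illustrative, not taken from the sample catalog below:

```java
import com.google.auth.oauth2.GoogleCredentials;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.QueryJobConfiguration;
import java.util.Collections;

public class ScopedQuerySketch {
  public static void main(String[] args) throws Exception {
    // Grant the client the BigQuery OAuth scope explicitly; the calling
    // principal still needs an IAM role such as roles/bigquery.user.
    GoogleCredentials credentials =
        GoogleCredentials.getApplicationDefault()
            .createScoped(Collections.singletonList("https://www.googleapis.com/auth/bigquery"));

    BigQuery bigquery =
        BigQueryOptions.newBuilder().setCredentials(credentials).build().getService();

    QueryJobConfiguration queryConfig =
        QueryJobConfiguration.newBuilder(
                "SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` LIMIT 10")
            .build();

    // query() starts the job and blocks until the results are ready.
    bigquery
        .query(queryConfig)
        .iterateAll()
        .forEach(row -> System.out.println(row.get("name").getStringValue()));
  }
}
```

Run it with the BOM-managed `google-cloud-bigquery` dependency from the Quickstart on the classpath.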
| Sample | Source Code | Try it | | --------------------------- | --------------------------------- | ------ | -| Add Column Load Append | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java) | -| Add Empty Column | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/AddEmptyColumn.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AddEmptyColumn.java) | -| Alter Materialized View | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/AlterMaterializedView.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AlterMaterializedView.java) | -| Auth Drive Scope | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/AuthDriveScope.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AuthDriveScope.java) | -| Auth Snippets | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/AuthSnippets.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AuthSnippets.java) | -| Auth User Flow | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/AuthUserFlow.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AuthUserFlow.java) | -| Auth User Query | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/AuthUserQuery.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AuthUserQuery.java) | -| Authorized View Tutorial | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/AuthorizedViewTutorial.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AuthorizedViewTutorial.java) | -| Browse Table | [source 
code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/BrowseTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/BrowseTable.java) | -| Cancel Job | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CancelJob.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CancelJob.java) | -| Copy Multiple Tables | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CopyMultipleTables.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CopyMultipleTables.java) | -| Copy Table | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CopyTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CopyTable.java) | -| Copy Table Cmek | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CopyTableCmek.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CopyTableCmek.java) | -| Create Clustered Table | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java) | -| Create Dataset | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateDataset.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateDataset.java) | -| Create Dataset Aws | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateDatasetAws.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateDatasetAws.java) | -| Create External Table Aws | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateExternalTableAws.java) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateExternalTableAws.java) | -| Create Iam Policy | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateIamPolicy.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateIamPolicy.java) | -| Create Job | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateJob.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateJob.java) | -| Create Materialized View | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateMaterializedView.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateMaterializedView.java) | -| Create Model | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateModel.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateModel.java) | -| Create Partitioned Table | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java) | -| Create Range Partitioned Table | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateRangePartitionedTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateRangePartitionedTable.java) | -| Create Routine | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateRoutine.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateRoutine.java) | -| Create Routine Ddl | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateRoutineDdl.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateRoutineDdl.java) | 
-| Create Table | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateTable.java) | -| Create Table Cmek | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateTableCmek.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateTableCmek.java) | -| Create Table External Hive Partitioned | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateTableExternalHivePartitioned.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateTableExternalHivePartitioned.java) | -| Create Table Without Schema | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateTableWithoutSchema.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateTableWithoutSchema.java) | -| Create View | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/CreateView.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateView.java) | -| Dataset Exists | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/DatasetExists.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DatasetExists.java) | -| Ddl Create View | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/DdlCreateView.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DdlCreateView.java) | -| Delete Dataset | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/DeleteDataset.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteDataset.java) | -| Delete Dataset And Contents | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/DeleteDatasetAndContents.java) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteDatasetAndContents.java) |
-| Delete Label Dataset | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/DeleteLabelDataset.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteLabelDataset.java) |
-| Delete Label Table | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/DeleteLabelTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteLabelTable.java) |
-| Delete Materialized View | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/DeleteMaterializedView.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteMaterializedView.java) |
-| Delete Model | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/DeleteModel.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteModel.java) |
-| Delete Routine | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/DeleteRoutine.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteRoutine.java) |
-| Delete Table | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/DeleteTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteTable.java) |
-| Export Query Results To S3 | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/ExportQueryResultsToS3.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ExportQueryResultsToS3.java) |
-| Extract Model | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/ExtractModel.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ExtractModel.java) |
-| Extract Table Compressed | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/ExtractTableCompressed.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ExtractTableCompressed.java) |
-| Extract Table To Csv | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToCsv.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ExtractTableToCsv.java) |
-| Extract Table To Json | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java) |
-| Get Dataset Info | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/GetDatasetInfo.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetDatasetInfo.java) |
-| Get Dataset Labels | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/GetDatasetLabels.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetDatasetLabels.java) |
-| Get Job | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/GetJob.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetJob.java) |
-| Get Model | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/GetModel.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetModel.java) |
-| Get Routine | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/GetRoutine.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetRoutine.java) |
-| Get Table | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/GetTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetTable.java) |
-| Get Table Labels | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/GetTableLabels.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetTableLabels.java) |
-| Get View | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/GetView.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetView.java) |
-| Grant View Access | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/GrantViewAccess.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GrantViewAccess.java) |
-| Inserting Data Types | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/InsertingDataTypes.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/InsertingDataTypes.java) |
-| Label Dataset | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LabelDataset.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LabelDataset.java) |
-| Label Table | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LabelTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LabelTable.java) |
-| List Datasets | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/ListDatasets.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ListDatasets.java) |
-| List Datasets By Label | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/ListDatasetsByLabel.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ListDatasetsByLabel.java) |
-| List Jobs | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/ListJobs.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ListJobs.java) |
-| List Models | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/ListModels.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ListModels.java) |
-| List Routines | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/ListRoutines.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ListRoutines.java) |
-| List Tables | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/ListTables.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ListTables.java) |
-| Load Avro From Gcs | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadAvroFromGcs.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadAvroFromGcs.java) |
-| Load Avro From Gcs Truncate | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadAvroFromGcsTruncate.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadAvroFromGcsTruncate.java) |
-| Load Csv From Gcs | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadCsvFromGcs.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadCsvFromGcs.java) |
-| Load Csv From Gcs Autodetect | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadCsvFromGcsAutodetect.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadCsvFromGcsAutodetect.java) |
-| Load Csv From Gcs Truncate | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadCsvFromGcsTruncate.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadCsvFromGcsTruncate.java) |
-| Load Json From Gcs | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcs.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcs.java) |
-| Load Json From Gcs Autodetect | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcsAutodetect.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcsAutodetect.java) |
-| Load Json From Gcs Cmek | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcsCmek.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcsCmek.java) |
-| Load Json From Gcs Truncate | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcsTruncate.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcsTruncate.java) |
-| Load Local File | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java) |
-| Load Orc From Gcs | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadOrcFromGcs.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadOrcFromGcs.java) |
-| Load Orc From Gcs Truncate | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadOrcFromGcsTruncate.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadOrcFromGcsTruncate.java) |
-| Load Parquet | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java) |
-| Load Parquet Replace Table | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java) |
-| Load Partitioned Table | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadPartitionedTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadPartitionedTable.java) |
-| Load Table Clustered | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java) |
-| Nested Repeated Schema | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/NestedRepeatedSchema.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/NestedRepeatedSchema.java) |
-| Query | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/Query.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/Query.java) |
-| Query Batch | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryBatch.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryBatch.java) |
-| Query Clustered Table | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryClusteredTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryClusteredTable.java) |
-| Query Destination Table Cmek | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryDestinationTableCmek.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryDestinationTableCmek.java) |
-| Query Disable Cache | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryDisableCache.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryDisableCache.java) |
-| Query Dry Run | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryDryRun.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryDryRun.java) |
-| Query External Bigtable Perm | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtablePerm.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtablePerm.java) |
-| Query External Bigtable Temp | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtableTemp.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtableTemp.java) |
-| Query External Gcs Perm | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryExternalGcsPerm.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryExternalGcsPerm.java) |
-| Query External Gcs Temp | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryExternalGcsTemp.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryExternalGcsTemp.java) |
-| Query External Sheets Perm | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryExternalSheetsPerm.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryExternalSheetsPerm.java) |
-| Query External Sheets Temp | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryExternalSheetsTemp.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryExternalSheetsTemp.java) |
-| Query External Table Aws | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryExternalTableAws.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryExternalTableAws.java) |
-| Query Large Results | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryLargeResults.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryLargeResults.java) |
-| Query Materialized View | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryMaterializedView.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryMaterializedView.java) |
-| Query Pagination | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryPagination.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryPagination.java) |
-| Query Partitioned Table | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryPartitionedTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryPartitionedTable.java) |
-| Query Script | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryScript.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryScript.java) |
-| Query Total Rows | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryTotalRows.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryTotalRows.java) |
-| Query With Array Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryWithArrayParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithArrayParameters.java) |
-| Query With Named Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryWithNamedParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithNamedParameters.java) |
-| Query With Named Types Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryWithNamedTypesParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithNamedTypesParameters.java) |
-| Query With Positional Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryWithPositionalParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithPositionalParameters.java) |
-| Query With Positional Types Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryWithPositionalTypesParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithPositionalTypesParameters.java) |
-| Query With Structs Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryWithStructsParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithStructsParameters.java) |
-| Query With Timestamp Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QueryWithTimestampParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithTimestampParameters.java) |
-| Quickstart Sample | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/QuickstartSample.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QuickstartSample.java) |
-| Relax Column Load Append | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/RelaxColumnLoadAppend.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/RelaxColumnLoadAppend.java) |
-| Relax Column Mode | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/RelaxColumnMode.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/RelaxColumnMode.java) |
-| Relax Table Query | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/RelaxTableQuery.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/RelaxTableQuery.java) |
-| Resource Clean Up | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/ResourceCleanUp.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ResourceCleanUp.java) |
-| Run Legacy Query | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/RunLegacyQuery.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/RunLegacyQuery.java) |
-| Save Query To Table | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/SaveQueryToTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/SaveQueryToTable.java) |
-| Simple App | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java) |
-| Simple Query | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/SimpleQuery.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/SimpleQuery.java) |
-| Table Exists | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/TableExists.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/TableExists.java) |
-| Table Insert Rows | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java) |
-| Table Insert Rows Without Row Ids | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/TableInsertRowsWithoutRowIds.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/TableInsertRowsWithoutRowIds.java) |
-| Undelete Table | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/UndeleteTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UndeleteTable.java) |
-| Update Dataset Access | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java) |
-| Update Dataset Description | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetDescription.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetDescription.java) |
-| Update Dataset Expiration | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java) |
-| Update Dataset Partition Expiration | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetPartitionExpiration.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetPartitionExpiration.java) |
-| Update Iam Policy | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/UpdateIamPolicy.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateIamPolicy.java) |
-| Update Model Description | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/UpdateModelDescription.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateModelDescription.java) |
-| Update Routine | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/UpdateRoutine.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateRoutine.java) |
-| Update Table Cmek | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/UpdateTableCmek.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateTableCmek.java) |
-| Update Table Description | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/UpdateTableDescription.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateTableDescription.java) |
-| Update Table Dml | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/UpdateTableDml.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateTableDml.java) |
-| Update Table Expiration | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java) |
-| Update Table Require Partition Filter | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/UpdateTableRequirePartitionFilter.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateTableRequirePartitionFilter.java) |
-| Update View Query | [source code](https://github.com/googleapis/java-bigquery/blob/master/samples/snippets/src/main/java/com/example/bigquery/UpdateViewQuery.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateViewQuery.java) |
+| Add Column Load Append | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AddColumnLoadAppend.java) |
+| Add Empty Column | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/AddEmptyColumn.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AddEmptyColumn.java) |
+| Auth Drive Scope | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/AuthDriveScope.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AuthDriveScope.java) |
+| Auth Snippets | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/AuthSnippets.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AuthSnippets.java) |
+| Auth User Flow | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/AuthUserFlow.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AuthUserFlow.java) |
+| Auth User Query | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/AuthUserQuery.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AuthUserQuery.java) |
+| Authorize Dataset | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/AuthorizeDataset.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AuthorizeDataset.java) |
+| Authorized View Tutorial | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/AuthorizedViewTutorial.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/AuthorizedViewTutorial.java) |
+| Browse Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/BrowseTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/BrowseTable.java) |
+| Cancel Job | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CancelJob.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CancelJob.java) |
+| Copy Multiple Tables | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CopyMultipleTables.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CopyMultipleTables.java) |
+| Copy Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CopyTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CopyTable.java) |
+| Copy Table Cmek | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CopyTableCmek.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CopyTableCmek.java) |
+| Create And Query Repeated Record Field | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateAndQueryRepeatedRecordField.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateAndQueryRepeatedRecordField.java) |
+| Create Clustered Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateClusteredTable.java) |
+| Create Dataset | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateDataset.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateDataset.java) |
+| Create Dataset Aws | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateDatasetAws.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateDatasetAws.java) |
+| Create Dataset With Regional Endpoint | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateDatasetWithRegionalEndpoint.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateDatasetWithRegionalEndpoint.java) |
+| Create External Table Aws | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateExternalTableAws.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateExternalTableAws.java) |
+| Create Iam Policy | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateIamPolicy.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateIamPolicy.java) |
+| Create Job | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateJob.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateJob.java) |
+| Create Materialized View | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateMaterializedView.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateMaterializedView.java) |
+| Create Model | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateModel.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateModel.java) |
+| Create Partitioned Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreatePartitionedTable.java) |
+| Create Range Partitioned Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateRangePartitionedTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateRangePartitionedTable.java) |
+| Create Routine | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateRoutine.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateRoutine.java) |
+| Create Routine Ddl | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateRoutineDdl.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateRoutineDdl.java) |
+| Create Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateTable.java) |
+| Create Table Cmek | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateTableCmek.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateTableCmek.java) |
+| Create Table External Hive Partitioned | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateTableExternalHivePartitioned.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateTableExternalHivePartitioned.java) |
+| Create Table Without Schema | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateTableWithoutSchema.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateTableWithoutSchema.java) |
+| Create Tables With Primary And Foreign Keys | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateTablesWithPrimaryAndForeignKeys.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateTablesWithPrimaryAndForeignKeys.java) |
+| Create View | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/CreateView.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/CreateView.java) |
+| Dataset Exists | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/DatasetExists.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DatasetExists.java) |
+| Ddl Create View | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/DdlCreateView.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DdlCreateView.java) |
+| Delete Dataset | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/DeleteDataset.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteDataset.java) |
+| Delete Dataset And Contents | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/DeleteDatasetAndContents.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteDatasetAndContents.java) |
+| Delete Label Dataset | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/DeleteLabelDataset.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteLabelDataset.java) |
+| Delete Label Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/DeleteLabelTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteLabelTable.java) |
+| Delete Materialized View | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/DeleteMaterializedView.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteMaterializedView.java) |
+| Delete Model | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/DeleteModel.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteModel.java) |
+| Delete Routine | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/DeleteRoutine.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteRoutine.java) |
+| Delete Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/DeleteTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/DeleteTable.java) |
+| Export Query Results To S3 | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/ExportQueryResultsToS3.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ExportQueryResultsToS3.java) |
+| Extract Model | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/ExtractModel.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ExtractModel.java) |
+| Extract Table Compressed | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/ExtractTableCompressed.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ExtractTableCompressed.java) |
+| Extract Table To Csv | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToCsv.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ExtractTableToCsv.java) |
+| Extract Table To Json | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ExtractTableToJson.java) |
+| Get Dataset Info | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/GetDatasetInfo.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetDatasetInfo.java) |
+| Get Dataset Labels | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/GetDatasetLabels.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetDatasetLabels.java) |
+| Get Job | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/GetJob.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetJob.java) |
+| Get Model | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/GetModel.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetModel.java) |
+| Get Routine | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/GetRoutine.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetRoutine.java) |
+| Get Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/GetTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetTable.java) |
+| Get Table Labels | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/GetTableLabels.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetTableLabels.java) |
+| Get View | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/GetView.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GetView.java) |
+| Grant View Access | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/GrantViewAccess.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/GrantViewAccess.java) |
+| Inserting Data Types | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/InsertingDataTypes.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/InsertingDataTypes.java) |
+| Label Dataset | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LabelDataset.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LabelDataset.java) |
+| Label Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LabelTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LabelTable.java) |
+| List Datasets | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/ListDatasets.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ListDatasets.java) |
+| List Datasets By Label | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/ListDatasetsByLabel.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ListDatasetsByLabel.java) |
+| List Jobs | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/ListJobs.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ListJobs.java) |
+| List Models | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/ListModels.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ListModels.java) |
+| List Routines | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/ListRoutines.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ListRoutines.java) |
+| List Tables | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/ListTables.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ListTables.java) |
+| Load Avro From Gcs | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadAvroFromGcs.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadAvroFromGcs.java) |
+| Load Avro From Gcs Truncate | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadAvroFromGcsTruncate.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadAvroFromGcsTruncate.java) |
+| Load Csv From Gcs | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadCsvFromGcs.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadCsvFromGcs.java) |
+| Load Csv From Gcs Autodetect | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadCsvFromGcsAutodetect.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadCsvFromGcsAutodetect.java) |
+| Load Csv From Gcs Truncate | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadCsvFromGcsTruncate.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadCsvFromGcsTruncate.java) |
+| Load Json From Gcs | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcs.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcs.java) |
+| Load Json From Gcs Autodetect | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcsAutodetect.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcsAutodetect.java) |
+| Load Json From Gcs Cmek | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcsCmek.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcsCmek.java) |
+| Load Json From Gcs Truncate | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcsTruncate.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadJsonFromGcsTruncate.java) |
+| Load Local File | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadLocalFile.java) |
+| Load Local File In Session | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFileInSession.java) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadLocalFileInSession.java) | +| Load Orc From Gcs | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadOrcFromGcs.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadOrcFromGcs.java) | +| Load Orc From Gcs Truncate | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadOrcFromGcsTruncate.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadOrcFromGcsTruncate.java) | +| Load Parquet | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadParquet.java) | +| Load Parquet Replace Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadParquetReplaceTable.java) | +| Load Partitioned Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadPartitionedTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadPartitionedTable.java) | +| Load Table Clustered | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/LoadTableClustered.java) | +| Nested Repeated Schema | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/NestedRepeatedSchema.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/NestedRepeatedSchema.java) | +| Query Batch | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryBatch.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryBatch.java) | +| 
Query Clustered Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryClusteredTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryClusteredTable.java) | +| Query Destination Table Cmek | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryDestinationTableCmek.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryDestinationTableCmek.java) | +| Query Disable Cache | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryDisableCache.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryDisableCache.java) | +| Query Dry Run | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryDryRun.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryDryRun.java) | +| Query External Bigtable Perm | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtablePerm.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtablePerm.java) | +| Query External Bigtable Temp | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtableTemp.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtableTemp.java) | +| Query External Gcs Perm | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryExternalGcsPerm.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryExternalGcsPerm.java) | +| Query External Gcs Temp | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryExternalGcsTemp.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryExternalGcsTemp.java) | +| Query External Sheets Perm | [source 
code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryExternalSheetsPerm.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryExternalSheetsPerm.java) | +| Query External Sheets Temp | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryExternalSheetsTemp.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryExternalSheetsTemp.java) | +| Query External Table Aws | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryExternalTableAws.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryExternalTableAws.java) | +| Query Large Results | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryLargeResults.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryLargeResults.java) | +| Query Materialized View | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryMaterializedView.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryMaterializedView.java) | +| Query Pagination | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryPagination.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryPagination.java) | +| Query Partitioned Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryPartitionedTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryPartitionedTable.java) | +| Query Script | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryScript.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryScript.java) | +| Query Short Mode | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryShortMode.java) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryShortMode.java) | +| Query Total Rows | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryTotalRows.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryTotalRows.java) | +| Query With Array Of Structs Named Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryWithArrayOfStructsNamedParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithArrayOfStructsNamedParameters.java) | +| Query With Array Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryWithArrayParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithArrayParameters.java) | +| Query With Named Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryWithNamedParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithNamedParameters.java) | +| Query With Named Types Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryWithNamedTypesParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithNamedTypesParameters.java) | +| Query With Positional Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryWithPositionalParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithPositionalParameters.java) | +| Query With Positional Types Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryWithPositionalTypesParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithPositionalTypesParameters.java) | +| Query With Structs Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryWithStructsParameters.java) | [![Open in 
Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithStructsParameters.java) | +| Query With Timestamp Parameters | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QueryWithTimestampParameters.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QueryWithTimestampParameters.java) | +| Quickstart Sample | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/QuickstartSample.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/QuickstartSample.java) | +| Relax Column Load Append | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/RelaxColumnLoadAppend.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/RelaxColumnLoadAppend.java) | +| Relax Column Mode | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/RelaxColumnMode.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/RelaxColumnMode.java) | +| Relax Table Query | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/RelaxTableQuery.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/RelaxTableQuery.java) | +| Resource Clean Up | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/ResourceCleanUp.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/ResourceCleanUp.java) | +| Run Legacy Query | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/RunLegacyQuery.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/RunLegacyQuery.java) | +| Save Query To Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/SaveQueryToTable.java) | [![Open in Cloud 
Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/SaveQueryToTable.java) | +| Set User Agent | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/SetUserAgent.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/SetUserAgent.java) | +| Simple App | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java) | +| Simple Query | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/SimpleQuery.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/SimpleQuery.java) | +| Simple Query Connection Read Api | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/SimpleQueryConnectionReadApi.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/SimpleQueryConnectionReadApi.java) | +| Table Exists | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/TableExists.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/TableExists.java) | +| Table Insert Rows | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java) | +| Table Insert Rows Without Row Ids | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/TableInsertRowsWithoutRowIds.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/TableInsertRowsWithoutRowIds.java) | +| Undelete Table | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/UndeleteTable.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UndeleteTable.java) | +| Update Dataset Access | [source 
code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetAccess.java) | +| Update Dataset Description | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetDescription.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetDescription.java) | +| Update Dataset Expiration | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetExpiration.java) | +| Update Dataset Partition Expiration | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetPartitionExpiration.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateDatasetPartitionExpiration.java) | +| Update Iam Policy | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/UpdateIamPolicy.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateIamPolicy.java) | +| Update Materialized View | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/UpdateMaterializedView.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateMaterializedView.java) | +| Update Model Description | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/UpdateModelDescription.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateModelDescription.java) | +| Update Routine | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/UpdateRoutine.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateRoutine.java) | +| Update Table Cmek | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/UpdateTableCmek.java) | [![Open 
in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateTableCmek.java) | +| Update Table Description | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/UpdateTableDescription.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateTableDescription.java) | +| Update Table Dml | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/UpdateTableDml.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateTableDml.java) | +| Update Table Expiration | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java) | +| Update Table Require Partition Filter | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/UpdateTableRequirePartitionFilter.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateTableRequirePartitionFilter.java) | +| Update View Query | [source code](https://github.com/googleapis/java-bigquery/blob/main/samples/snippets/src/main/java/com/example/bigquery/UpdateViewQuery.java) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/java-bigquery&page=editor&open_in_editor=samples/snippets/src/main/java/com/example/bigquery/UpdateViewQuery.java) | @@ -240,9 +262,49 @@ has instructions for running the samples. To get help, follow the instructions in the [shared Troubleshooting document][troubleshooting]. -## Java Versions +## Supported Java Versions + +Java 8 or above is required for using this client. + +Google's Java client libraries, +[Google Cloud Client Libraries][cloudlibs] +and +[Google Cloud API Libraries][apilibs], +follow the +[Oracle Java SE support roadmap][oracle] +(see the Oracle Java SE Product Releases section). + +### For new development + +In general, new feature development occurs with support for the lowest Java +LTS version covered by Oracle's Premier Support (which typically lasts 5 years +from initial General Availability). If the minimum required JVM for a given +library is changed, it is accompanied by a [semver][semver] major release. -Java 7 or above is required for using this client. +Java 11 and (in September 2021) Java 17 are the best choices for new +development. + +### Keeping production systems current + +Google tests its client libraries with all current LTS versions covered by +Oracle's Extended Support (which typically lasts 8 years from initial +General Availability). 
+
+#### Legacy support
+
+Google's client libraries support legacy versions of Java runtimes with
+long-term stable libraries that don't receive feature updates, on a
+best-efforts basis, as it may not be possible to backport all patches.
+
+Google provides updates on a best-efforts basis to apps that continue to use
+Java 7, though apps might need to upgrade to current versions of the library
+that support their JVM.
+
+#### Where to find specific information
+
+The latest versions and the supported Java versions are identified on
+the individual GitHub repository `github.com/GoogleAPIs/java-SERVICENAME`
+and on [google-cloud-java][g-c-j].

## Versioning

@@ -250,6 +312,7 @@ Java 7 or above is required for using this client.

This library follows [Semantic Versioning](http://semver.org/).

+

## Contributing

@@ -261,46 +324,36 @@ Please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more information.

+

## License

Apache 2.0 - See [LICENSE][license] for more information.

-## CI Status
-
-Java Version | Status
------------- | ------
-Java 7 | [![Kokoro CI][kokoro-badge-image-1]][kokoro-badge-link-1]
-Java 8 | [![Kokoro CI][kokoro-badge-image-2]][kokoro-badge-link-2]
-Java 8 OSX | [![Kokoro CI][kokoro-badge-image-3]][kokoro-badge-link-3]
-Java 8 Windows | [![Kokoro CI][kokoro-badge-image-4]][kokoro-badge-link-4]
-Java 11 | [![Kokoro CI][kokoro-badge-image-5]][kokoro-badge-link-5]
-
Java is a registered trademark of Oracle and/or its affiliates.

[product-docs]: https://cloud.google.com/bigquery
-[javadocs]: https://googleapis.dev/java/google-cloud-bigquery/latest
-[kokoro-badge-image-1]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java7.svg
-[kokoro-badge-link-1]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java7.html
-[kokoro-badge-image-2]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java8.svg
-[kokoro-badge-link-2]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java8.html
-[kokoro-badge-image-3]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java8-osx.svg
-[kokoro-badge-link-3]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java8-osx.html
-[kokoro-badge-image-4]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java8-win.svg
-[kokoro-badge-link-4]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java8-win.html
-[kokoro-badge-image-5]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java11.svg
-[kokoro-badge-link-5]: http://storage.googleapis.com/cloud-devrel-public/java/badges/java-bigquery/java11.html
-[stability-image]: https://img.shields.io/badge/stability-ga-green
+[javadocs]: https://cloud.google.com/java/docs/reference/google-cloud-bigquery/latest/history
+[stability-image]: https://img.shields.io/badge/stability-stable-green
[maven-version-image]: https://img.shields.io/maven-central/v/com.google.cloud/google-cloud-bigquery.svg
-[maven-version-link]: https://search.maven.org/search?q=g:com.google.cloud%20AND%20a:google-cloud-bigquery&core=gav
+[maven-version-link]: https://central.sonatype.com/artifact/com.google.cloud/google-cloud-bigquery/2.42.2
[authentication]: https://github.com/googleapis/google-cloud-java#authentication
+[auth-scopes]: https://developers.google.com/identity/protocols/oauth2/scopes
+[predefined-iam-roles]: https://cloud.google.com/iam/docs/understanding-roles#predefined_roles +[iam-policy]: https://cloud.google.com/iam/docs/overview#cloud-iam-policy [developer-console]: https://console.developers.google.com/ [create-project]: https://cloud.google.com/resource-manager/docs/creating-managing-projects -[cloud-sdk]: https://cloud.google.com/sdk/ -[troubleshooting]: https://github.com/googleapis/google-cloud-common/blob/master/troubleshooting/readme.md#troubleshooting -[contributing]: https://github.com/googleapis/java-bigquery/blob/master/CONTRIBUTING.md -[code-of-conduct]: https://github.com/googleapis/java-bigquery/blob/master/CODE_OF_CONDUCT.md#contributor-code-of-conduct -[license]: https://github.com/googleapis/java-bigquery/blob/master/LICENSE +[cloud-cli]: https://cloud.google.com/cli +[troubleshooting]: https://github.com/googleapis/google-cloud-java/blob/main/TROUBLESHOOTING.md +[contributing]: https://github.com/googleapis/java-bigquery/blob/main/CONTRIBUTING.md +[code-of-conduct]: https://github.com/googleapis/java-bigquery/blob/main/CODE_OF_CONDUCT.md#contributor-code-of-conduct +[license]: https://github.com/googleapis/java-bigquery/blob/main/LICENSE [enable-billing]: https://cloud.google.com/apis/docs/getting-started#enabling_billing [enable-api]: https://console.cloud.google.com/flows/enableapi?apiid=bigquery.googleapis.com [libraries-bom]: https://github.com/GoogleCloudPlatform/cloud-opensource-java/wiki/The-Google-Cloud-Platform-Libraries-BOM [shell_img]: https://gstatic.com/cloudssh/images/open-btn.png + +[semver]: https://semver.org/ +[cloudlibs]: https://cloud.google.com/apis/docs/client-libraries-explained +[apilibs]: https://cloud.google.com/apis/docs/client-libraries-explained#google_api_client_libraries +[oracle]: https://www.oracle.com/java/technologies/java-se-support-roadmap.html +[g-c-j]: http://github.com/googleapis/google-cloud-java diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000000..8b58ae9c01 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,7 @@ +# Security Policy + +To report a security issue, please use [g.co/vulnz](https://g.co/vulnz). + +The Google Security Team will respond within 5 working days of your report on g.co/vulnz. + +We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue. 
diff --git a/benchmark/README.md b/benchmark/README.md
index 41e9c2fdac..d1a1ae1571 100644
--- a/benchmark/README.md
+++ b/benchmark/README.md
@@ -19,3 +19,10 @@ To run a benchmark jar, run the following command
 cd benchmark
 java -jar target/benchmark.jar
 ```
+
+To run ConnImplBenchmark, run the following command
+```
+# Run from the benchmark directory
+cd benchmark
+java -jar target/benchmark.jar com.google.cloud.bigquery.ConnImplBenchmark
+```
diff --git a/benchmark/pom.xml b/benchmark/pom.xml
index 43262bd161..0365138738 100644
--- a/benchmark/pom.xml
+++ b/benchmark/pom.xml
@@ -6,12 +6,12 @@
   <parent>
     <artifactId>google-cloud-bigquery-parent</artifactId>
     <groupId>com.google.cloud</groupId>
-    <version>1.127.5-SNAPSHOT</version>
+    <version>2.60.1-SNAPSHOT</version>
   </parent>
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    <jmh.version>1.26</jmh.version>
+    <jmh.version>1.37</jmh.version>
   </properties>
   <artifactId>benchmark</artifactId>
@@ -37,12 +37,21 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-compiler-plugin</artifactId>
-        <version>3.8.1</version>
+        <version>3.14.0</version>
+        <configuration>
+          <annotationProcessorPaths>
+            <path>
+              <groupId>org.openjdk.jmh</groupId>
+              <artifactId>jmh-generator-annprocess</artifactId>
+              <version>${jmh.version}</version>
+            </path>
+          </annotationProcessorPaths>
+        </configuration>
       </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-shade-plugin</artifactId>
-        <version>3.2.4</version>
+        <version>3.6.0</version>
         <executions>
           <execution>
             <phase>package</phase>
@@ -75,7 +84,7 @@
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-deploy-plugin</artifactId>
-        <version>2.8.2</version>
+        <version>3.1.4</version>
         <configuration>
           <skip>true</skip>
@@ -83,7 +92,7 @@
       <plugin>
         <groupId>org.sonatype.plugins</groupId>
         <artifactId>nexus-staging-maven-plugin</artifactId>
-        <version>1.6.8</version>
+        <version>1.7.0</version>
         <configuration>
           <skipNexusStagingDeployMojo>true</skipNexusStagingDeployMojo>
diff --git a/benchmark/src/main/java/com.google.cloud.bigquery/ConnImplBenchmark.java b/benchmark/src/main/java/com.google.cloud.bigquery/ConnImplBenchmark.java
new file mode 100644
index 0000000000..eb239463fd
--- /dev/null
+++ b/benchmark/src/main/java/com.google.cloud.bigquery/ConnImplBenchmark.java
@@ -0,0 +1,218 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.google.cloud.bigquery; + +import java.io.IOException; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.concurrent.TimeUnit; +import java.util.function.Function; +import java.util.logging.Level; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.Warmup; +import org.openjdk.jmh.infra.Blackhole; +import org.openjdk.jmh.runner.Runner; +import org.openjdk.jmh.runner.options.Options; +import org.openjdk.jmh.runner.options.OptionsBuilder; + +@Fork(value = 1) +@BenchmarkMode(Mode.AverageTime) +@Warmup(iterations = 1) +@Measurement(iterations = 3) +@State(Scope.Benchmark) +@OutputTimeUnit(TimeUnit.MILLISECONDS) +public class ConnImplBenchmark { + @Param({"500000", "1000000", "10000000", "50000000", "100000000"}) // 500K, 1M, 10M, 50M and 100M + public int rowLimit; + + private ConnectionSettings connectionSettingsReadAPIEnabled, connectionSettingsReadAPIDisabled; + private final String QUERY = + "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 LIMIT %s"; + + @Setup + public void setUp() throws IOException { + java.util.logging.Logger.getGlobal().setLevel(Level.ALL); + + connectionSettingsReadAPIEnabled = ConnectionSettings.newBuilder() + .setUseReadAPI(true) + .setMaxResults(500L) + .setJobTimeoutMs(Long.MAX_VALUE) + .build(); + connectionSettingsReadAPIDisabled = ConnectionSettings.newBuilder() + .setUseReadAPI(false) + .build(); + } + + @Benchmark + public void iterateRecordsWithBigQuery_Query(Blackhole blackhole) throws InterruptedException { + String selectQuery = String.format(QUERY, rowLimit); + BigQuery bigQuery = BigQueryOptions.getDefaultInstance().getService(); + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder(selectQuery).setUseLegacySql(false).build(); + TableResult result = bigQuery.query(config); + long hash = 0L; + int cnt = 0; + long lastTime = System.currentTimeMillis(); + System.out.println("\n Running"); + for (FieldValueList row : result.iterateAll()) { + hash += computeHash(row.get("vendor_id"), FieldValue::getStringValue); + hash += computeHash(row.get("pickup_datetime"), FieldValue::getStringValue); + hash += computeHash(row.get("dropoff_datetime"), FieldValue::getStringValue); + hash += computeHash(row.get("passenger_count"), FieldValue::getLongValue); + hash += computeHash(row.get("trip_distance"), FieldValue::getDoubleValue); + hash += computeHash(row.get("rate_code"), FieldValue::getStringValue); + hash += computeHash(row.get("store_and_fwd_flag"), FieldValue::getStringValue); + hash += computeHash(row.get("payment_type"), FieldValue::getStringValue); + hash += computeHash(row.get("fare_amount"), FieldValue::getDoubleValue); + hash += computeHash(row.get("extra"), FieldValue::getDoubleValue); + hash += computeHash(row.get("mta_tax"), FieldValue::getDoubleValue); + hash += computeHash(row.get("tip_amount"), FieldValue::getDoubleValue); + hash += computeHash(row.get("tolls_amount"), FieldValue::getDoubleValue); + hash += computeHash(row.get("imp_surcharge"), FieldValue::getDoubleValue); + hash += computeHash(row.get("airport_fee"), 
FieldValue::getDoubleValue); + hash += computeHash(row.get("total_amount"), FieldValue::getDoubleValue); + hash += computeHash(row.get("pickup_location_id"), FieldValue::getStringValue); + hash += computeHash(row.get("dropoff_location_id"), FieldValue::getStringValue); + hash += computeHash(row.get("data_file_year"), FieldValue::getLongValue); + hash += computeHash(row.get("data_file_month"), FieldValue::getLongValue); + + if (++cnt % 100_000 == 0) { + long now = System.currentTimeMillis(); + long duration = now - lastTime; + System.out.println("ROW " + cnt + " Time: " + duration + " ms"); + lastTime = now; + } + } + System.out.println(cnt + " records processed using bigquery.query"); + blackhole.consume(hash); + } + + @Benchmark + public void iterateRecordsUsingReadAPI(Blackhole blackhole) + throws InterruptedException, BigQuerySQLException { + Connection connectionReadAPIEnabled = + BigQueryOptions.getDefaultInstance() + .getService() + .createConnection(connectionSettingsReadAPIEnabled); + String selectQuery = String.format(QUERY, rowLimit); + long hash = 0L; + try { + BigQueryResult bigQueryResultSet = connectionReadAPIEnabled.executeSelect(selectQuery); + hash = getResultHash(bigQueryResultSet); + } catch (Exception e) { + e.printStackTrace(); + } finally { + connectionReadAPIEnabled.close(); // IMP to kill the bg workers + } + blackhole.consume(hash); + } + + @Benchmark + public void iterateRecordsWithoutUsingReadAPI(Blackhole blackhole) + throws InterruptedException, BigQuerySQLException { + Connection connectionReadAPIDisabled = + BigQueryOptions.getDefaultInstance() + .getService() + .createConnection(connectionSettingsReadAPIDisabled); + String selectQuery = String.format(QUERY, rowLimit); + long hash = 0L; + try { + BigQueryResult bigQueryResultSet = connectionReadAPIDisabled.executeSelect(selectQuery); + hash = getResultHash(bigQueryResultSet); + } catch (Exception e) { + e.printStackTrace(); + } finally { + connectionReadAPIDisabled.close(); // IMP to kill the bg workers + } + blackhole.consume(hash); + } + + private long getResultHash(BigQueryResult bigQueryResultSet) throws SQLException { + ResultSet rs = bigQueryResultSet.getResultSet(); + long hash = 0L; + int cnt = 0; + long lastTime = System.currentTimeMillis(); + System.out.println("\n Running"); + while (rs.next()) { + hash += computeHash(rs, "vendor_id", ResultSet::getString); + hash += computeHash(rs, "pickup_datetime", ResultSet::getLong); + hash += computeHash(rs, "dropoff_datetime", ResultSet::getLong); + hash += computeHash(rs, "passenger_count", ResultSet::getLong); + hash += computeHash(rs, "trip_distance", ResultSet::getDouble); + hash += computeHash(rs, "rate_code", ResultSet::getString); + hash += computeHash(rs, "store_and_fwd_flag", ResultSet::getString); + hash += computeHash(rs, "payment_type", ResultSet::getString); + hash += computeHash(rs, "fare_amount", ResultSet::getDouble); + hash += computeHash(rs, "extra", ResultSet::getDouble); + hash += computeHash(rs, "mta_tax", ResultSet::getDouble); + hash += computeHash(rs, "tip_amount", ResultSet::getDouble); + hash += computeHash(rs, "tolls_amount", ResultSet::getDouble); + hash += computeHash(rs, "imp_surcharge", ResultSet::getDouble); + hash += computeHash(rs, "airport_fee", ResultSet::getDouble); + hash += computeHash(rs, "total_amount", ResultSet::getDouble); + hash += computeHash(rs, "pickup_location_id", ResultSet::getString); + hash += computeHash(rs, "dropoff_location_id", ResultSet::getString); + hash += computeHash(rs, "data_file_year", 
ResultSet::getLong);
+      hash += computeHash(rs, "data_file_month", ResultSet::getLong);
+
+      if (++cnt % 100_000 == 0) {
+        long now = System.currentTimeMillis();
+        long duration = now - lastTime;
+        System.out.println("ROW " + cnt + " Time: " + duration + " ms");
+        lastTime = now;
+      }
+    }
+    return hash;
+  }
+
+  // Hashes one ResultSet column; extraction failures contribute 0 to the running hash.
+  private <T> long computeHash(
+      ResultSet rs, String columnName, SQLFunction<ResultSet, T> extractor) {
+    try {
+      T value = extractor.apply(rs, columnName);
+      return (value == null) ? 0 : value.hashCode();
+    } catch (SQLException e) {
+      return 0;
+    }
+  }
+
+  @FunctionalInterface
+  private interface SQLFunction<T, R> {
+    R apply(T t, String columnName) throws SQLException;
+  }
+
+  // Hashes one FieldValue using the given extractor; null values contribute 0.
+  private <T> long computeHash(FieldValue fieldValue, Function<FieldValue, T> extractor) {
+    if (fieldValue == null || fieldValue.isNull()) {
+      return 0;
+    }
+    T value = extractor.apply(fieldValue);
+    return (value == null) ? 0 : value.hashCode();
+  }
+
+  public static void main(String[] args) throws Exception {
+    Options opt = new OptionsBuilder().include(ConnImplBenchmark.class.getSimpleName()).build();
+    new Runner(opt).run();
+  }
+}
diff --git a/codecov.yaml b/codecov.yaml
deleted file mode 100644
index c00182958c..0000000000
--- a/codecov.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
----
-codecov:
-  ci:
-    - source.cloud.google.com
-coverage:
-  round: down
-  range: "50...100"
\ No newline at end of file
diff --git a/google-cloud-bigquery-bom/pom.xml b/google-cloud-bigquery-bom/pom.xml
new file mode 100644
index 0000000000..a53be70f12
--- /dev/null
+++ b/google-cloud-bigquery-bom/pom.xml
@@ -0,0 +1,83 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>com.google.cloud</groupId>
+  <artifactId>google-cloud-bigquery-bom</artifactId>
+  <version>2.60.1-SNAPSHOT</version>
+  <packaging>pom</packaging>
+  <parent>
+    <groupId>com.google.cloud</groupId>
+    <artifactId>sdk-platform-java-config</artifactId>
+    <version>3.57.0</version>
+  </parent>
+
+  <name>Google Cloud BigQuery BOM</name>
+  <url>https://github.com/googleapis/java-bigquery</url>
+  <description>
+    BOM for Google Cloud BigQuery
+  </description>
+
+  <organization>
+    <name>Google LLC</name>
+  </organization>
+
+  <developers>
+    <developer>
+      <id>suztomo</id>
+      <name>Tomo Suzuki</name>
+      <email>suztomo@google.com</email>
+      <organization>Google LLC</organization>
+      <roles>
+        <role>Developer</role>
+      </roles>
+    </developer>
+  </developers>
+
+  <scm>
+    <connection>scm:git:https://github.com/googleapis/java-bigquery.git</connection>
+    <developerConnection>scm:git:git@github.com:googleapis/java-bigquery.git</developerConnection>
+    <url>https://github.com/googleapis/java-bigquery</url>
+  </scm>
+
+  <licenses>
+    <license>
+      <name>The Apache Software License, Version 2.0</name>
+      <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+      <distribution>repo</distribution>
+    </license>
+  </licenses>
+
+  <dependencyManagement>
+    <dependencies>
+      <dependency>
+        <groupId>com.google.cloud</groupId>
+        <artifactId>google-cloud-bigquery</artifactId>
+        <version>2.60.1-SNAPSHOT</version>
+      </dependency>
+    </dependencies>
+  </dependencyManagement>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-checkstyle-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-site-plugin</artifactId>
+        <configuration>
+          <skipDeploy>false</skipDeploy>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+</project>
diff --git a/google-cloud-bigquery-jdbc/Dockerfile b/google-cloud-bigquery-jdbc/Dockerfile
new file mode 100644
index 0000000000..f88cc5a4bc
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/Dockerfile
@@ -0,0 +1,21 @@
+FROM gcr.io/cloud-devrel-public-resources/java11
+
+ENV JDBC_DOCKER_ENV=true
+RUN apt-get update && apt-get install -y zip && rm -rf /var/lib/apt/lists/*
+
+RUN mkdir /tst
+COPY ./pom.xml /src/pom.xml
+COPY ./java.header /src/java.header
+COPY ./license-checks.xml /src/license-checks.xml
+COPY ./google-cloud-bigquery-jdbc/pom.xml /src/google-cloud-bigquery-jdbc/pom.xml
+
+COPY ./google-cloud-bigquery /src/google-cloud-bigquery
+COPY ./google-cloud-bigquery-bom /src/google-cloud-bigquery-bom
+
+
+WORKDIR /src
+RUN mvn install -DskipTests
+
+WORKDIR /src/google-cloud-bigquery-jdbc
+
+ENTRYPOINT []
diff --git a/google-cloud-bigquery-jdbc/Makefile b/google-cloud-bigquery-jdbc/Makefile
new file mode 100644
index 0000000000..61521770c7
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/Makefile
@@ -0,0 +1,130 @@
+CONTAINER_NAME=jdbc
+PACKAGE_DESTINATION=$(PWD)/drivers
+SRC="$(PWD)/.."
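+# skipSurefire controls whether the Surefire unit tests run during
+# `make integration-test`; it defaults to true, so integration tests run alone
+# unless skipSurefire=false is passed (see the integration-test target below).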
+skipSurefire ?= true
+
+# no indentation for ifndef/endif due to their evaluation before execution
+.check-env: |
+ifndef GOOGLE_APPLICATION_CREDENTIALS
+	$(error GOOGLE_APPLICATION_CREDENTIALS is required to run tests)
+endif
+
+install:
+	mvn clean install
+
+clean:
+	mvn clean
+
+lint:
+	mvn com.spotify.fmt:fmt-maven-plugin:format
+
+unittest: |
+	mvn -B -ntp \
+		-DtrimStackTrace=false \
+		-Dclirr.skip=true \
+		-Denforcer.skip=true \
+		-Dtest=$(test) \
+		test
+
+# Important: By default, this command will skip unit tests.
+# To include unit tests, run: make integration-test skipSurefire=false
+integration-test: .check-env
+	mvn -B -ntp \
+		-Penable-integration-tests \
+		-DtrimStackTrace=false \
+		-DskipSurefire=$(skipSurefire) \
+		-Dclirr.skip=true \
+		-Denforcer.skip=true \
+		-Dit.failIfNoSpecifiedTests=false \
+		-Dit.test=$(test) \
+		integration-test
+
+unit-test-coverage:
+	$(MAKE) unittest
+	mvn -B -ntp jacoco:report
+	BUILD_DIR=$$(mvn -B -ntp help:evaluate -Dexpression=project.build.directory -q -DforceStdout); \
+	cd $$BUILD_DIR/site && zip -r $$OLDPWD/jacoco-unittests.zip jacoco && cd $$OLDPWD
+
+full-coverage: .check-env
+	$(MAKE) integration-test skipSurefire=false test=ITBigQueryJDBCTest,ITNightlyBigQueryTest
+	mvn -B -ntp jacoco:report
+	BUILD_DIR=$$(mvn -B -ntp help:evaluate -Dexpression=project.build.directory -q -DforceStdout); \
+	cd $$BUILD_DIR/site && zip -r $$OLDPWD/jacoco-full.zip jacoco && cd $$OLDPWD
+
+package:
+	mvn clean package \
+		-DincludeScope=runtime \
+		-Dmaven.test.skip=true
+	mvn dependency:copy-dependencies \
+		-DincludeScope=runtime
+	${MAKE} generate-dependency-list
+
+generate-dependency-list:
+	mvn -B dependency:list \
+		-f pom.xml \
+		-DincludeScope=runtime | grep :jar: | sed -E "s/^.* ([^: ]+):([^:]+):([^:]+):([^:]+).*/<dependency><groupId>\1<\/groupId><artifactId>\2<\/artifactId><version>\4<\/version><\/dependency>/g" > dependencies.txt
+
+# Commands for dockerized environments
+.docker-run: |
+	docker run -it \
+		-v $(GOOGLE_APPLICATION_CREDENTIALS):/auth/application_creds.json \
+		-v "$(GOOGLE_APPLICATION_CREDENTIALS).p12":/auth/application_creds.p12 \
+		-e "GOOGLE_APPLICATION_CREDENTIALS=/auth/application_creds.json" \
+		-v $(SRC):/src \
+		-e "SA_EMAIL=test_email" \
+		-e "SA_SECRET=/auth/application_creds.json" \
+		-e "SA_SECRET_P12=/auth/application_creds.p12" \
+		$(CONTAINER_NAME) $(args)
+
+docker-build:
+	docker build -t $(CONTAINER_NAME) -f Dockerfile ..
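+# Note: docker-build uses the repository root (..) as the build context so that
+# the Dockerfile's COPY steps can reach sibling modules such as
+# google-cloud-bigquery and google-cloud-bigquery-bom.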
+
+docker-session:
+	$(MAKE) .docker-run args="bash"
+
+docker-package-all-dependencies: docker-build
+	mkdir -p $(PACKAGE_DESTINATION)
+	docker run \
+		-v $(SRC):/src \
+		-v $(PACKAGE_DESTINATION):/pkg \
+		$(CONTAINER_NAME) \
+		sh -c "make package-all-dependencies && \
+			cp --no-preserve=ownership /mvn/test-target/google-cloud-bigquery-jdbc-*.jar /pkg && \
+			rm -f /pkg/*tests.jar"
+
+docker-package-all-dependencies-shaded: docker-build
+	mkdir -p $(PACKAGE_DESTINATION)
+	docker run \
+		-v $(SRC):/src \
+		-v $(PACKAGE_DESTINATION):/pkg \
+		$(CONTAINER_NAME) \
+		sh -c "make package-all-dependencies-shaded && \
+			cp --no-preserve=ownership /mvn/test-target/google-cloud-bigquery-jdbc-*.jar /pkg && \
+			rm -f /pkg/*tests.jar"
+
+docker-package: docker-build
+	mkdir -p $(PACKAGE_DESTINATION)
+	docker run \
+		-v $(SRC):/src \
+		-v $(PACKAGE_DESTINATION):/pkg \
+		$(CONTAINER_NAME) \
+		sh -c "make package && \
+			mkdir -p /tmp/package && \
+			cp --no-preserve=ownership /mvn/test-target/google-cloud-bigquery-jdbc-*.jar /tmp/package && \
+			rm -f /tmp/package/google-cloud-bigquery-jdbc-*-all.jar && \
+			rm -f /tmp/package/*tests.jar && \
+			cp --no-preserve=ownership dependencies.txt /tmp/package && \
+			rm dependencies.txt && \
+			cp --no-preserve=ownership /mvn/test-target/dependency/*.jar /tmp/package && \
+			zip -j -r /pkg/google-cloud-bigquery-jdbc-$$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout).zip /tmp/package && \
+			cp --no-preserve=ownership /mvn/test-target/google-cloud-bigquery-jdbc-*-all.jar /pkg "
+
+docker-unittest: |
+	$(MAKE) .docker-run args="make unittest test=$(test)"
+
+docker-integration-test: .check-env
+	$(MAKE) .docker-run args="make integration-test test=$(test) skipSurefire=$(skipSurefire)"
+
+docker-coverage:
+	$(MAKE) .docker-run args="make unit-test-coverage"
+	$(MAKE) .docker-run args="make full-coverage"
\ No newline at end of file
diff --git a/google-cloud-bigquery-jdbc/README.MD b/google-cloud-bigquery-jdbc/README.MD
new file mode 100644
index 0000000000..4c8fd93216
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/README.MD
@@ -0,0 +1,291 @@
+# Google BigQuery JDBC Client for Java
+
+Java idiomatic client for [BigQuery JDBC][product-docs].
+
+[![Maven][maven-version-image]][maven-version-link]
+![Stability][stability-image]
+
+- [Product Documentation][product-docs]
+- [Client Library Documentation][javadocs]
+
+## Quickstart
+
+If you are using Maven, add this to your pom.xml file:
+
+```xml
+<dependency>
+  <groupId>com.google.cloud</groupId>
+  <artifactId>google-cloud-bigquery-jdbc</artifactId>
+  <version>LATEST_VERSION</version>
+</dependency>
+```
+
+If you are using Gradle without the BOM, add this to your dependencies:
+
+```Groovy
+implementation 'com.google.cloud:google-cloud-bigquery-jdbc:LATEST_VERSION'
+```
+
+If you are using SBT, add this to your dependencies:
+
+```Scala
+libraryDependencies += "com.google.cloud" % "google-cloud-bigquery-jdbc" % "LATEST_VERSION"
+```
+
+## Authentication
+
+See the [Authentication][authentication] section in the base directory's README.
+
+## Authorization
+
+The client application making API calls must be granted [authorization scopes][auth-scopes] required for the desired BigQuery JDBC APIs, and the authenticated principal must have the [IAM role(s)][predefined-iam-roles] required to access GCP resources using the BigQuery JDBC API calls.
+
+## Developer Guide
+
+### Prerequisites
+
+You need to have either Java with Maven installed, or Docker.
You might want to install [`Make`](https://www.gnu.org/software/make/) to simplify running commands; otherwise, please look into the `Makefile` for the specific commands and configurations.
+
+### Setup
+
+`make install` primarily relies on the `mvn install` command. All of the following commands apply primarily to the `google-cloud-bigquery-jdbc` project.
+You can also use `make clean` to clean the project and `make lint` to format the code.
+
+### Running tests
+
+#### Unittests
+
+Run all unittests
+
+`make unittest`
+
+Run a specific unittest
+
+`make unittest test=<test-name>`
+
+Please reference the [Maven documentation](https://maven.apache.org/surefire/maven-surefire-plugin/examples/single-test.html) for details about `<test-name>`
+
+Example: `make unittest test=BigQueryArrowStructTest`
+
+#### Integration tests
+
+IMPORTANT: Running integration tests will skip unit tests by default. To include unit tests, run `make integration-test skipSurefire=false`. The primary focus of this command is to run a specific set of tests without a lot of overhead.
+
+Set the following environment variables to run the integration tests:
+
+```
+# Default gcloud auth setup
+export GOOGLE_APPLICATION_CREDENTIALS=<path-to-credentials-json>
+
+# Test specific envs
+export SA_EMAIL=email@email.com
+export SA_SECRET=<path-to-secret>
+# Alternatively it can be JSON content:
+export SA_SECRET=`cat <path-to-secret>`
+```
+
+Run all integration tests (currently takes 15+ minutes, so this is discouraged).
+
+`make integration-test`
+
+Run a specific integration test
+
+`make integration-test test=<test-name>`
+
+Please reference the [Maven documentation](https://maven.apache.org/surefire/maven-surefire-plugin/examples/single-test.html) for details about `<test-name>`
+
+Example: `make integration-test test=ITBigQueryJDBCTest#testValidServiceAccountAuthenticationOAuthPvtKey`
+
+### Dockerized environment
+
+If you don't have Java or Maven, or if you want to test changes with a different Java version, you can leverage the dockerized environment.
+
+One-time run commands are similar to the local development make commands:
+
+```
+make docker-build
+make docker-unittest
+make docker-integration-test
+```
+
+Please note that running unit or integration tests within Docker doesn't leverage the Maven cache because it is not persisted.
+If you want to run multiple commands, you can start a session and treat that shell session as your local environment.
+
+```
+make docker-session
+```
+
+All Docker commands rely only on the `GOOGLE_APPLICATION_CREDENTIALS` env variable being present; the rest of the env variables are created as needed.
+
+### Packaging
+
+There are a few ways to package the Google JDBC Driver. The output of the packaging commands can be found in the `target` directory.
+
+`make package` or `make docker-package` will create both a thin jar and a zip file with all dependencies.
+`make package-all-dependencies` or `make docker-package-all-dependencies` will create a single jar with all dependencies included.
+`make package-all-dependencies-shaded` or `make docker-package-all-dependencies-shaded` will create a single shaded jar with all dependencies included.
+
+#### Thin jar
+
+The thin jar is created with `make package`. The thin jar is packaged as a zip file with its dependencies listed in a `dependencies.txt` file in a `pom.xml`-compatible format.
+
+#### All dependencies
+
+The jar with all dependencies is created alongside the thin jar. It includes all dependencies and can be used as a standalone jar with tools like R-Studio. You can run `make package-all-dependencies` or `make docker-package-all-dependencies` to build only the jar with all dependencies.
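+Once packaged, the standalone jar can also be exercised from plain JDBC code. The snippet below is an illustrative sketch only: the `jdbc:bigquery://` URL is an assumed, hypothetical format for this example, not a documented value, so consult the driver documentation for the actual connection string and driver registration details.
+
+```java
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.Statement;
+
+public class JdbcQuickstart {
+  public static void main(String[] args) throws Exception {
+    // Hypothetical connection URL; replace with the documented format and your project ID.
+    String url = "jdbc:bigquery://my-gcp-project";
+    try (Connection connection = DriverManager.getConnection(url);
+        Statement statement = connection.createStatement();
+        ResultSet results = statement.executeQuery("SELECT 1 AS one")) {
+      while (results.next()) {
+        System.out.println(results.getInt("one"));
+      }
+    }
+  }
+}
+```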
+
+#### Shaded Jar
+You can also build a shaded jar with all dependencies. This can be done by running `make package-all-dependencies-shaded` or `make docker-package-all-dependencies-shaded`.
+
+### Nightly builds
+
+The nightly build runs the full set of integration tests, including the `ITBigQueryJDBCTest` and `ITNightlyBigQueryTest` test suites. It also includes some long-running tests (the full run takes 20+ minutes to complete).
+
+**Note:** These builds are intended for testing and development purposes only and are not recommended for production use.
+
+The nightly integration tests include a step that builds the full and thin jars and uploads them to Google Cloud Storage.
+
+They can be retrieved with the following commands:
+
+```
+gsutil cp gs://bq_devtools_release_private/drivers/jdbc/google-cloud-bigquery-jdbc-latest-all.jar .
+gsutil cp gs://bq_devtools_release_private/drivers/jdbc/google-cloud-bigquery-jdbc-latest.zip .
+```
+
+#### Performance tests
+
+Once a week, a Cloud Build pipeline uploads the latest full jar to an internal location for performance tests.
+
+### Code Coverage
+
+We're using [JaCoCo](https://www.eclemma.org/jacoco/) to track code coverage. The `Makefile` has two separate sets of commands for unittest and integration test reports.
+
+You can run `make unit-test-coverage` to generate a coverage report for unit tests. The output will be in `jacoco-unittests.zip`.
+You can run `make full-coverage` to generate a coverage report for both unit and integration tests. The output will be in `jacoco-full.zip`.
+
+You can also run `make docker-coverage`, which produces both reports; the resulting `jacoco-unittests.zip` and `jacoco-full.zip` files are written to the project root.
+
+## Getting Started
+
+### Prerequisites
+
+You will need a [Google Cloud Platform Console][developer-console] project with the BigQuery JDBC [API enabled][enable-api].
+
+[Follow these instructions][create-project] to get your project set up. You will also need to set up the local development environment by
+[installing the Google Cloud Command Line Interface][cloud-cli] and running the following commands on the command line:
+`gcloud auth login` and `gcloud config set project [YOUR PROJECT ID]`.
+
+### Installation and setup
+
+You'll need to obtain the `google-cloud-bigquery-jdbc` library. See the [Quickstart](#quickstart) section
+to add `google-cloud-bigquery-jdbc` as a dependency in your code.
+
+## About BigQuery JDBC
+
+
+[BigQuery JDBC][product-docs]
+
+See the [BigQuery JDBC client library docs][javadocs] to learn how to
+use this BigQuery JDBC Client Library.
+
+
+
+
+
+
+## Troubleshooting
+
+To get help, follow the instructions in the [shared Troubleshooting document][troubleshooting].
+
+## Supported Java Versions
+
+Java 8 or above is required for using this client.
+
+Google's Java client libraries,
+[Google Cloud Client Libraries][cloudlibs]
+and
+[Google Cloud API Libraries][apilibs],
+follow the
+[Oracle Java SE support roadmap][oracle]
+(see the Oracle Java SE Product Releases section).
+
+### For new development
+
+In general, new feature development occurs with support for the lowest Java
+LTS version covered by Oracle's Premier Support (which typically lasts 5 years
+from initial General Availability). If the minimum required JVM for a given
+library is changed, it is accompanied by a [semver][semver] major release.
+
+Java 11 and (in September 2021) Java 17 are the best choices for new
+development.
+
+### Keeping production systems current
+
+Google tests its client libraries with all current LTS versions covered by
+Oracle's Extended Support (which typically lasts 8 years from initial
+General Availability).
+
+#### Legacy support
+
+Google's client libraries support legacy versions of Java runtimes with
+long-term stable libraries that don't receive feature updates, on a
+best-efforts basis, since it may not be possible to backport all patches.
+
+Google provides updates on a best-efforts basis to apps that continue to use
+Java 7, though apps might need to upgrade to current versions of the library
+that support their JVM.
+
+#### Where to find specific information
+
+The latest versions and the supported Java versions are identified on
+the individual GitHub repository `github.com/GoogleAPIs/java-SERVICENAME`
+and on [google-cloud-java][g-c-j].
+
+## Versioning
+
+
+This library follows [Semantic Versioning](http://semver.org/).
+
+
+
+## Contributing
+
+
+Contributions to this library are always welcome and highly encouraged.
+
+See [CONTRIBUTING][contributing] for more information on how to get started.
+
+Please note that this project is released with a Contributor Code of Conduct. By participating in
+this project you agree to abide by its terms. See [Code of Conduct][code-of-conduct] for more
+information.
+
+
+## License
+
+Apache 2.0 - See [LICENSE][license] for more information.
+
+Java is a registered trademark of Oracle and/or its affiliates.
+
+[product-docs]: https://cloud.google.com/bigquery
+[javadocs]: https://cloud.google.com/java/docs/reference/google-cloud-bigquery/latest/history
+[stability-image]: https://img.shields.io/badge/stability-unknown-red
+[maven-version-image]: https://img.shields.io/maven-central/v/com.google.cloud/google-cloud-bigquery-jdbc.svg
+[maven-version-link]: https://central.sonatype.com/artifact/com.google.cloud/google-cloud-bigquery-jdbc/0.0.0
+[authentication]: https://github.com/googleapis/google-cloud-java#authentication
+[auth-scopes]: https://developers.google.com/identity/protocols/oauth2/scopes
+[predefined-iam-roles]: https://cloud.google.com/iam/docs/understanding-roles#predefined_roles
+[iam-policy]: https://cloud.google.com/iam/docs/overview#cloud-iam-policy
+[developer-console]: https://console.developers.google.com/
+[create-project]: https://cloud.google.com/resource-manager/docs/creating-managing-projects
+[cloud-cli]: https://cloud.google.com/cli
+[troubleshooting]: https://github.com/googleapis/google-cloud-java/blob/main/TROUBLESHOOTING.md
+[contributing]: https://github.com/googleapis/java-bigquery/blob/main/CONTRIBUTING.md
+[code-of-conduct]: https://github.com/googleapis/java-bigquery/blob/main/CODE_OF_CONDUCT.md
+[license]: https://github.com/googleapis/java-bigquery/blob/main/LICENSE
+
+[semver]: https://semver.org/
+[cloudlibs]: https://cloud.google.com/apis/docs/client-libraries-explained
+[apilibs]: https://cloud.google.com/apis/docs/client-libraries-explained#google_api_client_libraries
+[oracle]: https://www.oracle.com/java/technologies/java-se-support-roadmap.html
+[g-c-j]: http://github.com/googleapis/google-cloud-java
diff --git a/google-cloud-bigquery-jdbc/pom.xml b/google-cloud-bigquery-jdbc/pom.xml
new file mode 100644
index 0000000000..2f225b6d87
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/pom.xml
@@ -0,0 +1,332 @@
+
+
+ 4.0.0
+ com.google.cloud
+ google-cloud-bigquery-jdbc
+ 0.4.1-SNAPSHOT
+ jar
+ BigQuery JDBC
+ https://github.com/googleapis/java-bigquery-jdbc
+ JDBC for BigQuery
+
+
+ UTF-8
+ UTF-8
+ 
github + google-cloud-bigquery-jdbc + + + + + + + src/main/resources + true + + + + + org.apache.maven.plugins + maven-surefire-plugin + 3.5.2 + + ${skipSurefire} + + true + + + + + org.apache.maven.plugins + maven-failsafe-plugin + + + true + + + + + org.jacoco + jacoco-maven-plugin + 0.8.13 + + + org.apache.maven.plugins + maven-dependency-plugin + + + com.google.*:* + org.apache.arrow:* + org.apache.httpcomponents.*:* + io.grpc:* + + + + + org.apache.maven.plugins + maven-shade-plugin + 3.5.2 + + + package + + shade + + + true + all + false + + + java.base/java.nio=ALL-UNNAMED + + + + + + + META-INF/io.netty.versions.properties + + + + + com + com.google.bqjdbc.shaded.com + + com.google.cloud.bigquery.jdbc.* + + + + org + com.google.bqjdbc.shaded.org + + org.conscrypt.* + + + + io + com.google.bqjdbc.shaded.io + + + + + *:* + + META-INF/LICENSE* + META-INF/NOTICE* + META-INF/DEPENDENCIES + META-INF/proguard/*.pro + META-INF/maven/** + META-INF/*.MF + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + arrow-git.properties + + + + + + + + + + + + com.google.cloud + google-cloud-bigquery-parent + 2.60.1-SNAPSHOT + + + + com.google.cloud + google-cloud-bigquery + + + com.google.cloud + google-cloud-bigquerystorage + + + com.google.http-client + google-http-client-apache-v5 + + + org.apache.httpcomponents + httpcore + + + org.apache.httpcomponents + httpclient + + + + + + + com.google.api + api-common + + + com.google.api + gax + + + com.google.api + gax-grpc + + + com.google.api.grpc + proto-google-cloud-bigquerystorage-v1 + + + + com.google.auth + google-auth-library-oauth2-http + + + com.google.auth + google-auth-library-credentials + + + + com.google.cloud + google-cloud-core + + + com.google.cloud + google-cloud-core-http + + + + com.google.code.findbugs + jsr305 + + + com.google.code.gson + gson + + + + com.google.guava + guava + + + + com.google.protobuf + protobuf-java + + + com.google.http-client + google-http-client + + + + io.grpc + grpc-api + + + io.grpc + grpc-core + + + io.grpc + grpc-netty-shaded + + + + + org.apache.arrow + arrow-vector + + + org.apache.arrow + arrow-memory-core + + + org.apache.httpcomponents.client5 + httpclient5 + + + org.apache.httpcomponents.core5 + httpcore5 + + + + + com.google.truth + truth + 1.1.3 + test + + + junit + junit + 4.13.2 + test + + + org.mockito + mockito-core + 4.11.0 + test + + + + + + java17 + + [17,) + + + !jvm + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + --add-opens=java.base/java.nio=org.apache.arrow.memory.core,ALL-UNNAMED + + + + + + + + + docker + + + env.JDBC_DOCKER_ENV + + + + + /mvn/test-target + + + + \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryConversionException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryConversionException.java new file mode 100644 index 0000000000..90e758b05e --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryConversionException.java @@ -0,0 +1,29 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +import java.sql.SQLException; + +/** + * Exception for errors that occur when the driver cannot convert a value from one type to another. + */ +public class BigQueryConversionException extends SQLException { + + public BigQueryConversionException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionException.java new file mode 100644 index 0000000000..185ef54bb1 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionException.java @@ -0,0 +1,36 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +import com.google.api.core.InternalApi; + +/** + * Thrown to indicate that the coercion was attempted but couldn't be performed successfully because + * of some error. + */ +@InternalApi +public class BigQueryJdbcCoercionException extends RuntimeException { + + /** + * Construct a new exception with the specified cause. + * + * @param cause the actual cause which was thrown while performing the coercion. + */ + public BigQueryJdbcCoercionException(Exception cause) { + super("Coercion error", cause); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionNotFoundException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionNotFoundException.java new file mode 100644 index 0000000000..b4eafb2ee5 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcCoercionNotFoundException.java @@ -0,0 +1,40 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.exception;
+
+import com.google.api.core.InternalApi;
+
+/**
+ * Thrown to indicate that the current TypeCoercer cannot perform the coercion because no Coercion
+ * implementation is registered for the given source and target types.
+ */
+@InternalApi
+public class BigQueryJdbcCoercionNotFoundException extends RuntimeException {
+
+  /**
+   * Construct a new exception.
+   *
+   * @param source the source type.
+   * @param target the target type.
+   */
+  public BigQueryJdbcCoercionNotFoundException(Class source, Class target) {
+    super(
+        String.format(
+            "Coercion not found for [%s -> %s] conversion",
+            source.getCanonicalName(), target.getCanonicalName()));
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcException.java
new file mode 100644
index 0000000000..72a22aba61
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcException.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2024 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.exception;
+
+import com.google.cloud.bigquery.BigQueryException;
+import java.sql.SQLException;
+
+public class BigQueryJdbcException extends SQLException {
+  private BigQueryException bigQueryException = null;
+
+  /**
+   * Constructs a new BigQueryJdbcException with the given message.
+   *
+   * @param message The detail message.
+   */
+  public BigQueryJdbcException(String message) {
+    super(message);
+  }
+
+  /**
+   * Constructs a new BigQueryJdbcException from an InterruptedException.
+   *
+   * @param ex The InterruptedException to be thrown.
+   */
+  public BigQueryJdbcException(InterruptedException ex) {
+    super(ex);
+  }
+
+  /**
+   * Constructs a new BigQueryJdbcException from a BigQueryException.
+   *
+   * @param ex The BigQueryException to be thrown.
+   */
+  public BigQueryJdbcException(BigQueryException ex) {
+    super(ex);
+    this.bigQueryException = ex;
+  }
+
+  /**
+   * Construct a new BigQueryJdbcException with the cause.
+   *
+   * @param message Specific message that is being added to the Exception.
+   * @param cause Throwable that is being converted.
+ */ + public BigQueryJdbcException(Throwable cause) { + super(cause); + } + + public BigQueryException getBigQueryException() { + return bigQueryException; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcRuntimeException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcRuntimeException.java new file mode 100644 index 0000000000..38e5171be4 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcRuntimeException.java @@ -0,0 +1,48 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +public class BigQueryJdbcRuntimeException extends RuntimeException { + + /** + * Constructs a new BigQueryJdbcRuntimeException with the given message. + * + * @param message The detail message. + */ + public BigQueryJdbcRuntimeException(String message) { + super(message); + } + + /** + * Constructs a new BigQueryJdbcRuntimeException from a Throwable exception. + * + * @param ex Throwable to be thrown. + */ + public BigQueryJdbcRuntimeException(Throwable ex) { + super(ex); + } + + /** + * Constructs a new BigQueryJdbcRuntimeException from a Throwable exception and a message. + * + * @param message The detail message. + * @param ex Throwable to be thrown. + */ + public BigQueryJdbcRuntimeException(String message, InterruptedException ex) { + super(message, ex); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlFeatureNotSupportedException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlFeatureNotSupportedException.java new file mode 100644 index 0000000000..8c93d8764b --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlFeatureNotSupportedException.java @@ -0,0 +1,40 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +import com.google.cloud.bigquery.BigQueryException; +import java.sql.SQLFeatureNotSupportedException; + +public class BigQueryJdbcSqlFeatureNotSupportedException extends SQLFeatureNotSupportedException { + /** + * Constructs a new BigQueryJdbcSqlFeatureNotSupportedException with the given message. + * + * @param message The detail message. 
+ */ + public BigQueryJdbcSqlFeatureNotSupportedException(String message) { + super(message); + } + + /** + * Constructs a new BigQueryJdbcSqlFeatureNotSupportedException from BigQueryException + * + * @param ex The BigQueryException to be thrown. + */ + public BigQueryJdbcSqlFeatureNotSupportedException(BigQueryException ex) { + super(ex); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlSyntaxErrorException.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlSyntaxErrorException.java new file mode 100644 index 0000000000..99edcd0c54 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/exception/BigQueryJdbcSqlSyntaxErrorException.java @@ -0,0 +1,36 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.exception; + +import com.google.cloud.bigquery.BigQueryException; +import java.sql.SQLSyntaxErrorException; + +/** + * Specific {@link SQLSyntaxErrorException} thrown when the SQLState class value is '42', or under + * vendor-specified conditions. This indicates that the in-progress query has violated SQL syntax + * rules. + */ +public class BigQueryJdbcSqlSyntaxErrorException extends SQLSyntaxErrorException { + /** + * Constructs a new BigQueryJdbcSqlSyntaxErrorException from BigQueryException + * + * @param ex The BigQueryException to be thrown. + */ + public BigQueryJdbcSqlSyntaxErrorException(BigQueryException ex) { + super(ex.getMessage(), "Incorrect SQL syntax."); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArray.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArray.java new file mode 100644 index 0000000000..49bd565df7 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArray.java @@ -0,0 +1,105 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import java.sql.ResultSet; +import java.sql.SQLException; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; + +/** + * An implementation of {@link BigQueryBaseArray} used to represent Array values from Arrow data. + */ +class BigQueryArrowArray extends BigQueryBaseArray { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryArrowArray.class.getName()); + private static final BigQueryTypeCoercer BIGQUERY_TYPE_COERCER = + BigQueryTypeCoercionUtility.INSTANCE; + private JsonStringArrayList values; + + public BigQueryArrowArray(Field schema, JsonStringArrayList values) { + super(schema); + this.values = values; + } + + @Override + public Object getArray() { + LOG.finest("++enter++"); + ensureValid(); + if (values == null) { + return null; + } + return getArrayInternal(0, values.size()); + } + + @Override + public Object getArray(long index, int count) { + LOG.finest("++enter++"); + ensureValid(); + if (values == null) { + return null; + } + Tuple range = createRange(index, count, this.values.size()); + return getArrayInternal(range.x(), range.y()); + } + + @Override + public ResultSet getResultSet() throws SQLException { + LOG.finest("++enter++"); + ensureValid(); + if (values == null) { + return new BigQueryArrowResultSet(); + } + BigQueryArrowBatchWrapper arrowBatchWrapper = + BigQueryArrowBatchWrapper.getNestedFieldValueListWrapper(values); + return BigQueryArrowResultSet.getNestedResultSet( + Schema.of(singleElementSchema()), arrowBatchWrapper, 0, this.values.size()); + } + + @Override + public ResultSet getResultSet(long index, int count) throws SQLException { + LOG.finest("++enter++"); + ensureValid(); + if (values == null) { + return new BigQueryArrowResultSet(); + } + Tuple range = createRange(index, count, this.values.size()); + BigQueryArrowBatchWrapper arrowBatchWrapper = + BigQueryArrowBatchWrapper.getNestedFieldValueListWrapper(values); + return BigQueryArrowResultSet.getNestedResultSet( + Schema.of(singleElementSchema()), arrowBatchWrapper, range.x(), range.y()); + } + + @Override + public void free() { + LOG.finest("++enter++"); + this.values = null; + markInvalid(); + } + + @Override + Object getCoercedValue(int index) { + LOG.finest("++enter++"); + Object value = this.values.get(index); + return this.arrayOfStruct + ? new BigQueryArrowStruct(schema.getSubFields(), (JsonStringHashMap) value) + : BIGQUERY_TYPE_COERCER.coerceTo(getTargetClass(), value); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowBatchWrapper.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowBatchWrapper.java new file mode 100644 index 0000000000..99781ff16b --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowBatchWrapper.java @@ -0,0 +1,83 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.cloud.bigquery.storage.v1.ArrowRecordBatch;
+import org.apache.arrow.vector.util.JsonStringArrayList;
+
+/** This class acts as a facade layer and wraps an ArrowRecordBatch & JsonStringArrayList */
+class BigQueryArrowBatchWrapper {
+  private static final BigQueryJdbcCustomLogger LOG =
+      new BigQueryJdbcCustomLogger(BigQueryArrowBatchWrapper.class.getName());
+  // Reference to the current arrowBatch
+  private final ArrowRecordBatch currentArrowBatch;
+  // Reference to the nested records (for Arrays); set to null otherwise
+  private final JsonStringArrayList nestedRecords;
+
+  // Marks the end of the stream for the ResultSet
+  private final boolean isLast;
+
+  private final Exception exception;
+
+  private BigQueryArrowBatchWrapper(
+      ArrowRecordBatch currentArrowBatch,
+      JsonStringArrayList nestedRecords,
+      boolean isLast,
+      Exception exception) {
+    this.currentArrowBatch = currentArrowBatch;
+    this.nestedRecords = nestedRecords;
+    this.isLast = isLast;
+    this.exception = exception;
+  }
+
+  static BigQueryArrowBatchWrapper of(ArrowRecordBatch currentArrowBatch, boolean... isLast) {
+    LOG.finest("++enter++");
+    boolean isLastFlag = isLast != null && isLast.length == 1 && isLast[0];
+    return new BigQueryArrowBatchWrapper(currentArrowBatch, null, isLastFlag, null);
+  }
+
+  static BigQueryArrowBatchWrapper getNestedFieldValueListWrapper(
+      JsonStringArrayList nestedRecords, boolean... isLast) {
+    LOG.finest("++enter++");
+    boolean isLastFlag = isLast != null && isLast.length == 1 && isLast[0];
+    return new BigQueryArrowBatchWrapper(null, nestedRecords, isLastFlag, null);
+  }
+
+  static BigQueryArrowBatchWrapper ofError(Exception exception) {
+    LOG.finest("++enter++");
+    return new BigQueryArrowBatchWrapper(null, null, true, exception);
+  }
+
+  ArrowRecordBatch getCurrentArrowBatch() {
+    LOG.finest("++enter++");
+    return this.currentArrowBatch;
+  }
+
+  JsonStringArrayList getNestedRecords() {
+    LOG.finest("++enter++");
+    return this.nestedRecords;
+  }
+
+  boolean isLast() {
+    LOG.finest("++enter++");
+    return this.isLast;
+  }
+
+  Exception getException() {
+    return this.exception;
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSet.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSet.java
new file mode 100644
index 0000000000..1d7d89e3f1
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSet.java
@@ -0,0 +1,496 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.cloud.bigquery.jdbc.BigQueryBaseArray.isArray;
+import static com.google.cloud.bigquery.jdbc.BigQueryBaseStruct.isStruct;
+
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardSQLTypeName;
+import com.google.cloud.bigquery.exception.BigQueryJdbcException;
+import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException;
+import com.google.cloud.bigquery.storage.v1.ArrowRecordBatch;
+import com.google.cloud.bigquery.storage.v1.ArrowSchema;
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+import java.time.LocalDateTime;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.BlockingQueue;
+import org.apache.arrow.memory.BufferAllocator;
+import org.apache.arrow.memory.RootAllocator;
+import org.apache.arrow.vector.FieldVector;
+import org.apache.arrow.vector.VectorLoader;
+import org.apache.arrow.vector.VectorSchemaRoot;
+import org.apache.arrow.vector.ipc.ReadChannel;
+import org.apache.arrow.vector.ipc.message.MessageSerializer;
+import org.apache.arrow.vector.util.ByteArrayReadableSeekableByteChannel;
+import org.apache.arrow.vector.util.JsonStringArrayList;
+import org.apache.arrow.vector.util.JsonStringHashMap;
+
+/** {@link ResultSet} implementation for the Arrow data source (using the Storage Read API) */
+class BigQueryArrowResultSet extends BigQueryBaseResultSet {
+  private final long totalRows;
+  // count of rows read by the current instance of ResultSet
+  private long rowCount = 0;
+  // IMP: This is a buffer of Arrow batches; its max size should be kept as small as
+  // possible to avoid holding too much memory
+  private final BlockingQueue buffer;
+
+  // TODO(neenu): See if it makes sense to have the nested batch represented by
+  // 'JsonStringArrayList' directly
+  // points to the nested batch of arrow record
+  private final BigQueryArrowBatchWrapper currentNestedBatch;
+  private final int fromIndex;
+  private final int toIndexExclusive;
+
+  // Acts as a cursor; resets to -1 when the `currentBatch` is processed. Points to a
+  // logical row in the columnar BigQueryArrowBatchWrapper currentBatch
+  private int currentBatchRowIndex = -1;
+  private boolean hasReachedEnd = false;
+
+  // Tracks the index of the nested element under process
+  private int nestedRowIndex;
+
+  private boolean afterLast = false;
+
+  private ArrowDeserializer arrowDeserializer;
+  BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
+  // Decoder object will be reused to avoid re-allocation and too much garbage collection.
+ private VectorSchemaRoot vectorSchemaRoot; + private VectorLoader vectorLoader; + // producer thread's reference + private final Thread ownedThread; + + private BigQueryArrowResultSet( + Schema schema, + ArrowSchema arrowSchema, + long totalRows, + BigQueryStatement statement, + BlockingQueue buffer, + BigQueryArrowBatchWrapper currentNestedBatch, + boolean isNested, + int fromIndex, + int toIndexExclusive, + Thread ownedThread, + BigQuery bigQuery) + throws SQLException { + super(bigQuery, statement, schema, isNested); + LOG.finest("++enter++"); + this.totalRows = totalRows; + this.buffer = buffer; + this.currentNestedBatch = currentNestedBatch; + this.fromIndex = fromIndex; + this.toIndexExclusive = toIndexExclusive; + this.nestedRowIndex = fromIndex - 1; + this.ownedThread = ownedThread; + if (!isNested && arrowSchema != null) { + try { + this.arrowDeserializer = new ArrowDeserializer(arrowSchema); + } catch (IOException ex) { + throw new BigQueryJdbcException(ex); + } + } + } + + /** + * This method returns an instance of BigQueryArrowResultSet after adding it in the list of + * ArrowResultSetFinalizer + * + * @return BigQueryArrowResultSet + */ + static BigQueryArrowResultSet of( + Schema schema, + ArrowSchema arrowSchema, + long totalRows, + BigQueryStatement statement, + BlockingQueue buffer, + Thread ownedThread, + BigQuery bigQuery) + throws SQLException { + return new BigQueryArrowResultSet( + schema, + arrowSchema, + totalRows, + statement, + buffer, + null, + false, + -1, + -1, + ownedThread, + bigQuery); + } + + BigQueryArrowResultSet() throws SQLException { + super(null, null, null, false); + this.totalRows = 0; + this.buffer = null; + this.currentNestedBatch = null; + this.fromIndex = 0; + this.toIndexExclusive = 0; + this.ownedThread = null; + this.arrowDeserializer = null; + this.vectorSchemaRoot = null; + this.vectorLoader = null; + } + + static BigQueryArrowResultSet getNestedResultSet( + Schema schema, BigQueryArrowBatchWrapper nestedBatch, int fromIndex, int toIndexExclusive) + throws SQLException { + return new BigQueryArrowResultSet( + schema, null, -1, null, null, nestedBatch, true, fromIndex, toIndexExclusive, null, null); + } + + private class ArrowDeserializer implements AutoCloseable { + + /* Decoder object will be reused to avoid re-allocation and too much garbage collection. 
*/ + private ArrowDeserializer(ArrowSchema arrowSchema) throws IOException { + org.apache.arrow.vector.types.pojo.Schema schema = + MessageSerializer.deserializeSchema( + new org.apache.arrow.vector.ipc.ReadChannel( + new ByteArrayReadableSeekableByteChannel( + arrowSchema.getSerializedSchema().toByteArray()))); + List vectors = new ArrayList<>(); + List fields = schema.getFields(); + for (org.apache.arrow.vector.types.pojo.Field field : fields) { + vectors.add(field.createVector(allocator)); + } + vectorSchemaRoot = new VectorSchemaRoot(vectors); + vectorLoader = new VectorLoader(vectorSchemaRoot); + } + + private void deserializeArrowBatch(ArrowRecordBatch batch) throws SQLException { + LOG.finest("++enter++"); + try { + if (vectorSchemaRoot != null) { + // Clear vectorSchemaRoot before populating a new batch + vectorSchemaRoot.clear(); + } + org.apache.arrow.vector.ipc.message.ArrowRecordBatch deserializedBatch = + MessageSerializer.deserializeRecordBatch( + new ReadChannel( + new ByteArrayReadableSeekableByteChannel( + batch.getSerializedRecordBatch().toByteArray())), + allocator); + + vectorLoader.load(deserializedBatch); + // Release buffers from batch (they are still held in the vectors in root). + deserializedBatch.close(); + } catch (RuntimeException | IOException ex) { + throw new BigQueryJdbcException(ex); + } + } + + @Override + public void close() { + LOG.finest("++enter++"); + vectorSchemaRoot.close(); + allocator.close(); + } + } + + @Override + public boolean next() throws SQLException { + checkClosed(); + if (this.isNested) { + if (this.currentNestedBatch == null || this.currentNestedBatch.getNestedRecords() == null) { + throw new IllegalStateException( + "currentNestedBatch/JsonStringArrayList can not be null working with the nested record"); + } + if (this.nestedRowIndex < (this.toIndexExclusive - 1)) { + /* Check if there's a next record in the array which can be read */ + this.nestedRowIndex++; + return true; + } + this.afterLast = true; + return false; + } else { + /* Non nested */ + if (this.hasReachedEnd || this.isLast()) { + this.afterLast = true; + return false; + } + try { + if (this.currentBatchRowIndex == -1 + || this.currentBatchRowIndex == (this.vectorSchemaRoot.getRowCount() - 1)) { + /* Start of iteration or we have exhausted the current batch */ + // Advance the cursor. Potentially blocking operation. + BigQueryArrowBatchWrapper batchWrapper = this.buffer.take(); + if (batchWrapper.getException() != null) { + throw new BigQueryJdbcRuntimeException(batchWrapper.getException()); + } + if (batchWrapper.isLast()) { + /* Marks the end of the records */ + if (this.vectorSchemaRoot != null) { + // IMP: To avoid memory leak: clear vectorSchemaRoot as it still holds + // the last batch + this.vectorSchemaRoot.clear(); + } + this.hasReachedEnd = true; + this.rowCount++; + return false; + } + // Valid batch, process it + ArrowRecordBatch arrowBatch = batchWrapper.getCurrentArrowBatch(); + // Populates vectorSchemaRoot + this.arrowDeserializer.deserializeArrowBatch(arrowBatch); + // Pointing to the first row in this fresh batch + this.currentBatchRowIndex = 0; + this.rowCount++; + return true; + } + // There are rows left in the current batch. + else if (this.currentBatchRowIndex < this.vectorSchemaRoot.getRowCount()) { + this.currentBatchRowIndex++; + this.rowCount++; + return true; + } + } catch (InterruptedException | SQLException ex) { + throw new BigQueryJdbcException( + "Error occurred while advancing the cursor. 
This could happen when the connection is closed while the next method is being called.",
+            ex);
+      }
+    }
+    return false;
+  }
+
+  private Object getObjectInternal(int columnIndex) throws SQLException {
+    LOG.finest("++enter++");
+    checkClosed();
+    Object value;
+    if (this.isNested) {
+      // BigQuery doesn't support multidimensional arrays, so
+      // just the default row num column (1) and the actual column (2) are supposed to be read
+      if (!(columnIndex == 1 || columnIndex == 2)) {
+
+        throw new IllegalArgumentException(
+            "Column index is required to be 1 or 2 for nested arrays");
+      }
+      if (this.currentNestedBatch.getNestedRecords() == null) {
+        throw new IllegalStateException("JsonStringArrayList cannot be null for nested records.");
+      }
+      // For Arrays the first column is Index, ref:
+      // https://docs.oracle.com/javase/7/docs/api/java/sql/Array.html#getResultSet()
+      if (columnIndex == 1) {
+        return this.nestedRowIndex + 1;
+      }
+      // columnIndex = 2, return the data against the current nestedRowIndex
+      else {
+        value = this.currentNestedBatch.getNestedRecords().get(this.nestedRowIndex);
+      }
+    } else {
+      // get the current column
+      // SQL index to Java index
+      FieldVector currentColumn = this.vectorSchemaRoot.getVector(columnIndex - 1);
+      // get the current row
+      value = currentColumn.getObject(this.currentBatchRowIndex);
+    }
+    setWasNull(value);
+    return value;
+  }
+
+  @Override
+  public Object getObject(int columnIndex) throws SQLException {
+
+    // columnIndex is SQL index starting at 1
+    LOG.finest("++enter++");
+    checkClosed();
+    Object value = getObjectInternal(columnIndex);
+    if (value == null) {
+      return null;
+    }
+
+    if (this.isNested && columnIndex == 1) {
+      return this.bigQueryTypeCoercer.coerceTo(Integer.class, value);
+    }
+
+    if (this.isNested && columnIndex == 2) {
+      Field arrayField = this.schema.getFields().get(0);
+      if (isStruct(arrayField)) {
+        return new BigQueryArrowStruct(arrayField.getSubFields(), (JsonStringHashMap) value);
+      }
+      Class targetClass =
+          BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get(
+              arrayField.getType().getStandardType());
+      return this.bigQueryTypeCoercer.coerceTo(targetClass, value);
+    }
+
+    int fieldIndex = this.isNested ? 0 : columnIndex - 1;
+    Field fieldSchema = this.schemaFieldList.get(fieldIndex);
+    if (isArray(fieldSchema)) {
+      JsonStringArrayList originalList = (JsonStringArrayList) value;
+      StandardSQLTypeName elementTypeName = fieldSchema.getType().getStandardType();
+      if (elementTypeName == StandardSQLTypeName.NUMERIC
+          || elementTypeName == StandardSQLTypeName.BIGNUMERIC) {
+        JsonStringArrayList newList = new JsonStringArrayList<>();
+        for (Object item : originalList) {
+          if (item != null) {
+            newList.add(((BigDecimal) item).stripTrailingZeros());
+          } else {
+            newList.add(null);
+          }
+        }
+        return new BigQueryArrowArray(fieldSchema, newList);
+      } else if (elementTypeName == StandardSQLTypeName.RANGE) {
+        JsonStringArrayList newList = new JsonStringArrayList<>();
+        for (Object item : originalList) {
+          if (item != null) {
+            JsonStringHashMap rangeMap = (JsonStringHashMap) item;
+            Object start = rangeMap.get("start");
+            Object end = rangeMap.get("end");
+
+            Object representativeElement = (start != null) ?
start : end; + StandardSQLTypeName rangeElementType = getElementTypeFromValue(representativeElement); + + String formattedStart = formatRangeElement(start, rangeElementType); + String formattedEnd = formatRangeElement(end, rangeElementType); + + newList.add(String.format("[%s, %s)", formattedStart, formattedEnd)); + } else { + newList.add(null); + } + } + return new BigQueryArrowArray(fieldSchema, newList); + } + return new BigQueryArrowArray(fieldSchema, originalList); + } else if (isStruct(fieldSchema)) { + return new BigQueryArrowStruct(fieldSchema.getSubFields(), (JsonStringHashMap) value); + } else if (fieldSchema.getType().getStandardType() == StandardSQLTypeName.RANGE) { + JsonStringHashMap rangeMap = (JsonStringHashMap) value; + Object start = rangeMap.get("start"); + Object end = rangeMap.get("end"); + + Object representativeElement = (start != null) ? start : end; + StandardSQLTypeName elementType = getElementTypeFromValue(representativeElement); + + String formattedStart = formatRangeElement(start, elementType); + String formattedEnd = formatRangeElement(end, elementType); + + return String.format("[%s, %s)", formattedStart, formattedEnd); + } else { + if ((fieldSchema.getType().getStandardType() == StandardSQLTypeName.NUMERIC + || fieldSchema.getType().getStandardType() == StandardSQLTypeName.BIGNUMERIC) + && value instanceof BigDecimal) { + // The Arrow DecimalVector may return a BigDecimal with a larger scale than necessary. + // Strip trailing zeros to match JSON API and CLI output + return ((BigDecimal) value).stripTrailingZeros(); + } + Class targetClass = + BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + fieldSchema.getType().getStandardType()); + return this.bigQueryTypeCoercer.coerceTo(targetClass, value); + } + } + + private StandardSQLTypeName getElementTypeFromValue(Object element) { + if (element == null) { + return StandardSQLTypeName.STRING; + } + if (element instanceof Integer) { + return StandardSQLTypeName.DATE; + } + if (element instanceof Long) { + return StandardSQLTypeName.TIMESTAMP; + } + if (element instanceof LocalDateTime) { + return StandardSQLTypeName.DATETIME; + } + return StandardSQLTypeName.STRING; + } + + private String formatRangeElement(Object element, StandardSQLTypeName elementType) { + if (element == null) { + return "UNBOUNDED"; + } + switch (elementType) { + case DATE: + // Arrow gives DATE as an Integer (days since epoch) + Date date = this.bigQueryTypeCoercer.coerceTo(Date.class, (Integer) element); + return date.toString(); + case DATETIME: + // Arrow gives DATETIME as a LocalDateTime + Timestamp dtTs = + this.bigQueryTypeCoercer.coerceTo(Timestamp.class, (LocalDateTime) element); + return this.bigQueryTypeCoercer.coerceTo(String.class, dtTs); + case TIMESTAMP: + // Arrow gives TIMESTAMP as a Long (microseconds since epoch) + Timestamp ts = this.bigQueryTypeCoercer.coerceTo(Timestamp.class, (Long) element); + return this.bigQueryTypeCoercer.coerceTo(String.class, ts); + default: + // Fallback for any other unexpected type + return element.toString(); + } + } + + @Override + public void close() { + LOG.fine("Closing BigqueryArrowResultSet %s.", this); + this.isClosed = true; + if (ownedThread != null && !ownedThread.isInterrupted()) { + // interrupt the producer thread when result set is closed + ownedThread.interrupt(); + } + super.close(); + } + + @Override + public boolean isBeforeFirst() throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (this.isNested) { + return this.nestedRowIndex < this.fromIndex; 
+ } else { + return this.rowCount == 0; + } + } + + @Override + public boolean isAfterLast() throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + return this.afterLast; + } + + @Override + public boolean isFirst() throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (this.isNested) { + return this.nestedRowIndex == this.fromIndex; + } else { + return this.rowCount == 1; + } + } + + @Override + public boolean isLast() throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (this.isNested) { + return this.nestedRowIndex == this.toIndexExclusive - 1; + } else { + return this.rowCount == this.totalRows; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStruct.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStruct.java new file mode 100644 index 0000000000..33befe902b --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStruct.java @@ -0,0 +1,87 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryBaseArray.isArray; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import java.lang.reflect.Array; +import java.util.ArrayList; +import java.util.List; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; + +/** + * An implementation of {@link BigQueryBaseStruct} used to represent Struct values from Arrow data. 
+ */ +class BigQueryArrowStruct extends BigQueryBaseStruct { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryArrowStruct.class.getName()); + + private static final BigQueryTypeCoercer BIGQUERY_TYPE_COERCER = + BigQueryTypeCoercionUtility.INSTANCE; + + private final FieldList schema; + + private final JsonStringHashMap values; + + BigQueryArrowStruct(FieldList schema, JsonStringHashMap values) { + this.schema = schema; + this.values = values; + } + + @Override + FieldList getSchema() { + return this.schema; + } + + @Override + public Object[] getAttributes() { + LOG.finest("++enter++"); + int size = this.schema.size(); + Object[] attributes = (Object[]) Array.newInstance(Object.class, size); + + if (this.values == null) { + return attributes; + } + List structValues = new ArrayList<>(this.values.values()); + + for (int index = 0; index < size; index++) { + Field currentSchema = this.schema.get(index); + Object currentValue = structValues.get(index); + Object coercedValue = getValue(currentSchema, currentValue); + Array.set(attributes, index, coercedValue); + } + return attributes; + } + + private Object getValue(Field currentSchema, Object currentValue) { + LOG.finest("++enter++"); + if (isArray(currentSchema)) { + return new BigQueryArrowArray(currentSchema, (JsonStringArrayList) currentValue); + } else if (isStruct(currentSchema)) { + return new BigQueryArrowStruct( + currentSchema.getSubFields(), (JsonStringHashMap) currentValue); + } else { + Class targetClass = + BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + currentSchema.getType().getStandardType()); + return BIGQUERY_TYPE_COERCER.coerceTo(targetClass, currentValue); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseArray.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseArray.java new file mode 100644 index 0000000000..5fc2c15bbe --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseArray.java @@ -0,0 +1,172 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.Field.Mode.REPEATED; +import static com.google.cloud.bigquery.jdbc.BigQueryBaseStruct.isStruct; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.INVALID_ARRAY; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.lang.reflect.Array; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Struct; +import java.util.Arrays; +import java.util.Base64; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * An abstract implementation of {@link java.sql.Array} used as a base class for {@link + * BigQueryArrowArray} and {@link BigQueryJsonArray}. An Array value is a transaction-duration + * reference to an SQL ARRAY value. + */ +abstract class BigQueryBaseArray implements java.sql.Array { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryBaseArray.class.getName()); + + protected final boolean arrayOfStruct; + private boolean valid; + protected Field schema; + + BigQueryBaseArray(Field schema) { + this.schema = schema; + this.arrayOfStruct = isStruct(schema); + this.valid = true; + } + + @Override + public final String getBaseTypeName() { + LOG.finest("++enter++"); + ensureValid(); + return this.schema.getType().getStandardType().name(); + } + + @Override + public final int getBaseType() { + LOG.finest("++enter++"); + ensureValid(); + return BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.get( + schema.getType().getStandardType()); + } + + @Override + public final Object getArray(Map> map) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Override + public final Object getArray(long index, int count, Map> map) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Override + public final ResultSet getResultSet(Map> map) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Override + public final ResultSet getResultSet(long index, int count, Map> map) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + protected Object getArrayInternal(int fromIndex, int toIndexExclusive) { + LOG.finest("++enter++"); + Class targetClass = getTargetClass(); + int size = toIndexExclusive - fromIndex; + Object javaArray = Array.newInstance(targetClass, size); + + for (int index = 0; index < size; index++) { + Array.set(javaArray, index, getCoercedValue(fromIndex + index)); + } + return javaArray; + } + + protected void ensureValid() throws IllegalStateException { + LOG.finest("++enter++"); + if (!this.valid) { + throw new IllegalStateException(INVALID_ARRAY); + } + } + + protected void markInvalid() { + LOG.finest("++enter++"); + this.schema = null; + this.valid = false; + } + + protected Field singleElementSchema() { + LOG.finest("++enter++"); + return this.schema.toBuilder().setMode(Mode.REQUIRED).build(); + } + + protected Tuple createRange(long index, int count, int size) + throws IllegalStateException { + 
LOG.finest("++enter++"); + // jdbc array follows 1 based array indexing + long normalisedFromIndex = index - 1; + if (normalisedFromIndex + count > size) { + throw new IllegalArgumentException( + String.format( + "The array index is out of range: %d, number of elements: %d.", index + count, size)); + } + long toIndex = normalisedFromIndex + count; + return Tuple.of((int) normalisedFromIndex, (int) toIndex); + } + + protected Class getTargetClass() { + LOG.finest("++enter++"); + return this.arrayOfStruct + ? Struct.class + : BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + this.schema.getType().getStandardType()); + } + + abstract Object getCoercedValue(int index); + + static boolean isArray(Field currentSchema) { + LOG.finest("++enter++"); + return currentSchema.getMode() == REPEATED; + } + + @Override + public String toString() { + try { + Object[] array = (Object[]) getArray(); + if (array == null) { + return "null"; + } + if (this.schema.getType().getStandardType() == StandardSQLTypeName.BYTES) { + return Arrays.stream(array) + .map( + element -> + element == null ? "null" : Base64.getEncoder().encodeToString((byte[]) element)) + .collect(Collectors.joining(", ", "[", "]")); + } + return Arrays.deepToString(array); + } catch (SQLException e) { + return "[Error converting array to string: " + e.getMessage() + "]"; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSet.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSet.java new file mode 100644 index 0000000000..4ff4acad6b --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSet.java @@ -0,0 +1,617 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobId; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryConversionException; +import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionException; +import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionNotFoundException; +import java.io.InputStream; +import java.io.Reader; +import java.io.StringReader; +import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Calendar; + +public abstract class BigQueryBaseResultSet extends BigQueryNoOpsResultSet + implements BigQueryResultSet { + protected final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private BigQuery bigQuery; + private JobId jobId; + private String queryId; + private QueryStatistics queryStatistics; + protected final BigQueryStatement statement; + protected final Schema schema; + protected final FieldList schemaFieldList; + protected final boolean isNested; + protected boolean isClosed = false; + protected boolean wasNull = false; + protected final BigQueryTypeCoercer bigQueryTypeCoercer = BigQueryTypeCoercionUtility.INSTANCE; + + protected BigQueryBaseResultSet( + BigQuery bigQuery, BigQueryStatement statement, Schema schema, boolean isNested) { + this.bigQuery = bigQuery; + this.statement = statement; + this.schema = schema; + this.schemaFieldList = schema != null ? schema.getFields() : null; + this.isNested = isNested; + } + + public QueryStatistics getQueryStatistics() { + if (queryStatistics != null) { + return queryStatistics; + } + if (jobId == null || bigQuery == null) { + return null; + } + Job job = bigQuery.getJob(jobId); + queryStatistics = job != null ? 
job.getStatistics() : null;
+    return queryStatistics;
+  }
+
+  public void setJobId(JobId jobId) {
+    this.jobId = jobId;
+  }
+
+  public JobId getJobId() {
+    return jobId;
+  }
+
+  public void setQueryId(String queryId) {
+    this.queryId = queryId;
+  }
+
+  public String getQueryId() {
+    return queryId;
+  }
+
+  @Override
+  public void close() {
+    try {
+      if (statement != null && statement.isCloseOnCompletion() && !statement.hasMoreResults()) {
+        statement.close();
+      }
+    } catch (SQLException ex) {
+      LOG.warning("Exception during ResultSet.close() operation: %s", ex.getMessage());
+    }
+  }
+
+  protected SQLException createCoercionException(
+      int columnIndex, Class<?> targetClass, Exception cause) throws SQLException {
+    checkClosed();
+    StandardSQLTypeName type;
+    String typeName;
+
+    if (isNested) {
+      if (columnIndex == 1) {
+        return new BigQueryConversionException(
+            String.format("Cannot convert index column to type %s.", targetClass.getSimpleName()),
+            cause);
+      } else if (columnIndex == 2) {
+        Field arrayField = this.schema.getFields().get(0);
+        type = arrayField.getType().getStandardType();
+        typeName = type.name();
+      } else {
+        throw new SQLException(
+            "For a nested ResultSet from an Array, columnIndex must be 1 or 2.", cause);
+      }
+    } else {
+      Field field = this.schemaFieldList.get(columnIndex - 1);
+      type = field.getType().getStandardType();
+      typeName = type.name();
+    }
+    return new BigQueryConversionException(
+        String.format(
+            "Cannot convert value of type %s to type %s.", typeName, targetClass.getSimpleName()),
+        cause);
+  }
+
+  private StandardSQLTypeName getStandardSQLTypeName(int columnIndex) throws SQLException {
+    checkClosed();
+    if (isNested) {
+      if (columnIndex == 1) {
+        return StandardSQLTypeName.INT64;
+      } else if (columnIndex == 2) {
+        if (this.schema == null || this.schema.getFields().isEmpty()) {
+          throw new SQLException("Schema not available for nested result set.");
+        }
+        Field arrayField = this.schema.getFields().get(0);
+        return arrayField.getType().getStandardType();
+      } else {
+        throw new SQLException("For a nested ResultSet from an Array, columnIndex must be 1 or 2.");
+      }
+    } else {
+      if (this.schemaFieldList == null
+          || columnIndex > this.schemaFieldList.size()
+          || columnIndex < 1) {
+        throw new SQLException("Invalid column index: " + columnIndex);
+      }
+      Field field = this.schemaFieldList.get(columnIndex - 1);
+      return field.getType().getStandardType();
+    }
+  }
+
+  protected void setWasNull(Object val) {
+    this.wasNull = val == null;
+  }
+
+  @Override
+  public boolean wasNull() throws SQLException {
+    checkClosed();
+    return this.wasNull;
+  }
+
+  @Override
+  public ResultSetMetaData getMetaData() throws SQLException {
+    checkClosed();
+    if (this.isNested) {
+      return BigQueryResultSetMetadata.of(this.schemaFieldList, this.statement);
+    } else {
+      return BigQueryResultSetMetadata.of(this.schema.getFields(), this.statement);
+    }
+  }
+
+  @Override
+  public int getType() throws SQLException {
+    checkClosed();
+    return ResultSet.TYPE_FORWARD_ONLY;
+  }
+
+  @Override
+  public int getConcurrency() throws SQLException {
+    checkClosed();
+    return ResultSet.CONCUR_READ_ONLY;
+  }
+
+  @Override
+  public Statement getStatement() throws SQLException {
+    checkClosed();
+    return this.statement;
+  }
+
+  @Override
+  public int getHoldability() throws SQLException {
+    checkClosed();
+    return ResultSet.HOLD_CURSORS_OVER_COMMIT;
+  }
+
+  @Override
+  public boolean isClosed() {
+    return this.isClosed;
+  }
+
+  public abstract Object getObject(int columnIndex) throws SQLException;
+
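Because SQL NULL cannot be represented by a Java primitive, callers of the primitive getters below are expected to pair them with wasNull(). A small editorial sketch (the column name and hypothetical helper are illustrative):

static Long readNullableTotal(java.sql.ResultSet rs) throws java.sql.SQLException {
  long total = rs.getLong("total");    // returns 0 when the column was SQL NULL
  return rs.wasNull() ? null : total;  // wasNull() disambiguates 0 from NULL
}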
protected int getColumnIndex(String columnLabel) throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (columnLabel == null) { + throw new SQLException("Column label cannot be null"); + } + // use schema to get the column index, add 1 for SQL index + return this.schemaFieldList.getIndex(columnLabel) + 1; + } + + @Override + public String getString(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(String.class, value); + } catch (BigQueryJdbcCoercionNotFoundException e) { + throw createCoercionException(columnIndex, String.class, e); + } + } + + @Override + public boolean getBoolean(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + + StandardSQLTypeName type = getStandardSQLTypeName(columnIndex); + if (type == StandardSQLTypeName.GEOGRAPHY + || type == StandardSQLTypeName.RANGE + || type == StandardSQLTypeName.JSON) { + throw createCoercionException(columnIndex, Boolean.class, null); + } + + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Boolean.class, value); + } catch (BigQueryJdbcCoercionNotFoundException e) { + throw createCoercionException(columnIndex, Boolean.class, e); + } + } + + @Override + public byte getByte(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Byte.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Byte.class, e); + } + } + + @Override + public short getShort(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Short.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Short.class, e); + } + } + + @Override + public int getInt(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Integer.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Integer.class, e); + } + } + + @Override + public long getLong(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Long.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Long.class, e); + } + } + + @Override + public float getFloat(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Float.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Float.class, e); + } + } + + @Override + public double getDouble(int columnIndex) throws SQLException { + LOG.finest("++enter++"); + try { + Object value = getObject(columnIndex); + return this.bigQueryTypeCoercer.coerceTo(Double.class, value); + } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) { + throw createCoercionException(columnIndex, Double.class, e); + } + } + + @Override + public BigDecimal 
getBigDecimal(int columnIndex, int scale) throws SQLException {
+    LOG.finest("++enter++");
+    // This JDBC method is deprecated; the requested scale is not applied and the
+    // value's own scale is preserved.
+    try {
+      Object value = getObject(columnIndex);
+      return this.bigQueryTypeCoercer.coerceTo(BigDecimal.class, value);
+    } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) {
+      throw createCoercionException(columnIndex, BigDecimal.class, e);
+    }
+  }
+
+  @Override
+  public byte[] getBytes(int columnIndex) throws SQLException {
+    LOG.finest("++enter++");
+    try {
+      Object value = getObject(columnIndex);
+      return this.bigQueryTypeCoercer.coerceTo(byte[].class, value);
+    } catch (BigQueryJdbcCoercionNotFoundException e) {
+      throw createCoercionException(columnIndex, byte[].class, e);
+    }
+  }
+
+  @Override
+  public Date getDate(int columnIndex) throws SQLException {
+    LOG.finest("++enter++");
+    try {
+      Object value = getObject(columnIndex);
+      return this.bigQueryTypeCoercer.coerceTo(java.sql.Date.class, value);
+    } catch (BigQueryJdbcCoercionNotFoundException e) {
+      throw createCoercionException(columnIndex, java.sql.Date.class, e);
+    }
+  }
+
+  @Override
+  public Time getTime(int columnIndex) throws SQLException {
+    LOG.finest("++enter++");
+    StandardSQLTypeName type = getStandardSQLTypeName(columnIndex);
+    if (type == StandardSQLTypeName.INT64) {
+      throw createCoercionException(columnIndex, java.sql.Time.class, null);
+    }
+    try {
+      Object value = getObject(columnIndex);
+      return this.bigQueryTypeCoercer.coerceTo(java.sql.Time.class, value);
+    } catch (BigQueryJdbcCoercionNotFoundException e) {
+      throw createCoercionException(columnIndex, java.sql.Time.class, e);
+    }
+  }
+
+  @Override
+  public Timestamp getTimestamp(int columnIndex) throws SQLException {
+    LOG.finest("++enter++");
+    StandardSQLTypeName type = getStandardSQLTypeName(columnIndex);
+    if (type == StandardSQLTypeName.INT64) {
+      throw createCoercionException(columnIndex, java.sql.Timestamp.class, null);
+    }
+    try {
+      Object value = getObject(columnIndex);
+      return this.bigQueryTypeCoercer.coerceTo(java.sql.Timestamp.class, value);
+    } catch (BigQueryJdbcCoercionNotFoundException e) {
+      throw createCoercionException(columnIndex, java.sql.Timestamp.class, e);
+    }
+  }
+
+  @Override
+  public BigDecimal getBigDecimal(int columnIndex) throws SQLException {
+    LOG.finest("++enter++");
+    try {
+      Object value = getObject(columnIndex);
+      return this.bigQueryTypeCoercer.coerceTo(BigDecimal.class, value);
+    } catch (BigQueryJdbcCoercionNotFoundException | BigQueryJdbcCoercionException e) {
+      throw createCoercionException(columnIndex, BigDecimal.class, e);
+    }
+  }
+
+  @Override
+  public Array getArray(int columnIndex) throws SQLException {
+    LOG.finest("++enter++");
+    try {
+      return (Array) getObject(columnIndex);
+    } catch (ClassCastException e) {
+      throw createCoercionException(columnIndex, Array.class, e);
+    }
+  }
+
+  @Override
+  public Blob getBlob(int columnIndex) throws SQLException {
+    LOG.finest("++enter++");
+    byte[] value = getBytes(columnIndex);
+    // Guard against SQL NULL; SerialBlob rejects a null array.
+    return value == null ? null : new javax.sql.rowset.serial.SerialBlob(value);
+  }
+
+  @Override
+  public Clob getClob(int columnIndex) throws SQLException {
+    LOG.finest("++enter++");
+    String value = getString(columnIndex);
+    return value == null ? null : new javax.sql.rowset.serial.SerialClob(value.toCharArray());
+  }
+
+  @Override
+  public Reader getCharacterStream(int columnIndex) throws SQLException {
+    LOG.finest("++enter++");
+    String value = getString(columnIndex);
+    return value == null ?
null : new StringReader(value);
+  }
+
+  private InputStream getInputStream(String value, java.nio.charset.Charset charset) {
+    LOG.finest("++enter++");
+    if (value == null) {
+      return null;
+    }
+    return new java.io.ByteArrayInputStream(value.getBytes(charset));
+  }
+
+  @Override
+  public InputStream getAsciiStream(int columnIndex) throws SQLException {
+    LOG.finest("++enter++");
+    return getInputStream(getString(columnIndex), StandardCharsets.US_ASCII);
+  }
+
+  @Override
+  public InputStream getUnicodeStream(int columnIndex) throws SQLException {
+    LOG.finest("++enter++");
+    return getInputStream(getString(columnIndex), StandardCharsets.UTF_16LE);
+  }
+
+  @Override
+  public InputStream getBinaryStream(int columnIndex) throws SQLException {
+    LOG.finest("++enter++");
+    byte[] bytes = getBytes(columnIndex);
+    return bytes == null ? null : new java.io.ByteArrayInputStream(bytes);
+  }
+
+  @Override
+  public Date getDate(int columnIndex, Calendar cal) throws SQLException {
+    LOG.finest("++enter++");
+    Date date = getDate(columnIndex);
+    if (date == null) {
+      return null;
+    }
+    if (cal == null) {
+      // No calendar supplied; return the value as-is rather than null.
+      return date;
+    }
+    cal.setTimeInMillis(date.getTime());
+    return new java.sql.Date(cal.getTimeInMillis());
+  }
+
+  @Override
+  public Time getTime(int columnIndex, Calendar cal) throws SQLException {
+    LOG.finest("++enter++");
+    Time time = getTime(columnIndex);
+    if (time == null) {
+      return null;
+    }
+    if (cal == null) {
+      return time;
+    }
+    cal.setTimeInMillis(time.getTime());
+    return new java.sql.Time(cal.getTimeInMillis());
+  }
+
+  @Override
+  public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException {
+    LOG.finest("++enter++");
+    Timestamp timeStamp = getTimestamp(columnIndex);
+    if (timeStamp == null) {
+      return null;
+    }
+    if (cal == null) {
+      return timeStamp;
+    }
+    cal.setTimeInMillis(timeStamp.getTime());
+    return new java.sql.Timestamp(cal.getTimeInMillis());
+  }
+
+  @Override
+  public int findColumn(String columnLabel) throws SQLException {
+    LOG.finest("++enter++");
+    return getColumnIndex(columnLabel);
+  }
+
+  @Override
+  public Object getObject(String columnLabel) throws SQLException {
+    return getObject(getColumnIndex(columnLabel));
+  }
+
+  @Override
+  public String getString(String columnLabel) throws SQLException {
+    return getString(getColumnIndex(columnLabel));
+  }
+
+  @Override
+  public boolean getBoolean(String columnLabel) throws SQLException {
+    return getBoolean(getColumnIndex(columnLabel));
+  }
+
+  @Override
+  public byte getByte(String columnLabel) throws SQLException {
+    return getByte(getColumnIndex(columnLabel));
+  }
+
+  @Override
+  public short getShort(String columnLabel) throws SQLException {
+    return getShort(getColumnIndex(columnLabel));
+  }
+
+  @Override
+  public int getInt(String columnLabel) throws SQLException {
+    return getInt(getColumnIndex(columnLabel));
+  }
+
+  @Override
+  public long getLong(String columnLabel) throws SQLException {
+    return getLong(getColumnIndex(columnLabel));
+  }
+
+  @Override
+  public float getFloat(String columnLabel) throws SQLException {
+    return getFloat(getColumnIndex(columnLabel));
+  }
+
+  @Override
+  public double getDouble(String columnLabel) throws SQLException {
+    return getDouble(getColumnIndex(columnLabel));
+  }
+
+  @Override
+  public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException {
+    return getBigDecimal(getColumnIndex(columnLabel), scale);
+  }
+
+  @Override
+  public byte[] getBytes(String columnLabel) throws SQLException {
+    return getBytes(getColumnIndex(columnLabel));
+  }
+
+  @Override
+  public Date getDate(String columnLabel) throws SQLException
{ + return getDate(getColumnIndex(columnLabel)); + } + + @Override + public Time getTime(String columnLabel) throws SQLException { + return getTime(getColumnIndex(columnLabel)); + } + + @Override + public Timestamp getTimestamp(String columnLabel) throws SQLException { + return getTimestamp(getColumnIndex(columnLabel)); + } + + @Override + public InputStream getAsciiStream(String columnLabel) throws SQLException { + return getAsciiStream(getColumnIndex(columnLabel)); + } + + @Override + public InputStream getUnicodeStream(String columnLabel) throws SQLException { + return getUnicodeStream(getColumnIndex(columnLabel)); + } + + @Override + public InputStream getBinaryStream(String columnLabel) throws SQLException { + return getBinaryStream(getColumnIndex(columnLabel)); + } + + @Override + public BigDecimal getBigDecimal(String columnLabel) throws SQLException { + return getBigDecimal(getColumnIndex(columnLabel)); + } + + @Override + public Blob getBlob(String columnLabel) throws SQLException { + return getBlob(getColumnIndex(columnLabel)); + } + + @Override + public Clob getClob(String columnLabel) throws SQLException { + return getClob(getColumnIndex(columnLabel)); + } + + @Override + public Array getArray(String columnLabel) throws SQLException { + return getArray(getColumnIndex(columnLabel)); + } + + @Override + public Reader getCharacterStream(String columnLabel) throws SQLException { + return getCharacterStream(getColumnIndex(columnLabel)); + } + + @Override + public Date getDate(String columnLabel, Calendar cal) throws SQLException { + return getDate(getColumnIndex(columnLabel), cal); + } + + @Override + public Time getTime(String columnLabel, Calendar cal) throws SQLException { + return getTime(getColumnIndex(columnLabel), cal); + } + + @Override + public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException { + return getTimestamp(getColumnIndex(columnLabel), cal); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseStruct.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseStruct.java new file mode 100644 index 0000000000..ab9cf61cb8 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryBaseStruct.java @@ -0,0 +1,97 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.cloud.bigquery.StandardSQLTypeName.STRUCT;
+import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED;
+
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.FieldList;
+import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException;
+import java.sql.Date;
+import java.sql.SQLException;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.util.Base64;
+import java.util.Map;
+
+/**
+ * An abstract implementation of {@link java.sql.Struct} used as a base class for {@link
+ * BigQueryArrowStruct} and {@link BigQueryJsonStruct}. A Struct object contains a value for each
+ * attribute of the SQL structured type that it represents.
+ */
+abstract class BigQueryBaseStruct implements java.sql.Struct {
+  private static final BigQueryJdbcCustomLogger LOG =
+      new BigQueryJdbcCustomLogger(BigQueryBaseStruct.class.getName());
+
+  abstract FieldList getSchema();
+
+  @Override
+  public final String getSQLTypeName() throws SQLException {
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED);
+  }
+
+  @Override
+  public final Object[] getAttributes(Map<String, Class<?>> map) throws SQLException {
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED);
+  }
+
+  static boolean isStruct(Field currentSchema) {
+    LOG.finest("++enter++");
+    return currentSchema.getType().getStandardType() == STRUCT;
+  }
+
+  @Override
+  public String toString() {
+    try {
+      FieldList schema = getSchema();
+      Object[] attributes = getAttributes();
+
+      if (schema == null || attributes == null || schema.size() != attributes.length) {
+        return "{}";
+      }
+
+      StringBuilder sb = new StringBuilder("{");
+      for (int i = 0; i < attributes.length; i++) {
+        if (i > 0) {
+          sb.append(",");
+        }
+        String fieldName = schema.get(i).getName();
+        Object value = attributes[i];
+
+        sb.append("\"").append(fieldName.replace("\"", "\\\"")).append("\":");
+
+        if (value == null) {
+          sb.append("null");
+        } else if (value instanceof String || value instanceof org.apache.arrow.vector.util.Text) {
+          String stringValue = value.toString().replace("\"", "\\\"");
+          sb.append("\"").append(stringValue).append("\"");
+        } else if (value instanceof Timestamp || value instanceof Date || value instanceof Time) {
+          sb.append("\"").append(value.toString()).append("\"");
+        } else if (value instanceof byte[]) {
+          sb.append("\"").append(Base64.getEncoder().encodeToString((byte[]) value)).append("\"");
+        } else {
+          sb.append(value.toString());
+        }
+      }
+      sb.append("}");
+      return sb.toString();
+    } catch (SQLException e) {
+      return "{ \"error\": \"Error converting struct to string: " + e.getMessage() + "\" }";
+    }
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatement.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatement.java
new file mode 100644
index 0000000000..4de22e64e9
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatement.java
@@ -0,0 +1,1334 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import com.google.cloud.bigquery.jdbc.BigQueryParameterHandler.BigQueryStatementParameterType; +import com.google.common.annotations.VisibleForTesting; +import java.io.BufferedReader; +import java.io.CharArrayReader; +import java.io.FilterReader; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.PipedReader; +import java.io.Reader; +import java.io.StringReader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.CallableStatement; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLXML; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.util.Calendar; +import java.util.Map; + +class BigQueryCallableStatement extends BigQueryPreparedStatement implements CallableStatement { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + + BigQueryCallableStatement(BigQueryConnection connection, String callableStmtSql) + throws SQLException { + super(connection, callableStmtSql); + } + + @VisibleForTesting + protected String getCallableStatementSql() { + return this.currentQuery; + } + + @VisibleForTesting + protected BigQueryParameterHandler getParameterHandler() { + return this.parameterHandler; + } + + @Override + public Array getArray(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Array) { + return (Array) param; + } + if (param.getClass().isAssignableFrom(Array.class)) { + return getObject(arg0, Array.class); + } + return null; + } + + @Override + public Array getArray(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Array) { + return (Array) param; + } + if (param.getClass().isAssignableFrom(Array.class)) { + return getObject(arg0, Array.class); + } + return null; + } + + @Override + public BigDecimal getBigDecimal(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof BigDecimal) { + return (BigDecimal) param; + } + if (param.getClass().isAssignableFrom(BigDecimal.class)) { + return getObject(arg0, BigDecimal.class); + } + return null; + } + + @Override + public BigDecimal getBigDecimal(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof BigDecimal) { + return (BigDecimal) param; + } + if (param.getClass().isAssignableFrom(BigDecimal.class)) { + return getObject(arg0, BigDecimal.class); + } + return null; + } + + @Override + public BigDecimal getBigDecimal(int arg0, int arg1) throws SQLException { + LOG.finest("++enter++"); + return 
getBigDecimal(arg0); + } + + @Override + public Blob getBlob(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Blob) { + return (Blob) param; + } + if (param.getClass().isAssignableFrom(Blob.class)) { + return getObject(arg0, Blob.class); + } + return null; + } + + @Override + public Blob getBlob(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Blob) { + return (Blob) param; + } + if (param.getClass().isAssignableFrom(Blob.class)) { + return getObject(arg0, Blob.class); + } + return null; + } + + @Override + public boolean getBoolean(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Boolean) { + return (Boolean) param; + } + if (param.getClass().isAssignableFrom(Boolean.class)) { + return getObject(arg0, Boolean.class); + } + return false; + } + + @Override + public boolean getBoolean(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Boolean) { + return (Boolean) param; + } + if (param.getClass().isAssignableFrom(Boolean.class)) { + return getObject(arg0, Boolean.class); + } + return false; + } + + @Override + public byte getByte(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Byte) { + return (Byte) param; + } + if (param.getClass().isAssignableFrom(Byte.class)) { + return getObject(arg0, Byte.class); + } + return -1; + } + + @Override + public byte getByte(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Byte) { + return (Byte) param; + } + if (param.getClass().isAssignableFrom(Byte.class)) { + return getObject(arg0, Byte.class); + } + return -1; + } + + @Override + public byte[] getBytes(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof byte[] || param.getClass().isAssignableFrom(byte[].class)) { + return (byte[]) param; + } + if (param instanceof String) { + return param.toString().getBytes(); + } + return null; + } + + @Override + public byte[] getBytes(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof byte[] || param.getClass().isAssignableFrom(byte[].class)) { + return (byte[]) param; + } + if (param instanceof String) { + return param.toString().getBytes(); + } + return null; + } + + // FilterReader, InputStreamReader, PipedReader, StringReader + @Override + public Reader getCharacterStream(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof String || param.getClass().isAssignableFrom(String.class)) { + return new StringReader(param.toString()); + } + + if (param instanceof BufferedReader) { + return (BufferedReader) param; + } + if (param.getClass().isAssignableFrom(BufferedReader.class)) { + return getObject(arg0, BufferedReader.class); + } + + if (param instanceof CharArrayReader) { + return (CharArrayReader) param; + } + if (param.getClass().isAssignableFrom(CharArrayReader.class)) { + return getObject(arg0, CharArrayReader.class); + } + + if (param instanceof 
FilterReader) { + return (FilterReader) param; + } + if (param.getClass().isAssignableFrom(FilterReader.class)) { + return getObject(arg0, FilterReader.class); + } + + if (param instanceof InputStreamReader) { + return (InputStreamReader) param; + } + if (param.getClass().isAssignableFrom(InputStreamReader.class)) { + return getObject(arg0, InputStreamReader.class); + } + + if (param instanceof PipedReader) { + return (PipedReader) param; + } + if (param.getClass().isAssignableFrom(PipedReader.class)) { + return getObject(arg0, PipedReader.class); + } + + if (param instanceof StringReader) { + return (StringReader) param; + } + if (param.getClass().isAssignableFrom(StringReader.class)) { + return getObject(arg0, StringReader.class); + } + return null; + } + + @Override + public Reader getCharacterStream(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof String || param.getClass().isAssignableFrom(String.class)) { + return new StringReader(param.toString()); + } + + if (param instanceof BufferedReader) { + return (BufferedReader) param; + } + if (param.getClass().isAssignableFrom(BufferedReader.class)) { + return getObject(arg0, BufferedReader.class); + } + + if (param instanceof CharArrayReader) { + return (CharArrayReader) param; + } + if (param.getClass().isAssignableFrom(CharArrayReader.class)) { + return getObject(arg0, CharArrayReader.class); + } + + if (param instanceof FilterReader) { + return (FilterReader) param; + } + if (param.getClass().isAssignableFrom(FilterReader.class)) { + return getObject(arg0, FilterReader.class); + } + + if (param instanceof InputStreamReader) { + return (InputStreamReader) param; + } + if (param.getClass().isAssignableFrom(InputStreamReader.class)) { + return getObject(arg0, InputStreamReader.class); + } + + if (param instanceof PipedReader) { + return (PipedReader) param; + } + if (param.getClass().isAssignableFrom(PipedReader.class)) { + return getObject(arg0, PipedReader.class); + } + + if (param instanceof StringReader) { + return (StringReader) param; + } + if (param.getClass().isAssignableFrom(StringReader.class)) { + return getObject(arg0, StringReader.class); + } + return null; + } + + @Override + public Clob getClob(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Clob) { + return (Clob) param; + } + if (param.getClass().isAssignableFrom(Clob.class)) { + return getObject(arg0, Clob.class); + } + return null; + } + + @Override + public Clob getClob(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Clob) { + return (Clob) param; + } + if (param.getClass().isAssignableFrom(Clob.class)) { + return getObject(arg0, Clob.class); + } + return null; + } + + @Override + public Date getDate(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Date) { + return (Date) param; + } + if (param.getClass().isAssignableFrom(Date.class)) { + return getObject(arg0, Date.class); + } + return null; + } + + @Override + public Date getDate(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Date) { + return (Date) param; + } + if (param.getClass().isAssignableFrom(Date.class)) { + return getObject(arg0, Date.class); + 
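A sketch of how these parameter getters behave once values have been bound on the statement (editorial; "call" and the parameter name are illustrative, and the getters read back previously set values rather than query results):

static java.sql.Date roundTripDate(java.sql.CallableStatement call) throws java.sql.SQLException {
  call.setDate("start_date", java.sql.Date.valueOf("2025-01-01"));  // bind an IN parameter
  return call.getDate("start_date");                                // read the bound value back
}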
}
+    return null;
+  }
+
+  @Override
+  public Date getDate(int arg0, Calendar arg1) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Date) {
+      Date dateParam = (Date) param;
+      if (arg1 == null) {
+        // No calendar supplied; return the bound value as-is rather than null.
+        return dateParam;
+      }
+      arg1.setTime(dateParam);
+      return new Date(arg1.getTimeInMillis());
+    }
+    if (param.getClass().isAssignableFrom(Date.class)) {
+      Date dateObj = getObject(arg0, Date.class);
+      if (arg1 != null) {
+        arg1.setTime(dateObj);
+        return new Date(arg1.getTimeInMillis());
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public Date getDate(String arg0, Calendar arg1) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Date) {
+      Date dateParam = (Date) param;
+      if (arg1 == null) {
+        return dateParam;
+      }
+      arg1.setTime(dateParam);
+      return new Date(arg1.getTimeInMillis());
+    }
+    if (param.getClass().isAssignableFrom(Date.class)) {
+      Date dateObj = getObject(arg0, Date.class);
+      if (arg1 != null) {
+        arg1.setTime(dateObj);
+        return new Date(arg1.getTimeInMillis());
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public double getDouble(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Double) {
+      return (Double) param;
+    }
+    if (param.getClass().isAssignableFrom(Double.class)) {
+      return getObject(arg0, Double.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public double getDouble(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Double) {
+      return (Double) param;
+    }
+    if (param.getClass().isAssignableFrom(Double.class)) {
+      return getObject(arg0, Double.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public float getFloat(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Float) {
+      return (Float) param;
+    }
+    if (param.getClass().isAssignableFrom(Float.class)) {
+      return getObject(arg0, Float.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public float getFloat(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Float) {
+      return (Float) param;
+    }
+    if (param.getClass().isAssignableFrom(Float.class)) {
+      return getObject(arg0, Float.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public int getInt(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Integer) {
+      return (Integer) param;
+    }
+    if (param.getClass().isAssignableFrom(Integer.class)) {
+      return getObject(arg0, Integer.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public int getInt(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Integer) {
+      return (Integer) param;
+    }
+    if (param.getClass().isAssignableFrom(Integer.class)) {
+      return getObject(arg0, Integer.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public long getLong(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Long) {
+      return (Long) param;
+    }
+    if (param.getClass().isAssignableFrom(Long.class)) {
+      return getObject(arg0, Long.class);
+    }
+    if (param instanceof Integer) {
+      // An Integer cannot be cast to Long directly; widen explicitly.
+      return ((Integer) param).longValue();
+    }
+    if
(param.getClass().isAssignableFrom(Integer.class)) {
+      return getObject(arg0, Integer.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public long getLong(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Long) {
+      return (Long) param;
+    }
+    if (param.getClass().isAssignableFrom(Long.class)) {
+      return getObject(arg0, Long.class);
+    }
+    if (param instanceof Integer) {
+      // An Integer cannot be cast to Long directly; widen explicitly.
+      return ((Integer) param).longValue();
+    }
+    if (param.getClass().isAssignableFrom(Integer.class)) {
+      return getObject(arg0, Integer.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public Reader getNCharacterStream(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    return getCharacterStream(arg0);
+  }
+
+  @Override
+  public Reader getNCharacterStream(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    return getCharacterStream(arg0);
+  }
+
+  @Override
+  public NClob getNClob(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof NClob) {
+      return (NClob) param;
+    }
+    if (param.getClass().isAssignableFrom(NClob.class)) {
+      return getObject(arg0, NClob.class);
+    }
+    return null;
+  }
+
+  @Override
+  public NClob getNClob(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof NClob) {
+      return (NClob) param;
+    }
+    if (param.getClass().isAssignableFrom(NClob.class)) {
+      return getObject(arg0, NClob.class);
+    }
+    return null;
+  }
+
+  @Override
+  public String getNString(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof String) {
+      return param.toString();
+    }
+    if (param.getClass().isAssignableFrom(String.class)) {
+      return getObject(arg0, String.class);
+    }
+    return null;
+  }
+
+  @Override
+  public String getNString(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof String) {
+      return param.toString();
+    }
+    if (param.getClass().isAssignableFrom(String.class)) {
+      return getObject(arg0, String.class);
+    }
+    return null;
+  }
+
+  @Override
+  public Object getObject(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    return this.parameterHandler.getParameter(arg0);
+  }
+
+  @Override
+  public Object getObject(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    return this.parameterHandler.getParameter(arg0);
+  }
+
+  @Override
+  public Object getObject(int arg0, Map<String, Class<?>> arg1) throws SQLException {
+    LOG.finest("++enter++");
+    String paramKey = this.parameterHandler.getSqlType(arg0).name();
+    if (arg1.containsKey(paramKey)) {
+      Class<?> argJavaType = arg1.get(paramKey);
+      Class<?> paramJavaType = this.parameterHandler.getType(arg0);
+      if (paramJavaType.isAssignableFrom(argJavaType)) {
+        return this.parameterHandler.getParameter(arg0);
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public Object getObject(String arg0, Map<String, Class<?>> arg1) throws SQLException {
+    LOG.finest("++enter++");
+    String paramKey = this.parameterHandler.getSqlType(arg0).name();
+    if (arg1.containsKey(paramKey)) {
+      Class<?> argJavaType = arg1.get(paramKey);
+      Class<?> paramJavaType = this.parameterHandler.getType(arg0);
+      if (paramJavaType.isAssignableFrom(argJavaType)) {
+        return this.parameterHandler.getParameter(arg0);
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public <T> T getObject(int arg0, Class<T> arg1) throws SQLException {
+    LOG.finest("++enter++");
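The typed accessor below can be exercised as in this editorial sketch ("call" is hypothetical, with parameter 1 bound to a String; an incompatible target type yields null under this implementation):

static void typedAccess(java.sql.CallableStatement call) throws java.sql.SQLException {
  String s = call.getObject(1, String.class);    // target matches the stored Java type
  Integer i = call.getObject(1, Integer.class);  // incompatible target type -> null
}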
Class<?> javaType = this.parameterHandler.getType(arg0);
+    if (javaType.isAssignableFrom(arg1)) {
+      return (T) this.parameterHandler.getParameter(arg0);
+    }
+    return null;
+  }
+
+  @Override
+  public <T> T getObject(String arg0, Class<T> arg1) throws SQLException {
+    LOG.finest("++enter++");
+    Class<?> javaType = this.parameterHandler.getType(arg0);
+    if (javaType.isAssignableFrom(arg1)) {
+      return (T) this.parameterHandler.getParameter(arg0);
+    }
+    return null;
+  }
+
+  @Override
+  public Ref getRef(int arg0) throws SQLException {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  @Override
+  public Ref getRef(String arg0) throws SQLException {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  @Override
+  public RowId getRowId(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof RowId) {
+      return (RowId) param;
+    }
+    if (param.getClass().isAssignableFrom(RowId.class)) {
+      return getObject(arg0, RowId.class);
+    }
+    return null;
+  }
+
+  @Override
+  public RowId getRowId(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof RowId) {
+      return (RowId) param;
+    }
+    if (param.getClass().isAssignableFrom(RowId.class)) {
+      return getObject(arg0, RowId.class);
+    }
+    return null;
+  }
+
+  @Override
+  public SQLXML getSQLXML(int arg0) throws SQLException {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  @Override
+  public SQLXML getSQLXML(String arg0) throws SQLException {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  @Override
+  public short getShort(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Short) {
+      return (Short) param;
+    }
+    if (param.getClass().isAssignableFrom(Short.class)) {
+      return getObject(arg0, Short.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public short getShort(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Short) {
+      return (Short) param;
+    }
+    if (param.getClass().isAssignableFrom(Short.class)) {
+      return getObject(arg0, Short.class);
+    }
+    return 0;
+  }
+
+  @Override
+  public String getString(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof String) {
+      return param.toString();
+    }
+    if (param.getClass().isAssignableFrom(String.class)) {
+      return getObject(arg0, String.class);
+    }
+    return null;
+  }
+
+  @Override
+  public String getString(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof String) {
+      return param.toString();
+    }
+    if (param.getClass().isAssignableFrom(String.class)) {
+      return getObject(arg0, String.class);
+    }
+    return null;
+  }
+
+  @Override
+  public Time getTime(int arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Time) {
+      return (Time) param;
+    }
+    if (param.getClass().isAssignableFrom(Time.class)) {
+      return getObject(arg0, Time.class);
+    }
+    return null;
+  }
+
+  @Override
+  public Time getTime(String arg0) throws SQLException {
+    LOG.finest("++enter++");
+    Object param = this.parameterHandler.getParameter(arg0);
+    if (param instanceof Time) {
+      return (Time) param;
+    }
+    if
(param.getClass().isAssignableFrom(Time.class)) { + return getObject(arg0, Time.class); + } + return null; + } + + @Override + public Time getTime(int arg0, Calendar arg1) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Time) { + Time timeParam = (Time) param; + if (arg1 != null) { + arg1.setTimeInMillis(timeParam.getTime()); + return new Time(arg1.getTimeInMillis()); + } + } + if (param.getClass().isAssignableFrom(Time.class)) { + Time timeObj = getObject(arg0, Time.class); + if (arg1 != null) { + arg1.setTimeInMillis(timeObj.getTime()); + return new Time(arg1.getTimeInMillis()); + } + } + return null; + } + + @Override + public Time getTime(String arg0, Calendar arg1) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Time) { + Time timeParam = (Time) param; + if (arg1 != null) { + arg1.setTimeInMillis(timeParam.getTime()); + return new Time(arg1.getTimeInMillis()); + } + } + if (param.getClass().isAssignableFrom(Time.class)) { + Time timeObj = getObject(arg0, Time.class); + if (arg1 != null) { + arg1.setTimeInMillis(timeObj.getTime()); + return new Time(arg1.getTimeInMillis()); + } + } + return null; + } + + @Override + public Timestamp getTimestamp(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Timestamp) { + return (Timestamp) param; + } + if (param.getClass().isAssignableFrom(Timestamp.class)) { + return getObject(arg0, Timestamp.class); + } + return null; + } + + @Override + public Timestamp getTimestamp(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Timestamp) { + return (Timestamp) param; + } + if (param.getClass().isAssignableFrom(Timestamp.class)) { + return getObject(arg0, Timestamp.class); + } + return null; + } + + @Override + public Timestamp getTimestamp(int arg0, Calendar arg1) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Timestamp) { + Timestamp timestampParam = (Timestamp) param; + if (arg1 != null) { + arg1.setTimeInMillis(timestampParam.getTime()); + return new Timestamp(arg1.getTimeInMillis()); + } + } + if (param.getClass().isAssignableFrom(Timestamp.class)) { + Timestamp timestampObj = getObject(arg0, Timestamp.class); + if (arg1 != null) { + arg1.setTimeInMillis(timestampObj.getTime()); + return new Timestamp(arg1.getTimeInMillis()); + } + } + return null; + } + + @Override + public Timestamp getTimestamp(String arg0, Calendar arg1) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof Timestamp) { + Timestamp timestampParam = (Timestamp) param; + if (arg1 != null) { + arg1.setTimeInMillis(timestampParam.getTime()); + return new Timestamp(arg1.getTimeInMillis()); + } + } + if (param.getClass().isAssignableFrom(Timestamp.class)) { + Timestamp timestampObj = getObject(arg0, Timestamp.class); + if (arg1 != null) { + arg1.setTimeInMillis(timestampObj.getTime()); + return new Timestamp(arg1.getTimeInMillis()); + } + } + return null; + } + + @Override + public URL getURL(int arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof URL) { + return (URL) param; + } + if 
(param.getClass().isAssignableFrom(URL.class)) { + return getObject(arg0, URL.class); + } + return null; + } + + @Override + public URL getURL(String arg0) throws SQLException { + LOG.finest("++enter++"); + Object param = this.parameterHandler.getParameter(arg0); + if (param instanceof URL) { + return (URL) param; + } + if (param.getClass().isAssignableFrom(URL.class)) { + return getObject(arg0, URL.class); + } + return null; + } + + @Override + public void registerOutParameter(int paramIndex, int sqlType) throws SQLException { + LOG.finest("++enter++"); + LOG.finest("registerOutParameter: paramIndex %s, sqlType %s", paramIndex, sqlType); + checkClosed(); + try { + this.parameterHandler.setParameter( + paramIndex, + null, + BigQueryJdbcTypeMappings.getJavaType(sqlType), + BigQueryParameterHandler.BigQueryStatementParameterType.OUT, + -1); + } catch (Exception e) { + throw new SQLException(e); + } + } + + @Override + public void registerOutParameter(String paramName, int sqlType) throws SQLException { + LOG.finest("++enter++"); + LOG.finest("registerOutParameter: paramName %s, sqlType %s", paramName, sqlType); + checkClosed(); + try { + this.parameterHandler.setParameter( + paramName, + null, + BigQueryJdbcTypeMappings.getJavaType(sqlType), + BigQueryParameterHandler.BigQueryStatementParameterType.OUT, + -1); + } catch (Exception e) { + throw new SQLException(e); + } + } + + @Override + public void registerOutParameter(int paramIndex, int sqlType, int scale) throws SQLException { + LOG.finest("++enter++"); + LOG.finest( + "registerOutParameter: paramIndex %s, sqlType %s, scale %s", paramIndex, sqlType, scale); + checkClosed(); + if (sqlType != Types.NUMERIC && sqlType != Types.DECIMAL) { + throw new IllegalArgumentException( + String.format("registerOutParameter: Invalid sqlType passed in %s", sqlType)); + } + try { + this.parameterHandler.setParameter( + paramIndex, + null, + BigQueryJdbcTypeMappings.getJavaType(sqlType), + BigQueryParameterHandler.BigQueryStatementParameterType.OUT, + scale); + } catch (Exception e) { + throw new SQLException(e); + } + } + + @Override + public void registerOutParameter(int paramIndex, int sqlType, String typeName) + throws SQLException { + LOG.finest("++enter++"); + LOG.finest( + "registerOutParameter: paramIndex %s, sqlType %s, typeName %s", + paramIndex, sqlType, typeName); + // fully qualified sql typeName is not supported by the driver and hence ignored. + registerOutParameter(paramIndex, sqlType); + } + + @Override + public void registerOutParameter(String paramName, int sqlType, int scale) throws SQLException { + LOG.finest("++enter++"); + LOG.finest( + "registerOutParameter: paramIndex %s, sqlType %s, scale %s", paramName, sqlType, scale); + checkClosed(); + if (sqlType != Types.NUMERIC && sqlType != Types.DECIMAL) { + throw new IllegalArgumentException( + String.format("registerOutParameter: Invalid sqlType passed in %s", sqlType)); + } + try { + this.parameterHandler.setParameter( + paramName, + null, + BigQueryJdbcTypeMappings.getJavaType(sqlType), + BigQueryParameterHandler.BigQueryStatementParameterType.OUT, + scale); + } catch (Exception e) { + throw new SQLException(e); + } + } + + @Override + public void registerOutParameter(String paramName, int sqlType, String typeName) + throws SQLException { + LOG.finest("++enter++"); + LOG.finest( + "registerOutParameter: paramIndex %s, sqlType %s, typeName %s", + paramName, sqlType, typeName); + // fully qualified sql typeName is not supported by the driver and hence ignored. 
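An end-to-end OUT-parameter sketch for the registration methods above (editorial; "conn" and the procedure name are illustrative, and it assumes a BigQuery stored procedure that supplies the OUT value on execution):

static java.math.BigDecimal callProc(java.sql.Connection conn) throws java.sql.SQLException {
  java.sql.CallableStatement call = conn.prepareCall("CALL my_dataset.my_proc(?)");
  call.registerOutParameter(1, java.sql.Types.NUMERIC, 2);  // NUMERIC OUT with scale 2
  call.execute();
  return call.getBigDecimal(1);  // value produced by the procedure
}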
+ registerOutParameter(paramName, sqlType); + } + + @Override + public void setAsciiStream(String arg0, InputStream arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setAsciiStream(String arg0, InputStream arg1, int arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setAsciiStream(String arg0, InputStream arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setBigDecimal(String arg0, BigDecimal arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setBinaryStream(String arg0, InputStream arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setBinaryStream(String arg0, InputStream arg1, int arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setBinaryStream(String arg0, InputStream arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setBlob(String arg0, Blob arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setBlob(String arg0, InputStream arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setBlob(String arg0, InputStream arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + } + + @Override + public void setBoolean(String arg0, boolean arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, Boolean.class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setByte(String arg0, byte arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, Byte.class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setBytes(String arg0, byte[] arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, byte[].class, BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setCharacterStream(String arg0, Reader arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setCharacterStream(String arg0, Reader arg1, int arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setCharacterStream(String arg0, Reader arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setClob(String arg0, Clob arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setClob(String arg0, Reader arg1) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setClob(String arg0, Reader arg1, long arg2) throws SQLException { + // TODO: NOT IMPLEMENTED (data type not supported) + + } + + @Override + public void setDate(String arg0, Date arg1) throws SQLException { + this.parameterHandler.setParameter( + arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0); + } + + @Override + public void setDate(String arg0, Date arg1, Calendar arg2) throws SQLException { + arg2.setTimeInMillis(arg1.getTime()); + this.parameterHandler.setParameter( + arg0, + new 
Date(arg2.getTimeInMillis()),
+ arg1.getClass(),
+ BigQueryStatementParameterType.IN,
+ 0);
+ }
+
+ @Override
+ public void setDouble(String arg0, double arg1) throws SQLException {
+ this.parameterHandler.setParameter(
+ arg0, arg1, Double.class, BigQueryStatementParameterType.IN, 0);
+ }
+
+ @Override
+ public void setFloat(String arg0, float arg1) throws SQLException {
+ this.parameterHandler.setParameter(
+ arg0, arg1, Float.class, BigQueryStatementParameterType.IN, 0);
+ }
+
+ @Override
+ public void setInt(String arg0, int arg1) throws SQLException {
+ this.parameterHandler.setParameter(
+ arg0, arg1, Integer.class, BigQueryStatementParameterType.IN, 0);
+ }
+
+ @Override
+ public void setLong(String arg0, long arg1) throws SQLException {
+ this.parameterHandler.setParameter(
+ arg0, arg1, Long.class, BigQueryStatementParameterType.IN, 0);
+ }
+
+ @Override
+ public void setNCharacterStream(String arg0, Reader arg1) throws SQLException {
+ // TODO: NOT IMPLEMENTED (data type not supported)
+ }
+
+ @Override
+ public void setNCharacterStream(String arg0, Reader arg1, long arg2) throws SQLException {
+ // TODO: NOT IMPLEMENTED (data type not supported)
+ }
+
+ @Override
+ public void setNClob(String arg0, NClob arg1) throws SQLException {
+ // TODO: NOT IMPLEMENTED (data type not supported)
+ }
+
+ @Override
+ public void setNClob(String arg0, Reader arg1) throws SQLException {
+ // TODO: NOT IMPLEMENTED (data type not supported)
+ }
+
+ @Override
+ public void setNClob(String arg0, Reader arg1, long arg2) throws SQLException {
+ // TODO: NOT IMPLEMENTED (data type not supported)
+ }
+
+ @Override
+ public void setNString(String arg0, String arg1) throws SQLException {
+ this.parameterHandler.setParameter(
+ arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0);
+ }
+
+ @Override
+ public void setNull(String arg0, int arg1) throws SQLException {
+ // TODO: NOT IMPLEMENTED
+ }
+
+ @Override
+ public void setNull(String arg0, int arg1, String arg2) throws SQLException {
+ // TODO: NOT IMPLEMENTED
+ }
+
+ @Override
+ public void setObject(String arg0, Object arg1) throws SQLException {
+ this.parameterHandler.setParameter(
+ arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0);
+ }
+
+ @Override
+ public void setObject(String arg0, Object arg1, int arg2) throws SQLException {
+ this.parameterHandler.setParameter(
+ arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0);
+ StandardSQLTypeName sqlType = this.parameterHandler.getSqlType(arg0);
+ if (BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.containsKey(sqlType)) {
+ int javaSqlType = BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.get(sqlType);
+ if (javaSqlType != arg2) {
+ throw new BigQueryJdbcSqlFeatureNotSupportedException(
+ String.format("Unsupported sql type: %s", arg2));
+ }
+ } else {
+ throw new BigQueryJdbcSqlFeatureNotSupportedException(
+ String.format("parameter sql type not supported: %s", sqlType));
+ }
+ }
+
+ @Override
+ public void setObject(String arg0, Object arg1, int arg2, int arg3) throws SQLException {
+ this.parameterHandler.setParameter(
+ arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, arg3);
+ StandardSQLTypeName sqlType = this.parameterHandler.getSqlType(arg0);
+ if (BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.containsKey(sqlType)) {
+ int javaSqlType = BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.get(sqlType);
+ if (javaSqlType != arg2) {
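+ // The caller-supplied java.sql.Types code must match the type inferred from the value.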
+ throw new BigQueryJdbcSqlFeatureNotSupportedException(
+ String.format("Unsupported sql type: %s", arg2));
+ }
+ } else {
+ throw new BigQueryJdbcSqlFeatureNotSupportedException(
+ String.format("parameter sql type not supported: %s", sqlType));
+ }
+ }
+
+ @Override
+ public void setRowId(String arg0, RowId arg1) throws SQLException {
+ // TODO: NOT IMPLEMENTED (data type not supported)
+ }
+
+ @Override
+ public void setSQLXML(String arg0, SQLXML arg1) throws SQLException {
+ // TODO: NOT IMPLEMENTED (data type not supported)
+ }
+
+ @Override
+ public void setShort(String arg0, short arg1) throws SQLException {
+ this.parameterHandler.setParameter(
+ arg0, arg1, Short.class, BigQueryStatementParameterType.IN, 0);
+ }
+
+ @Override
+ public void setString(String arg0, String arg1) throws SQLException {
+ this.parameterHandler.setParameter(
+ arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0);
+ }
+
+ @Override
+ public void setTime(String arg0, Time arg1) throws SQLException {
+ this.parameterHandler.setParameter(
+ arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0);
+ }
+
+ @Override
+ public void setTime(String arg0, Time arg1, Calendar arg2) throws SQLException {
+ arg2.setTimeInMillis(arg1.getTime());
+ this.parameterHandler.setParameter(
+ arg0,
+ new Time(arg2.getTimeInMillis()),
+ arg1.getClass(),
+ BigQueryStatementParameterType.IN,
+ 0);
+ }
+
+ @Override
+ public void setTimestamp(String arg0, Timestamp arg1) throws SQLException {
+ this.parameterHandler.setParameter(
+ arg0, arg1, arg1.getClass(), BigQueryStatementParameterType.IN, 0);
+ }
+
+ @Override
+ public void setTimestamp(String arg0, Timestamp arg1, Calendar arg2) throws SQLException {
+ arg2.setTimeInMillis(arg1.getTime());
+ this.parameterHandler.setParameter(
+ arg0,
+ new Timestamp(arg2.getTimeInMillis()),
+ arg1.getClass(),
+ BigQueryStatementParameterType.IN,
+ 0);
+ }
+
+ @Override
+ public void setURL(String arg0, URL arg1) throws SQLException {
+ // TODO: NOT IMPLEMENTED (data type not supported)
+ }
+
+ @Override
+ public boolean wasNull() throws SQLException {
+ // TODO: NOT IMPLEMENTED
+ return false;
+ }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCoercion.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCoercion.java new file mode 100644 index 0000000000..6265af0dec --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryCoercion.java @@ -0,0 +1,44 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.api.core.InternalApi;
+import java.util.function.Function;
+
+/**
+ * A {@link BigQueryCoercion} is responsible for coercing one type to another.
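For example, a coercion from {@code String} to {@code Integer} can be declared as {@code BigQueryCoercion<String, Integer> toInt = Integer::valueOf;}.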
An implementation of
+ * {@link BigQueryCoercion} is used to extend the behaviour of {@link BigQueryTypeCoercer} for the
+ * coercion of one user defined type to another.
+ *
+ * @param <INPUT> represents the source type
+ * @param <OUTPUT> represents the target type
+ */
+@InternalApi
+interface BigQueryCoercion<INPUT, OUTPUT> extends Function<INPUT, OUTPUT> {
+ /**
+ * Coerce the provided value to the desired type.
+ *
+ * @param value the input value.
+ * @return the output value after coercion.
+ */
+ OUTPUT coerce(INPUT value);
+
+ @Override
+ default OUTPUT apply(INPUT input) {
+ return coerce(input);
+ }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnection.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnection.java new file mode 100644 index 0000000000..17471e2522 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnection.java @@ -0,0 +1,1086 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.api.gax.core.CredentialsProvider;
+import com.google.api.gax.core.FixedCredentialsProvider;
+import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
+import com.google.api.gax.retrying.RetrySettings;
+import com.google.api.gax.rpc.FixedHeaderProvider;
+import com.google.api.gax.rpc.HeaderProvider;
+import com.google.api.gax.rpc.TransportChannelProvider;
+import com.google.auth.Credentials;
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryException;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.ConnectionProperty;
+import com.google.cloud.bigquery.DatasetId;
+import com.google.cloud.bigquery.Job;
+import com.google.cloud.bigquery.JobInfo;
+import com.google.cloud.bigquery.QueryJobConfiguration;
+import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode;
+import com.google.cloud.bigquery.exception.BigQueryJdbcException;
+import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException;
+import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException;
+import com.google.cloud.bigquery.storage.v1.BigQueryReadClient;
+import com.google.cloud.bigquery.storage.v1.BigQueryReadSettings;
+import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient;
+import com.google.cloud.bigquery.storage.v1.BigQueryWriteSettings;
+import com.google.cloud.http.HttpTransportOptions;
+import java.io.IOException;
+import java.io.InputStream;
+import java.sql.CallableStatement;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.SQLWarning;
+import java.sql.Statement;
+import java.time.Duration;
+import java.util.ArrayList;
+import java.util.ConcurrentModificationException;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Executor;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * An implementation of {@link java.sql.Connection} for establishing a connection with BigQuery and
+ * executing SQL statements.
+ *
+ * @see BigQueryStatement
+ */
+public class BigQueryConnection extends BigQueryNoOpsConnection {
+
+ private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString());
+ String connectionClassName = this.toString();
+ private static final String DEFAULT_JDBC_TOKEN_VALUE = "Google-BigQuery-JDBC-Driver";
+ private static final String DEFAULT_VERSION = "0.0.0";
+ private HeaderProvider headerProvider;
+ BigQueryReadClient bigQueryReadClient = null;
+ BigQueryWriteClient bigQueryWriteClient = null;
+ BigQuery bigQuery;
+ String connectionUrl;
+ Set<Statement> openStatements;
+ boolean autoCommit;
+ int transactionIsolation;
+ List<SQLWarning> sqlWarnings;
+ String catalog;
+ int holdability;
+ long retryTimeoutInSeconds;
+ Duration retryTimeoutDuration;
+ long retryInitialDelayInSeconds;
+ Duration retryInitialDelayDuration;
+ long retryMaxDelayInSeconds;
+ Duration retryMaxDelayDuration;
+ // transactionStarted is false by default.
+ // When autocommit is false, a transaction starts and a session is initialized.
+ boolean transactionStarted;
+ ConnectionProperty sessionInfoConnectionProperty;
+ boolean isClosed;
+ DatasetId defaultDataset;
+ String location;
+ boolean enableHighThroughputAPI;
+ int highThroughputMinTableSize;
+ int highThroughputActivationRatio;
+ boolean enableSession;
+ boolean unsupportedHTAPIFallback;
+ boolean useQueryCache;
+ String queryDialect;
+ int metadataFetchThreadCount;
+ boolean allowLargeResults;
+ String destinationTable;
+ String destinationDataset;
+ long destinationDatasetExpirationTime;
+ String kmsKeyName;
+ String universeDomain;
+ List<ConnectionProperty> queryProperties;
+ Map<String, String> authProperties;
+ Map<String, String> overrideProperties;
+ Credentials credentials;
+ boolean useStatelessQueryMode;
+ int numBufferedRows;
+ HttpTransportOptions httpTransportOptions;
+ TransportChannelProvider transportChannelProvider;
+ long maxResults;
+ long jobTimeoutInSeconds;
+ boolean enableWriteAPI;
+ int writeAPIActivationRowCount;
+ int writeAPIAppendRowCount;
+ int requestGoogleDriveScope;
+ String additionalProjects;
+ boolean filterTablesOnDefaultDataset;
+ String sslTrustStorePath;
+ String sslTrustStorePassword;
+ long maxBytesBilled;
+ Map<String, String> labels;
+ Integer httpConnectTimeout;
+ Integer httpReadTimeout;
+ String requestReason;
+ Long connectionPoolSize;
+ Long listenerPoolSize;
+ String partnerToken;
+
+ BigQueryConnection(String url) throws IOException {
+ this(url, DataSource.fromUrl(url));
+ }
+
+ BigQueryConnection(String url, DataSource ds) throws IOException {
+ this.connectionUrl = url;
+ this.openStatements = ConcurrentHashMap.newKeySet();
+ this.autoCommit = true;
+ this.sqlWarnings = new ArrayList<>();
+ this.transactionStarted = false;
+ this.isClosed = false;
+
+ this.labels = ds.getLabels() != null ? ds.getLabels() : new java.util.HashMap<>();
+ this.maxBytesBilled = ds.getMaximumBytesBilled();
+ this.retryTimeoutInSeconds = ds.getTimeout();
+ this.retryTimeoutDuration = Duration.ofMillis(retryTimeoutInSeconds * 1000L);
+ this.retryInitialDelayInSeconds = ds.getRetryInitialDelay();
+ this.retryInitialDelayDuration = Duration.ofMillis(retryInitialDelayInSeconds * 1000L);
+ this.retryMaxDelayInSeconds = ds.getRetryMaxDelay();
+ this.retryMaxDelayDuration = Duration.ofMillis(retryMaxDelayInSeconds * 1000L);
+ this.jobTimeoutInSeconds = ds.getJobTimeout();
+ this.authProperties =
+ BigQueryJdbcOAuthUtility.parseOAuthProperties(ds, this.connectionClassName);
+ this.catalog = ds.getProjectId();
+ this.universeDomain = ds.getUniverseDomain();
+
+ this.overrideProperties = ds.getOverrideProperties();
+ if (this.universeDomain != null) {
+ this.overrideProperties.put(
+ BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME, this.universeDomain);
+ }
+ this.credentials =
+ BigQueryJdbcOAuthUtility.getCredentials(
+ authProperties, overrideProperties, this.connectionClassName);
+ String defaultDatasetString = ds.getDefaultDataset();
+ if (defaultDatasetString == null || defaultDatasetString.trim().isEmpty()) {
+ this.defaultDataset = null;
+ } else {
+ String[] parts = defaultDatasetString.split("\\.");
+ if (parts.length == 2) {
+ this.defaultDataset = DatasetId.of(parts[0], parts[1]);
+ } else if (parts.length == 1) {
+ this.defaultDataset = DatasetId.of(parts[0]);
+ } else {
+ throw new IllegalArgumentException(
+ "DefaultDataset format is invalid. Supported options are datasetId or"
+ + " projectId.datasetId");
+ }
+ }
+ this.location = ds.getLocation();
+ this.enableHighThroughputAPI = ds.getEnableHighThroughputAPI();
+ this.highThroughputMinTableSize = ds.getHighThroughputMinTableSize();
+ this.highThroughputActivationRatio = ds.getHighThroughputActivationRatio();
+ this.useQueryCache = ds.getUseQueryCache();
+ this.useStatelessQueryMode = ds.getUseStatelessQueryMode();
+
+ this.queryDialect = ds.getQueryDialect();
+ this.allowLargeResults = ds.getAllowLargeResults();
+ this.destinationTable = ds.getDestinationTable();
+ this.destinationDataset = ds.getDestinationDataset();
+ this.destinationDatasetExpirationTime = ds.getDestinationDatasetExpirationTime();
+ this.kmsKeyName = ds.getKmsKeyName();
+ Map<String, String> proxyProperties =
+ BigQueryJdbcProxyUtility.parseProxyProperties(ds, this.connectionClassName);
+
+ this.sslTrustStorePath = ds.getSSLTrustStorePath();
+ this.sslTrustStorePassword = ds.getSSLTrustStorePassword();
+ this.httpConnectTimeout = ds.getHttpConnectTimeout();
+ this.httpReadTimeout = ds.getHttpReadTimeout();
+
+ this.httpTransportOptions =
+ BigQueryJdbcProxyUtility.getHttpTransportOptions(
+ proxyProperties,
+ this.sslTrustStorePath,
+ this.sslTrustStorePassword,
+ this.httpConnectTimeout,
+ this.httpReadTimeout,
+ this.connectionClassName);
+ this.transportChannelProvider =
+ BigQueryJdbcProxyUtility.getTransportChannelProvider(
+ proxyProperties,
+ this.sslTrustStorePath,
+ this.sslTrustStorePassword,
+ this.connectionClassName);
+ this.enableSession = ds.getEnableSession();
+ this.unsupportedHTAPIFallback = ds.getUnsupportedHTAPIFallback();
+ this.maxResults = ds.getMaxResults();
+ Map<String, String> queryPropertiesMap = ds.getQueryProperties();
+ this.sessionInfoConnectionProperty = getSessionPropertyFromQueryProperties(queryPropertiesMap);
+ this.queryProperties = convertMapToConnectionPropertiesList(queryPropertiesMap);
+ this.enableWriteAPI = ds.getEnableWriteAPI();
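+ // Row-count thresholds consulted when the Storage Write API path is enabled.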
+ this.writeAPIActivationRowCount = ds.getSwaActivationRowCount();
+ this.writeAPIAppendRowCount = ds.getSwaAppendRowCount();
+
+ this.additionalProjects = ds.getAdditionalProjects();
+
+ this.filterTablesOnDefaultDataset = ds.getFilterTablesOnDefaultDataset();
+ this.requestGoogleDriveScope = ds.getRequestGoogleDriveScope();
+ this.metadataFetchThreadCount = ds.getMetadataFetchThreadCount();
+ this.requestReason = ds.getRequestReason();
+ this.connectionPoolSize = ds.getConnectionPoolSize();
+ this.listenerPoolSize = ds.getListenerPoolSize();
+ this.partnerToken = ds.getPartnerToken();
+
+ this.headerProvider = createHeaderProvider();
+ this.bigQuery = getBigQueryConnection();
+ }
+
+ String getLibraryVersion(Class<?> libraryClass) {
+ LOG.finest("++enter++");
+ String version = null;
+ try (InputStream in =
+ libraryClass.getResourceAsStream(
+ "/com/google/cloud/bigquery/jdbc/dependencies.properties")) {
+ if (in != null) {
+ Properties props = new Properties();
+ props.load(in);
+ version = props.getProperty("version.jdbc");
+ }
+ } catch (IOException e) {
+ return DEFAULT_VERSION;
+ }
+
+ return version != null ? version : DEFAULT_VERSION;
+ }
+
+ HeaderProvider createHeaderProvider() {
+ String headerToken = DEFAULT_JDBC_TOKEN_VALUE + "/" + getLibraryVersion(this.getClass());
+ if (this.partnerToken != null && !this.partnerToken.isEmpty()) {
+ headerToken += this.partnerToken;
+ }
+ Map<String, String> headers = new java.util.HashMap<>();
+ headers.put("user-agent", headerToken);
+ if (this.requestReason != null) {
+ headers.put("x-goog-request-reason", this.requestReason);
+ }
+ return FixedHeaderProvider.create(headers);
+ }
+
+ protected void addOpenStatements(Statement statement) {
+ LOG.finest("Statement %s added to Connection %s.", statement, this);
+ this.openStatements.add(statement);
+ }
+
+ BigQueryReadClient getBigQueryReadClient() {
+ try {
+ if (this.bigQueryReadClient == null) {
+ this.bigQueryReadClient = getBigQueryReadClientConnection();
+ }
+ } catch (IOException e) {
+ throw new BigQueryJdbcRuntimeException(e);
+ }
+ return this.bigQueryReadClient;
+ }
+
+ BigQueryWriteClient getBigQueryWriteClient() {
+ try {
+ if (this.bigQueryWriteClient == null) {
+ this.bigQueryWriteClient = getBigQueryWriteClientConnection();
+ }
+ } catch (IOException e) {
+ throw new BigQueryJdbcRuntimeException(e);
+ }
+ return this.bigQueryWriteClient;
+ }
+
+ BigQuery getBigQuery() {
+ return this.bigQuery;
+ }
+
+ String getConnectionUrl() {
+ return connectionUrl;
+ }
+
+ /**
+ * Creates and returns a new {@code Statement} object for executing BigQuery SQL queries.
+ *
+ * @return a new {@code Statement} object
+ * @see Connection#createStatement()
+ */
+ @Override
+ public Statement createStatement() throws SQLException {
+ checkClosed();
+ BigQueryStatement currentStatement = new BigQueryStatement(this);
+ LOG.fine("Statement %s created.", currentStatement);
+ addOpenStatements(currentStatement);
+ return currentStatement;
+ }
+
+ /**
+ * Creates and returns a new {@code Statement} object for executing BigQuery SQL queries. This
+ * method is similar to {@link BigQueryConnection#createStatement()}, but it overrides the type
+ * and concurrency of the generated {@code ResultSet}.
+ *
+ * @throws SQLException if a BigQuery connection error occurs, if this method is called on a
+ * closed connection, or the given parameters are not {@code ResultSet} constants indicating
+ * type and concurrency.
+ * @throws BigQueryJdbcSqlFeatureNotSupportedException if this method is not supported for the
+ * specified result set type and result set concurrency.
+ * @see Connection#createStatement(int, int)
+ * @see ResultSet
+ */
+ @Override
+ public Statement createStatement(int resultSetType, int resultSetConcurrency)
+ throws SQLException {
+ checkClosed();
+ if (resultSetType != ResultSet.TYPE_FORWARD_ONLY
+ || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) {
+ throw new BigQueryJdbcSqlFeatureNotSupportedException("Unsupported createStatement feature.");
+ }
+ return createStatement();
+ }
+
+ /**
+ * Creates and returns a new {@code Statement} object for executing BigQuery SQL queries. This
+ * method is similar to {@link BigQueryConnection#createStatement()}, but it overrides the type,
+ * concurrency, and holdability of the generated {@code ResultSet}.
+ *
+ * @throws SQLException if a BigQuery connection error occurs, if this method is called on a
+ * closed connection, or the given parameters are not {@code ResultSet} constants indicating
+ * type, concurrency, and holdability.
+ * @throws BigQueryJdbcSqlFeatureNotSupportedException if this method is not supported for the
+ * specified result set type, result set concurrency, and result set holdability.
+ * @see Connection#createStatement(int, int, int)
+ * @see ResultSet
+ */
+ @Override
+ public Statement createStatement(
+ int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
+ LOG.finest("++enter++");
+ checkClosed();
+ if (resultSetType != ResultSet.TYPE_FORWARD_ONLY
+ || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY
+ || resultSetHoldability != ResultSet.CLOSE_CURSORS_AT_COMMIT) {
+ throw new BigQueryJdbcSqlFeatureNotSupportedException("Unsupported createStatement feature");
+ }
+ return createStatement();
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(String sql) throws SQLException {
+ checkClosed();
+ PreparedStatement currentStatement = new BigQueryPreparedStatement(this, sql);
+ LOG.fine("Prepared Statement %s created.", currentStatement);
+ addOpenStatements(currentStatement);
+ return currentStatement;
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException {
+ if (autoGeneratedKeys != Statement.NO_GENERATED_KEYS) {
+ throw new BigQueryJdbcSqlFeatureNotSupportedException("autoGeneratedKeys is not supported");
+ }
+ return prepareStatement(sql);
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException {
+ throw new BigQueryJdbcSqlFeatureNotSupportedException(
+ "Returning generated keys by column index is not supported");
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(
+ String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability)
+ throws SQLException {
+ if (resultSetType != ResultSet.TYPE_FORWARD_ONLY
+ || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY
+ || resultSetHoldability != ResultSet.CLOSE_CURSORS_AT_COMMIT) {
+ throw new BigQueryJdbcSqlFeatureNotSupportedException("Unsupported prepareStatement feature");
+ }
+ return prepareStatement(sql);
+ }
+
+ @Override
+ public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency)
+ throws SQLException {
+ LOG.finest("++enter++");
+ if (resultSetType != ResultSet.TYPE_FORWARD_ONLY
+ || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) {
+ throw new BigQueryJdbcSqlFeatureNotSupportedException("Unsupported prepareStatement feature");
+ }
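+ // Only forward-only, read-only result sets are supported, so the basic overload applies.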
+ return prepareStatement(sql);
+ }
+
+ public DatasetId getDefaultDataset() {
+ checkClosed();
+ return this.defaultDataset;
+ }
+
+ String getDestinationDataset() {
+ return this.destinationDataset;
+ }
+
+ String getDestinationTable() {
+ return this.destinationTable;
+ }
+
+ long getDestinationDatasetExpirationTime() {
+ return this.destinationDatasetExpirationTime;
+ }
+
+ String getKmsKeyName() {
+ return this.kmsKeyName;
+ }
+
+ List<ConnectionProperty> getQueryProperties() {
+ return this.queryProperties;
+ }
+
+ public String getLocation() {
+ checkClosed();
+ return this.location;
+ }
+
+ public Map<String, String> getAuthProperties() {
+ checkClosed();
+ return this.authProperties;
+ }
+
+ long getMaxResults() {
+ return maxResults;
+ }
+
+ long getRetryTimeoutInSeconds() {
+ return this.retryTimeoutInSeconds;
+ }
+
+ Duration getRetryTimeoutDuration() {
+ return this.retryTimeoutDuration;
+ }
+
+ long getRetryInitialDelayInSeconds() {
+ return this.retryInitialDelayInSeconds;
+ }
+
+ Duration getRetryInitialDelayDuration() {
+ return this.retryInitialDelayDuration;
+ }
+
+ long getRetryMaxDelayInSeconds() {
+ return this.retryMaxDelayInSeconds;
+ }
+
+ Duration getRetryMaxDelayDuration() {
+ return this.retryMaxDelayDuration;
+ }
+
+ long getJobTimeoutInSeconds() {
+ return this.jobTimeoutInSeconds;
+ }
+
+ long getMaxBytesBilled() {
+ return this.maxBytesBilled;
+ }
+
+ Map<String, String> getLabels() {
+ return this.labels;
+ }
+
+ /**
+ * Begins a transaction.
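+ * A {@code BEGIN TRANSACTION} job is issued, and a BigQuery session is created first if one is
+ * not already active.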
+ * The transaction ends when a {@link BigQueryConnection#commit()} or {@link + * BigQueryConnection#rollback()} is made.
+ * For more information about transactions in BigQuery, see Multi-statement transactions. + */ + private void beginTransaction() { + LOG.finest("++enter++"); + QueryJobConfiguration.Builder transactionBeginJobConfig = + QueryJobConfiguration.newBuilder("BEGIN TRANSACTION;"); + try { + if (this.sessionInfoConnectionProperty != null) { + transactionBeginJobConfig.setConnectionProperties(this.queryProperties); + } else { + transactionBeginJobConfig.setCreateSession(true); + } + Job job = this.bigQuery.create(JobInfo.of(transactionBeginJobConfig.build())); + job = job.waitFor(); + Job transactionBeginJob = this.bigQuery.getJob(job.getJobId()); + if (this.sessionInfoConnectionProperty == null) { + this.sessionInfoConnectionProperty = + ConnectionProperty.newBuilder() + .setKey("session_id") + .setValue(transactionBeginJob.getStatistics().getSessionInfo().getSessionId()) + .build(); + this.queryProperties.add(this.sessionInfoConnectionProperty); + } + this.transactionStarted = true; + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + } + + public boolean isTransactionStarted() { + return this.transactionStarted; + } + + boolean isSessionEnabled() { + return this.enableSession; + } + + boolean isUnsupportedHTAPIFallback() { + return this.unsupportedHTAPIFallback; + } + + ConnectionProperty getSessionInfoConnectionProperty() { + return this.sessionInfoConnectionProperty; + } + + boolean isEnableHighThroughputAPI() { + return this.enableHighThroughputAPI; + } + + boolean isUseQueryCache() { + return useQueryCache; + } + + boolean getUseStatelessQueryMode() { + return useStatelessQueryMode; + } + + boolean isAllowLargeResults() { + return allowLargeResults; + } + + String getQueryDialect() { + return queryDialect; + } + + Integer getNumBufferedRows() { + return numBufferedRows; + } + + int getHighThroughputMinTableSize() { + return highThroughputMinTableSize; + } + + String getAdditionalProjects() { + return this.additionalProjects; + } + + int getHighThroughputActivationRatio() { + return highThroughputActivationRatio; + } + + boolean isFilterTablesOnDefaultDataset() { + return this.filterTablesOnDefaultDataset; + } + + int isRequestGoogleDriveScope() { + return requestGoogleDriveScope; + } + + int getMetadataFetchThreadCount() { + return this.metadataFetchThreadCount; + } + + boolean isEnableWriteAPI() { + return enableWriteAPI; + } + + int getWriteAPIActivationRowCount() { + return writeAPIActivationRowCount; + } + + int getWriteAPIAppendRowCount() { + return writeAPIAppendRowCount; + } + + String getSSLTrustStorePath() { + return sslTrustStorePath; + } + + String getSSLTrustStorePassword() { + return sslTrustStorePassword; + } + + Integer getHttpConnectTimeout() { + return httpConnectTimeout; + } + + Integer getHttpReadTimeout() { + return httpReadTimeout; + } + + Long getConnectionPoolSize() { + return connectionPoolSize; + } + + Long getListenerPoolSize() { + return listenerPoolSize; + } + + @Override + public boolean isValid(int timeout) throws SQLException { + if (timeout < 0) { + throw new BigQueryJdbcException("timeout must be >= 0"); + } + if (!isClosed()) { + try (Statement statement = createStatement(); + ResultSet rs = statement.executeQuery("SELECT 1")) { + LOG.finest("Running validation query"); + // TODO(obada): set query timeout when it's implemented + // TODO(obada): use dry run + if (rs.next()) { + if (rs.getInt(1) == 1) { + return true; + } + } + } catch (SQLException ex) { + // Ignore + } + } + return false; + } + + @Override + public void 
abort(Executor executor) throws SQLException {
+ LOG.finest("++enter++");
+ close();
+ }
+
+ // TODO: Translate thrown exceptions to BigQueryJdbcSqlClientInfoException when implementing
+ // the client-info methods below.
+ @Override
+ public void setClientInfo(String name, String value) {}
+
+ @Override
+ public String getClientInfo(String name) {
+ return null;
+ }
+
+ @Override
+ public String getCatalog() {
+ return this.catalog;
+ }
+
+ @Override
+ public Properties getClientInfo() {
+ return null;
+ }
+
+ @Override
+ public void setClientInfo(Properties properties) {}
+
+ @Override
+ public SQLWarning getWarnings() {
+ return this.sqlWarnings.isEmpty() ? null : this.sqlWarnings.get(0);
+ }
+
+ @Override
+ public void clearWarnings() {
+ this.sqlWarnings.clear();
+ }
+
+ @Override
+ public boolean getAutoCommit() {
+ checkClosed();
+ return this.autoCommit;
+ }
+
+ /**
+ * Sets this connection's auto-commit mode to the given state.
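+ * <p>Disabling auto-commit begins a BigQuery multi-statement transaction (sessions must be
+ * enabled). A minimal sketch, with an illustrative table name:
+ * <pre>{@code
+ * connection.setAutoCommit(false); // issues BEGIN TRANSACTION
+ * try (Statement stmt = connection.createStatement()) {
+ *   stmt.executeUpdate("DELETE FROM my_dataset.my_table WHERE id = 1");
+ * }
+ * connection.commit(); // COMMIT TRANSACTION; a new transaction then begins
+ * }</pre>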
+ * If this method is called during a transaction and the auto-commit mode is changed, the
+ * transaction is committed. If setAutoCommit is called and the auto-commit mode is not changed,
+ * the call is a no-op.
+ *
+ * @param autoCommit {@code true} to enable auto-commit mode; {@code false} to disable it
+ * @see Connection#setAutoCommit(boolean)
+ */
+ @Override
+ public void setAutoCommit(boolean autoCommit) throws SQLException {
+ LOG.finest("++enter++");
+ checkClosed();
+ checkIfEnabledSession("setAutoCommit");
+ if (this.autoCommit == autoCommit) {
+ return;
+ }
+
+ if (isTransactionStarted()) {
+ commitTransaction();
+ }
+
+ this.autoCommit = autoCommit;
+ if (!this.autoCommit) {
+ beginTransaction();
+ }
+ }
+
+ @Override
+ public void commit() {
+ LOG.finest("++enter++");
+ checkClosed();
+ checkIfEnabledSession("commit");
+ if (!isTransactionStarted()) {
+ throw new IllegalStateException(
+ "Cannot commit without an active transaction. Call setAutoCommit(false) to start a"
+ + " transaction.");
+ }
+ commitTransaction();
+ if (!getAutoCommit()) {
+ beginTransaction();
+ }
+ }
+
+ @Override
+ public void rollback() throws SQLException {
+ LOG.finest("++enter++");
+ checkClosed();
+ checkIfEnabledSession("rollback");
+ if (!isTransactionStarted()) {
+ throw new IllegalStateException(
+ "Cannot rollback without an active transaction. Call setAutoCommit(false) to start a"
+ + " transaction.");
+ }
+ try {
+ QueryJobConfiguration transactionRollbackJobConfig =
+ QueryJobConfiguration.newBuilder("ROLLBACK TRANSACTION;")
+ .setConnectionProperties(this.queryProperties)
+ .build();
+ Job rollbackJob = this.bigQuery.create(JobInfo.of(transactionRollbackJobConfig));
+ rollbackJob.waitFor();
+ this.transactionStarted = false;
+ if (!getAutoCommit()) {
+ beginTransaction();
+ }
+ } catch (InterruptedException | BigQueryException ex) {
+ throw new BigQueryJdbcException(ex);
+ }
+ }
+
+ @Override
+ public DatabaseMetaData getMetaData() throws SQLException {
+ return new BigQueryDatabaseMetaData(this);
+ }
+
+ @Override
+ public int getTransactionIsolation() {
+ // only supports Connection.TRANSACTION_SERIALIZABLE
+ return Connection.TRANSACTION_SERIALIZABLE;
+ }
+
+ @Override
+ public void setTransactionIsolation(int level) throws SQLException {
+ if (level != Connection.TRANSACTION_SERIALIZABLE) {
+ throw new BigQueryJdbcSqlFeatureNotSupportedException(
+ "Only TRANSACTION_SERIALIZABLE is supported");
+ }
+ this.transactionIsolation = level;
+ }
+
+ @Override
+ public int getHoldability() {
+ return this.holdability;
+ }
+
+ @Override
+ public void setHoldability(int holdability) throws SQLException {
+ if (holdability != ResultSet.CLOSE_CURSORS_AT_COMMIT) {
+ throw new BigQueryJdbcSqlFeatureNotSupportedException(
+ "Only CLOSE_CURSORS_AT_COMMIT is supported");
+ }
+ this.holdability = holdability;
+ }
+
+ /**
+ * Releases this {@code BigQueryConnection} object's BigQuery resources immediately instead of
+ * waiting for them to be automatically released.
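+ * <p>All statements opened on this connection are closed, and any Storage Read/Write API
+ * clients are shut down.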
+ *
+ * @throws SQLException if a BigQuery access error occurs
+ * @see Connection#close()
+ */
+ @Override
+ public void close() throws SQLException {
+ LOG.fine("Closing Connection " + this);
+ // TODO(neenu-postMVP): Release all connection state objects.
+ // Check for and close all existing transactions.
+
+ if (isClosed()) {
+ return;
+ }
+ try {
+ if (this.bigQueryReadClient != null) {
+ this.bigQueryReadClient.shutdown();
+ this.bigQueryReadClient.awaitTermination(1, TimeUnit.MINUTES);
+ this.bigQueryReadClient.close();
+ }
+
+ if (this.bigQueryWriteClient != null) {
+ this.bigQueryWriteClient.shutdown();
+ this.bigQueryWriteClient.awaitTermination(1, TimeUnit.MINUTES);
+ this.bigQueryWriteClient.close();
+ }
+
+ for (Statement statement : this.openStatements) {
+ statement.close();
+ }
+ this.openStatements.clear();
+ } catch (ConcurrentModificationException ex) {
+ throw new BigQueryJdbcException(ex);
+ } catch (InterruptedException e) {
+ throw new BigQueryJdbcRuntimeException(e);
+ }
+ this.isClosed = true;
+ }
+
+ @Override
+ public boolean isClosed() {
+ return this.isClosed;
+ }
+
+ private void checkClosed() {
+ if (isClosed()) {
+ throw new IllegalStateException("This " + getClass().getName() + " has been closed");
+ }
+ }
+
+ private void checkIfEnabledSession(String methodName) {
+ if (!this.enableSession) {
+ throw new IllegalStateException(
+ String.format("Session needs to be enabled to use %s method.", methodName));
+ }
+ }
+
+ private ConnectionProperty getSessionPropertyFromQueryProperties(
+ Map<String, String> queryPropertiesMap) {
+ LOG.finest("++enter++");
+ if (queryPropertiesMap != null) {
+ if (queryPropertiesMap.containsKey("session_id")) {
+ return ConnectionProperty.newBuilder()
+ .setKey("session_id")
+ .setValue(queryPropertiesMap.get("session_id"))
+ .build();
+ }
+ }
+ return null;
+ }
+
+ private List<ConnectionProperty> convertMapToConnectionPropertiesList(
+ Map<String, String> queryPropertiesMap) {
+ LOG.finest("++enter++");
+ List<ConnectionProperty> connectionProperties = new ArrayList<>();
+ if (queryPropertiesMap != null) {
+ for (Map.Entry<String, String> entry : queryPropertiesMap.entrySet()) {
+ connectionProperties.add(
+ ConnectionProperty.newBuilder()
+ .setKey(entry.getKey())
+ .setValue(entry.getValue())
+ .build());
+ }
+ }
+ return connectionProperties;
+ }
+
+ void removeStatement(Statement statement) {
+ this.openStatements.remove(statement);
+ }
+
+ private BigQuery getBigQueryConnection() {
+ // 404 Not Found - check if the project exists
+ // 403 Forbidden - execute a dryRun to check if the user has bigquery.jobs.create permissions
+ BigQueryOptions.Builder bigQueryOptions = BigQueryOptions.newBuilder();
+ if (this.retryTimeoutInSeconds > 0L
+ || (this.retryInitialDelayInSeconds > 0L && this.retryMaxDelayInSeconds > 0L)) {
+ RetrySettings.Builder retrySettingsBuilder = RetrySettings.newBuilder();
+ if (this.retryTimeoutInSeconds > 0L) {
+ retrySettingsBuilder.setTotalTimeoutDuration(this.retryTimeoutDuration);
+ }
+ if (this.retryInitialDelayInSeconds > 0L && this.retryMaxDelayInSeconds > 0L) {
+ retrySettingsBuilder.setInitialRetryDelayDuration(retryInitialDelayDuration);
+ retrySettingsBuilder.setMaxRetryDelayDuration(retryMaxDelayDuration);
+ }
+ bigQueryOptions.setRetrySettings(retrySettingsBuilder.build());
+ }
+
+ if (this.catalog != null) {
+ bigQueryOptions.setProjectId(this.catalog);
+ }
+ if (this.credentials != null) {
+ bigQueryOptions.setCredentials(this.credentials);
+ }
+ if (this.location != null) {
+ bigQueryOptions.setLocation(this.location);
+ }
+ if (this.overrideProperties.containsKey(
BigQueryJdbcUrlUtility.BIGQUERY_ENDPOINT_OVERRIDE_PROPERTY_NAME)) { + bigQueryOptions.setHost( + this.overrideProperties.get( + BigQueryJdbcUrlUtility.BIGQUERY_ENDPOINT_OVERRIDE_PROPERTY_NAME)); + } + if (this.universeDomain != null) { + bigQueryOptions.setUniverseDomain(this.universeDomain); + } + if (this.httpTransportOptions != null) { + bigQueryOptions.setTransportOptions(this.httpTransportOptions); + } + + BigQueryOptions options = bigQueryOptions.setHeaderProvider(this.headerProvider).build(); + options.setDefaultJobCreationMode( + this.useStatelessQueryMode + ? JobCreationMode.JOB_CREATION_OPTIONAL + : JobCreationMode.JOB_CREATION_REQUIRED); + + return options.getService(); + } + + private BigQueryReadClient getBigQueryReadClientConnection() throws IOException { + BigQueryReadSettings.Builder bigQueryReadSettings = + BigQueryReadSettings.newBuilder().setHeaderProvider(this.headerProvider); + if (getRetrySettings() != null) { + bigQueryReadSettings.createReadSessionSettings().setRetrySettings(getRetrySettings()); + } + if (this.catalog != null) { + bigQueryReadSettings.setQuotaProjectId(this.catalog); + } + if (this.credentials != null) { + CredentialsProvider fixedProvider = FixedCredentialsProvider.create(credentials); + bigQueryReadSettings.setCredentialsProvider(fixedProvider); + } + if (this.overrideProperties.containsKey( + BigQueryJdbcUrlUtility.HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME)) { + bigQueryReadSettings.setEndpoint( + this.overrideProperties.get( + BigQueryJdbcUrlUtility.HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME)); + } + if (this.universeDomain != null) { + bigQueryReadSettings.setUniverseDomain(this.universeDomain); + } + TransportChannelProvider activeProvider = this.transportChannelProvider; + if (activeProvider == null) { + activeProvider = BigQueryReadSettings.defaultGrpcTransportProviderBuilder().build(); + } + + if (activeProvider instanceof InstantiatingGrpcChannelProvider) { + activeProvider = + ((InstantiatingGrpcChannelProvider) activeProvider) + .toBuilder() + .setKeepAliveTimeDuration(java.time.Duration.ofSeconds(10)) + .setKeepAliveTimeoutDuration(java.time.Duration.ofSeconds(5)) + .setKeepAliveWithoutCalls(true) + .build(); + } + + bigQueryReadSettings.setTransportChannelProvider(activeProvider); + + return BigQueryReadClient.create(bigQueryReadSettings.build()); + } + + private BigQueryWriteClient getBigQueryWriteClientConnection() throws IOException { + BigQueryWriteSettings.Builder bigQueryWriteSettings = + BigQueryWriteSettings.newBuilder().setHeaderProvider(this.headerProvider); + if (getRetrySettings() != null) { + bigQueryWriteSettings.createWriteStreamSettings().setRetrySettings(getRetrySettings()); + } + if (this.catalog != null) { + bigQueryWriteSettings.setQuotaProjectId(this.catalog); + } + if (this.credentials != null) { + CredentialsProvider fixedProvider = FixedCredentialsProvider.create(credentials); + bigQueryWriteSettings.setCredentialsProvider(fixedProvider); + } + // Same endpoint as READ API + if (this.overrideProperties.containsKey( + BigQueryJdbcUrlUtility.HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME)) { + bigQueryWriteSettings.setEndpoint( + this.overrideProperties.get( + BigQueryJdbcUrlUtility.HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME)); + } + if (this.universeDomain != null) { + bigQueryWriteSettings.setUniverseDomain(this.universeDomain); + } + if (this.transportChannelProvider != null) { + bigQueryWriteSettings.setTransportChannelProvider(this.transportChannelProvider); + } + + return BigQueryWriteClient.create(bigQueryWriteSettings.build()); 
+ } + + RetrySettings getRetrySettings() { + RetrySettings.Builder retrySettingsBuilder = null; + + if (this.retryTimeoutInSeconds > 0L + || (this.retryInitialDelayInSeconds > 0L && this.retryMaxDelayInSeconds > 0L)) { + retrySettingsBuilder = RetrySettings.newBuilder(); + if (this.retryTimeoutInSeconds > 0L) { + retrySettingsBuilder.setTotalTimeoutDuration(this.retryTimeoutDuration); + } + if (this.retryInitialDelayInSeconds > 0L && this.retryMaxDelayInSeconds > 0L) { + retrySettingsBuilder.setInitialRetryDelayDuration(retryInitialDelayDuration); + retrySettingsBuilder.setMaxRetryDelayDuration(retryMaxDelayDuration); + } + } + return retrySettingsBuilder == null ? null : retrySettingsBuilder.build(); + } + + private void commitTransaction() { + try { + QueryJobConfiguration transactionCommitJobConfig = + QueryJobConfiguration.newBuilder("COMMIT TRANSACTION;") + .setConnectionProperties(this.queryProperties) + .build(); + Job commitJob = this.bigQuery.create(JobInfo.of(transactionCommitJobConfig)); + commitJob.waitFor(); + this.transactionStarted = false; + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + } + + @Override + public CallableStatement prepareCall(String sql) throws SQLException { + checkClosed(); + CallableStatement currentStatement = new BigQueryCallableStatement(this, sql); + LOG.fine("Callable Statement %s created.", currentStatement); + addOpenStatements(currentStatement); + return currentStatement; + } + + @Override + public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) + throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (resultSetType != ResultSet.TYPE_FORWARD_ONLY + || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "Unsupported CallableStatement feature"); + } + return prepareCall(sql); + } + + @Override + public CallableStatement prepareCall( + String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws SQLException { + LOG.finest("++enter++"); + checkClosed(); + if (resultSetType != ResultSet.TYPE_FORWARD_ONLY + || resultSetConcurrency != ResultSet.CONCUR_READ_ONLY + || resultSetHoldability != ResultSet.CLOSE_CURSORS_AT_COMMIT) { + throw new BigQueryJdbcSqlFeatureNotSupportedException( + "Unsupported CallableStatement feature"); + } + return prepareCall(sql); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionProperty.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionProperty.java new file mode 100644 index 0000000000..3b94f2fef7 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionProperty.java @@ -0,0 +1,129 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import java.util.List;
+import java.util.function.Supplier;
+
+class BigQueryConnectionProperty {
+
+ private final String name;
+ private final String description;
+ private final String defaultValue;
+ private final Supplier<String> defaultValueSupplier;
+ private final List<String> validValues;
+
+ public String getName() {
+ return name;
+ }
+
+ public String getDescription() {
+ return description;
+ }
+
+ public String getDefaultValue() {
+ if (defaultValueSupplier != null) {
+ return defaultValueSupplier.get();
+ }
+ return defaultValue;
+ }
+
+ public List<String> getValidValues() {
+ return validValues;
+ }
+
+ BigQueryConnectionProperty(Builder builder) {
+ this.name = builder.name;
+ this.defaultValueSupplier = builder.defaultValueSupplier;
+ this.defaultValue = builder.defaultValue;
+ this.description = builder.description;
+ this.validValues = builder.validValues;
+ }
+
+ /** Returns a builder for a BigQueryConnectionProperty object. */
+ static BigQueryConnectionProperty.Builder newBuilder() {
+ return new BigQueryConnectionProperty.Builder();
+ }
+
+ BigQueryConnectionProperty.Builder toBuilder() {
+ return new BigQueryConnectionProperty.Builder(this);
+ }
+
+ @Override
+ public String toString() {
+ return "BigQueryConnectionProperty{"
+ + "name='"
+ + name
+ + '\''
+ + ", description='"
+ + description
+ + '\''
+ + ", defaultValue='"
+ + defaultValue
+ + '\''
+ + ", validValues="
+ + validValues
+ + '}';
+ }
+
+ static final class Builder {
+
+ private String name;
+ private String description;
+ private String defaultValue;
+ private Supplier<String> defaultValueSupplier = null;
+ private List<String> validValues;
+
+ private Builder(BigQueryConnectionProperty bigQueryConnectionProperty) {
+ this.name = bigQueryConnectionProperty.name;
+ this.description = bigQueryConnectionProperty.description;
+ this.defaultValue = bigQueryConnectionProperty.defaultValue;
+ this.defaultValueSupplier = bigQueryConnectionProperty.defaultValueSupplier;
+ this.validValues = bigQueryConnectionProperty.validValues;
+ }
+
+ private Builder() {}
+
+ Builder setName(String name) {
+ this.name = name;
+ return this;
+ }
+
+ Builder setDescription(String description) {
+ this.description = description;
+ return this;
+ }
+
+ Builder setDefaultValue(String defaultValue) {
+ this.defaultValue = defaultValue;
+ return this;
+ }
+
+ Builder setLazyDefaultValue(Supplier<String> defaultValueSupplier) {
+ this.defaultValueSupplier = defaultValueSupplier;
+ return this;
+ }
+
+ Builder setValidValues(List<String> validValues) {
+ this.validValues = validValues;
+ return this;
+ }
+
+ BigQueryConnectionProperty build() {
+ return new BigQueryConnectionProperty(this);
+ }
+ }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTask.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTask.java new file mode 100644 index 0000000000..386785660a --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTask.java @@ -0,0 +1,122 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.api.core.InternalApi;
+import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException;
+import java.lang.ref.Reference;
+import java.lang.ref.ReferenceQueue;
+import java.util.List;
+
+/**
+ * This class provides the daemon threads that poll the reference queues associated with the two
+ * ResultSet implementations.
+ */
+@InternalApi
+class BigQueryDaemonPollingTask extends Thread {
+
+ private static final BigQueryJdbcCustomLogger LOG =
+ new BigQueryJdbcCustomLogger(BigQueryDaemonPollingTask.class.getName());
+
+ static ReferenceQueue referenceQueueArrowRs;
+ static ReferenceQueue referenceQueueJsonRs;
+ static List arrowRsFinalizers;
+ static List jsonRsFinalizers;
+
+ private static BigQueryDaemonPollingTask arrowDaemon;
+ private static BigQueryDaemonPollingTask jsonDaemon;
+
+ private static final Object LOCK = new Object();
+
+ private BigQueryDaemonPollingTask(
+ List arrowRsFinalizers,
+ ReferenceQueue referenceQueueArrowRs) {
+ BigQueryDaemonPollingTask.referenceQueueArrowRs = referenceQueueArrowRs;
+ BigQueryDaemonPollingTask.arrowRsFinalizers = arrowRsFinalizers;
+ setDaemon(true);
+ }
+
+ private BigQueryDaemonPollingTask(
+ ReferenceQueue referenceQueueJsonRs,
+ List jsonRsFinalizers) {
+ BigQueryDaemonPollingTask.referenceQueueJsonRs = referenceQueueJsonRs;
+ BigQueryDaemonPollingTask.jsonRsFinalizers = jsonRsFinalizers;
+ setDaemon(true);
+ }
+
+ /**
+ * Thread-safe method that creates two polling-task instances, one for each type of ResultSet.
+ *
+ * @param referenceQueueArrowRs ReferenceQueue for ArrowResultSet
+ * @param referenceQueueJsonRs ReferenceQueue for JsonResultSet
+ * @param arrowRsFinalizers Finalizers for ArrowResultSet
+ * @param jsonRsFinalizers Finalizers for JsonResultSet
+ * @return true if the tasks were created
+ */
+ public static boolean startGcDaemonTask(
+ ReferenceQueue referenceQueueArrowRs,
+ ReferenceQueue referenceQueueJsonRs,
+ List arrowRsFinalizers,
+ List jsonRsFinalizers) {
+ LOG.finest("++enter++");
+ synchronized (LOCK) {
+ // Two background threads are required to monitor the respective queues.
+ if (arrowDaemon == null && jsonDaemon == null) {
+ arrowDaemon = new BigQueryDaemonPollingTask(arrowRsFinalizers, referenceQueueArrowRs);
+ arrowDaemon.start();
+
+ jsonDaemon = new BigQueryDaemonPollingTask(referenceQueueJsonRs, jsonRsFinalizers);
+ jsonDaemon.start();
+
+ return true;
+ }
+ }
+ // Task(s) are already initialised.
+ return false;
+ }
+
+ @Override
+ public void run() {
+ Reference reference;
+ try {
+ LOG.finest("++enter++");
+ // Poll for Arrow ResultSets.
+ if (referenceQueueArrowRs != null) {
+ while ((reference = referenceQueueArrowRs.remove()) != null) {
+ LOG.fine("Clearing Arrow ResultSet reference " + reference);
+ ((BigQueryResultSetFinalizers.ArrowResultSetFinalizer) reference).finalizeResources();
+ reference.clear();
+ }
+ }
+ // Poll for JSON ResultSets.
+ else if (referenceQueueJsonRs != null) {
+ while ((reference = referenceQueueJsonRs.remove()) != null) {
+ LOG.fine("Clearing Json ResultSet reference " + reference);
+ ((BigQueryResultSetFinalizers.JsonResultSetFinalizer) reference).finalizeResources();
+ reference.clear();
+ }
+ } else {
+ throw new BigQueryJdbcRuntimeException("Null Reference Queue");
+ }
+ } catch (InterruptedException ex) {
+ throw new BigQueryJdbcRuntimeException(ex);
+ }
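+ // Normally unreachable: remove() blocks until a reference is enqueued, so each daemon
+ // thread services its queue for the lifetime of the driver.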
} +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaData.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaData.java new file mode 100644 index 0000000000..feca69ea12 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaData.java @@ -0,0 +1,5289 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.gax.paging.Page; +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQuery.DatasetListOption; +import com.google.cloud.bigquery.BigQuery.RoutineListOption; +import com.google.cloud.bigquery.BigQuery.TableListOption; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.Dataset; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.Routine; +import com.google.cloud.bigquery.RoutineArgument; +import com.google.cloud.bigquery.RoutineId; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLDataType; +import com.google.cloud.bigquery.StandardSQLField; +import com.google.cloud.bigquery.StandardSQLTableType; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.Table; +import com.google.cloud.bigquery.TableDefinition; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.RowIdLifetime; +import java.sql.SQLException; +import java.sql.Types; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashSet; +import java.util.List; +import java.util.Properties; +import java.util.Scanner; +import java.util.Set; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.Callable; +import java.util.concurrent.CancellationException; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.LinkedBlockingQueue; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.regex.Pattern; +import javax.annotation.Nullable; + +/** + * An implementation of {@link java.sql.DatabaseMetaData}. 
This interface is implemented by driver
+ * vendors to let users know the capabilities of a Database Management System (DBMS) in combination
+ * with the driver based on JDBC™ technology ("JDBC driver") that is used with it.
+ *
+ * @see BigQueryStatement
+ */
+// TODO(neenu): test and verify after post MVP implementation.
+class BigQueryDatabaseMetaData implements DatabaseMetaData {
+ final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString());
+ private static final String DATABASE_PRODUCT_NAME = "Google BigQuery";
+ private static final String DATABASE_PRODUCT_VERSION = "2.0";
+ private static final String DRIVER_NAME = "GoogleJDBCDriverForGoogleBigQuery";
+ private static final String DRIVER_DEFAULT_VERSION = "0.0.0";
+ private static final String SCHEMA_TERM = "Dataset";
+ private static final String CATALOG_TERM = "Project";
+ private static final String PROCEDURE_TERM = "Procedure";
+ private static final String GET_PRIMARY_KEYS_SQL = "DatabaseMetaData_GetPrimaryKeys.sql";
+ private static final String GET_IMPORTED_KEYS_SQL = "DatabaseMetaData_GetImportedKeys.sql";
+ private static final String GET_EXPORTED_KEYS_SQL = "DatabaseMetaData_GetExportedKeys.sql";
+ private static final String GET_CROSS_REFERENCE_SQL = "DatabaseMetaData_GetCrossReference.sql";
+ private static final int API_EXECUTOR_POOL_SIZE = 50;
+ private static final int DEFAULT_PAGE_SIZE = 500;
+ private static final int DEFAULT_QUEUE_CAPACITY = 5000;
+ // Declared package-private for testing.
+ static final String GOOGLE_SQL_QUOTED_IDENTIFIER = "`";
+ // Does not include SQL:2003 Keywords as per JDBC spec.
+ // https://en.wikipedia.org/wiki/List_of_SQL_reserved_words
+ static final String GOOGLE_SQL_RESERVED_KEYWORDS =
+ "ASC,ASSERT_ROWS_MODIFIED,DESC,ENUM,EXCLUDE,FOLLOWING,HASH,IF,"
+ + "IGNORE,LIMIT,LOOKUP,NULLS,PRECEDING,PROTO,QUALIFY,RESPECT,STRUCT,UNBOUNDED";
+ static final String GOOGLE_SQL_NUMERIC_FNS =
+ "ABS,ACOS,ACOSH,ASIN,ASINH,ATAN,ATAN2,ATANH,CBRT,CEIL,CEILING,COS"
+ + ",COSH,COSINE_DISTANCE,COT,COTH,CSC,CSCH,DIV,EXP,EUCLIDEAN_DISTANCE,FLOOR"
+ + ",GREATEST,IS_INF,LEAST,LN,LOG,LOG10,MOD,POW,RAND,RANGE_BUCKET,ROUND"
+ + ",SAFE_ADD,SAFE_DIVIDE,SAFE_MULTIPLY,SAFE_NEGATE,SAFE_SUBTRACT,SEC,SECH,"
+ + "SIGN,SIN,SINH,SQRT,TAN,TANH,TRUNC";
+ static final String GOOGLE_SQL_STRING_FNS =
+ "ASCII,BYTE_LENGTH,CHAR_LENGTH,CHARACTER_LENGTH,CHR,CODE_POINTS_TO_BYTES,"
+ + "CODE_POINTS_TO_STRING,COLLATE,CONCAT,CONTAINS_SUBSTR,EDIT_DISTANCE,ENDS_WITH,"
+ + "FORMAT,FROM_BASE32,FROM_BASE64,FROM_HEX,INITCAP,INSTR,LEFT,LENGTH,LOWER,"
+ + "LPAD,LTRIM,NORMALIZE,NORMALIZE_AND_CASEFOLD,OCTET_LENGTH,REGEXP_CONTAINS,"
+ + "REGEXP_EXTRACT,REGEXP_EXTRACT_ALL,REGEXP_INSTR,REGEXP_REPLACE,REGEXP_SUBSTR,"
+ + "REPEAT,REPLACE,REVERSE,RIGHT,RPAD,RTRIM,SAFE_CONVERT_BYTES_TO_STRING,SOUNDEX,"
+ + "SPLIT,STARTS_WITH,STRPOS,SUBSTR,SUBSTRING,TO_BASE32,TO_BASE64,TO_CODE_POINTS,"
+ + "TO_HEX,TRANSLATE,TRIM,UNICODE,UPPER";
+ static final String GOOGLE_SQL_TIME_DATE_FNS =
+ "DATE,DATE_ADD,DATE_BUCKET,DATE_DIFF,DATE_FROM_UNIX_DATE,"
+ + "DATE_SUB,DATE_TRUNC,DATETIME,DATETIME_ADD,DATETIME_BUCKET,"
+ + "DATETIME_DIFF,DATETIME_SUB,DATETIME_TRUNC,CURRENT_DATE,CURRENT_DATETIME,"
+ + "CURRENT_TIME,CURRENT_TIMESTAMP,EXTRACT,FORMAT_TIME,PARSE_TIME,"
+ + "TIME,TIME_ADD,TIME_DIFF,TIME_SUB,TIME_TRUNC,"
+ + "FORMAT_TIMESTAMP,GENERATE_TIMESTAMP_ARRAY,PARSE_TIMESTAMP,TIMESTAMP,"
+ + "TIMESTAMP_ADD,TIMESTAMP_DIFF,TIMESTAMP_MICROS,TIMESTAMP_MILLIS,TIMESTAMP_SECONDS,"
"TIMESTAMP_SUB,TIMESTAMP_TRUNC,UNIX_MICROS,UNIX_MILLIS,UNIX_SECONDS"; + static final String GOOGLE_SQL_ESCAPE = "\\"; + static final String GOOGLE_SQL_CATALOG_SEPARATOR = "."; + static final int GOOGLE_SQL_MAX_COL_NAME_LEN = 300; + static final int GOOGLE_SQL_MAX_COLS_PER_TABLE = 10000; + + String URL; + BigQueryConnection connection; + private final BigQueryStatement statement; + private final BigQuery bigquery; + private final int metadataFetchThreadCount; + private static final AtomicReference parsedDriverVersion = new AtomicReference<>(null); + private static final AtomicReference parsedDriverMajorVersion = + new AtomicReference<>(null); + private static final AtomicReference parsedDriverMinorVersion = + new AtomicReference<>(null); + + BigQueryDatabaseMetaData(BigQueryConnection connection) throws SQLException { + this.URL = connection.getConnectionUrl(); + this.connection = connection; + this.statement = connection.createStatement().unwrap(BigQueryStatement.class); + this.bigquery = connection.getBigQuery(); + this.metadataFetchThreadCount = connection.getMetadataFetchThreadCount(); + loadDriverVersionProperties(); + } + + @Override + public boolean allProceduresAreCallable() { + // Returns false because BigQuery's IAM permissions can allow a user + // to discover a procedure's existence without having rights to execute it. + return false; + } + + @Override + public boolean allTablesAreSelectable() { + // Returns true to ensure maximum compatibility with client applications + // that expect a positive response to discover and list all available tables. + return true; + } + + @Override + public String getURL() { + return this.URL; + } + + @Override + public String getUserName() { + return null; + } + + @Override + public boolean isReadOnly() { + return false; + } + + @Override + public boolean nullsAreSortedHigh() { + return false; + } + + @Override + public boolean nullsAreSortedLow() { + return false; + } + + @Override + public boolean nullsAreSortedAtStart() { + return false; + } + + @Override + public boolean nullsAreSortedAtEnd() { + return false; + } + + @Override + public String getDatabaseProductName() { + return DATABASE_PRODUCT_NAME; + } + + @Override + public String getDatabaseProductVersion() { + return DATABASE_PRODUCT_VERSION; + } + + @Override + public String getDriverName() { + return DRIVER_NAME; + } + + @Override + public String getDriverVersion() { + return parsedDriverVersion.get() != null ? parsedDriverVersion.get() : DRIVER_DEFAULT_VERSION; + } + + @Override + public int getDriverMajorVersion() { + return parsedDriverMajorVersion.get() != null ? parsedDriverMajorVersion.get() : 0; + } + + @Override + public int getDriverMinorVersion() { + return parsedDriverMinorVersion.get() != null ? 
parsedDriverMinorVersion.get() : 0; + } + + @Override + public boolean usesLocalFiles() { + return false; + } + + @Override + public boolean usesLocalFilePerTable() { + return false; + } + + @Override + public boolean supportsMixedCaseIdentifiers() { + return false; + } + + @Override + public boolean storesUpperCaseIdentifiers() { + return false; + } + + @Override + public boolean storesLowerCaseIdentifiers() { + return false; + } + + @Override + public boolean storesMixedCaseIdentifiers() { + return false; + } + + @Override + public boolean supportsMixedCaseQuotedIdentifiers() { + return false; + } + + @Override + public boolean storesUpperCaseQuotedIdentifiers() { + return false; + } + + @Override + public boolean storesLowerCaseQuotedIdentifiers() { + return false; + } + + @Override + public boolean storesMixedCaseQuotedIdentifiers() { + return false; + } + + @Override + public String getIdentifierQuoteString() { + return GOOGLE_SQL_QUOTED_IDENTIFIER; + } + + @Override + public String getSQLKeywords() { + return GOOGLE_SQL_RESERVED_KEYWORDS; + } + + @Override + public String getNumericFunctions() { + return GOOGLE_SQL_NUMERIC_FNS; + } + + @Override + public String getStringFunctions() { + return GOOGLE_SQL_STRING_FNS; + } + + @Override + // GoogleSQL has UDF (user defined functions). + // System functions like DATABASE(), USER() are not supported. + public String getSystemFunctions() { + return null; + } + + @Override + public String getTimeDateFunctions() { + return GOOGLE_SQL_TIME_DATE_FNS; + } + + @Override + public String getSearchStringEscape() { + return GOOGLE_SQL_ESCAPE; + } + + @Override + // No extra characters beyond a-z, A-Z, 0-9 and _ + public String getExtraNameCharacters() { + return null; + } + + @Override + public boolean supportsAlterTableWithAddColumn() { + return true; + } + + @Override + public boolean supportsAlterTableWithDropColumn() { + return true; + } + + @Override + public boolean supportsColumnAliasing() { + return true; + } + + @Override + public boolean nullPlusNonNullIsNull() { + return true; + } + + @Override + public boolean supportsConvert() { + return false; + } + + @Override + public boolean supportsConvert(int fromType, int toType) { + return false; + } + + @Override + public boolean supportsTableCorrelationNames() { + return true; + } + + @Override + public boolean supportsDifferentTableCorrelationNames() { + return false; + } + + @Override + public boolean supportsExpressionsInOrderBy() { + return true; + } + + @Override + public boolean supportsOrderByUnrelated() { + return true; + } + + @Override + public boolean supportsGroupBy() { + return true; + } + + @Override + public boolean supportsGroupByUnrelated() { + return true; + } + + @Override + public boolean supportsGroupByBeyondSelect() { + return true; + } + + @Override + public boolean supportsLikeEscapeClause() { + return false; + } + + @Override + public boolean supportsMultipleResultSets() { + return false; + } + + @Override + public boolean supportsMultipleTransactions() { + return false; + } + + @Override + public boolean supportsNonNullableColumns() { + return false; + } + + @Override + public boolean supportsMinimumSQLGrammar() { + return false; + } + + @Override + public boolean supportsCoreSQLGrammar() { + return false; + } + + @Override + public boolean supportsExtendedSQLGrammar() { + return false; + } + + @Override + public boolean supportsANSI92EntryLevelSQL() { + return false; + } + + @Override + public boolean supportsANSI92IntermediateSQL() { + return false; + } + + @Override 
+ public boolean supportsANSI92FullSQL() { + return false; + } + + @Override + public boolean supportsIntegrityEnhancementFacility() { + return false; + } + + @Override + public boolean supportsOuterJoins() { + return false; + } + + @Override + public boolean supportsFullOuterJoins() { + return false; + } + + @Override + public boolean supportsLimitedOuterJoins() { + return false; + } + + @Override + public String getSchemaTerm() { + return SCHEMA_TERM; + } + + @Override + public String getProcedureTerm() { + return PROCEDURE_TERM; + } + + @Override + public String getCatalogTerm() { + return CATALOG_TERM; + } + + @Override + public boolean isCatalogAtStart() { + return true; + } + + @Override + public String getCatalogSeparator() { + return GOOGLE_SQL_CATALOG_SEPARATOR; + } + + @Override + public boolean supportsSchemasInDataManipulation() { + return false; + } + + @Override + public boolean supportsSchemasInProcedureCalls() { + return false; + } + + @Override + public boolean supportsSchemasInTableDefinitions() { + return false; + } + + @Override + public boolean supportsSchemasInIndexDefinitions() { + return false; + } + + @Override + public boolean supportsSchemasInPrivilegeDefinitions() { + return false; + } + + @Override + public boolean supportsCatalogsInDataManipulation() { + return false; + } + + @Override + public boolean supportsCatalogsInProcedureCalls() { + return false; + } + + @Override + public boolean supportsCatalogsInTableDefinitions() { + return false; + } + + @Override + public boolean supportsCatalogsInIndexDefinitions() { + return false; + } + + @Override + public boolean supportsCatalogsInPrivilegeDefinitions() { + return false; + } + + @Override + public boolean supportsPositionedDelete() { + return false; + } + + @Override + public boolean supportsPositionedUpdate() { + return false; + } + + @Override + public boolean supportsSelectForUpdate() { + return false; + } + + @Override + public boolean supportsStoredProcedures() { + return false; + } + + @Override + public boolean supportsSubqueriesInComparisons() { + return false; + } + + @Override + public boolean supportsSubqueriesInExists() { + return false; + } + + @Override + public boolean supportsSubqueriesInIns() { + return false; + } + + @Override + public boolean supportsSubqueriesInQuantifieds() { + return false; + } + + @Override + public boolean supportsCorrelatedSubqueries() { + return false; + } + + @Override + public boolean supportsUnion() { + return true; + } + + @Override + public boolean supportsUnionAll() { + return true; + } + + @Override + public boolean supportsOpenCursorsAcrossCommit() { + return false; + } + + @Override + public boolean supportsOpenCursorsAcrossRollback() { + return false; + } + + @Override + public boolean supportsOpenStatementsAcrossCommit() { + return false; + } + + @Override + public boolean supportsOpenStatementsAcrossRollback() { + return false; + } + + @Override + // No limit + public int getMaxBinaryLiteralLength() { + return 0; + } + + @Override + // No Limit + public int getMaxCharLiteralLength() { + return 0; + } + + @Override + // GoogleSQL documentation says 300. + // https://cloud.google.com/bigquery/quotas#all_tables + public int getMaxColumnNameLength() { + return GOOGLE_SQL_MAX_COL_NAME_LEN; + } + + @Override + // No specific limits for group by. + public int getMaxColumnsInGroupBy() { + return 0; + } + + @Override + // No specific limits for index. + public int getMaxColumnsInIndex() { + return 0; + } + + @Override + // No specific limit for Order By. 
+ public int getMaxColumnsInOrderBy() { + return 0; + } + + @Override + // All columns can be selected. No specific limits. + public int getMaxColumnsInSelect() { + return 0; + } + + @Override + public int getMaxColumnsInTable() { + return GOOGLE_SQL_MAX_COLS_PER_TABLE; + } + + @Override + public int getMaxConnections() { + // Per JDBC spec, returns 0 as there is no connection limit or is unknown. + return 0; + } + + @Override + public int getMaxCursorNameLength() { + // BigQuery does not support named cursors or positioned updates/deletes. + return 0; + } + + @Override + public int getMaxIndexLength() { + // Per the JDBC spec, 0 indicates this feature is not supported. + return 0; + } + + @Override + public int getMaxSchemaNameLength() { + // Dataset IDs can be up to 1024 characters long. + // See: https://cloud.google.com/bigquery/docs/datasets#dataset-naming + return 1024; + } + + @Override + public int getMaxProcedureNameLength() { + // Routine IDs can be up to 256 characters long. + // See: + // https://cloud.google.com/bigquery/docs/reference/rest/v2/routines#RoutineReference.FIELDS.routine_id + return 256; + } + + @Override + public int getMaxCatalogNameLength() { + // Corresponds to the BigQuery Project ID, which can be a maximum of 30 characters. + // See: + // https://cloud.google.com/resource-manager/docs/creating-managing-projects#before_you_begin + return 30; + } + + @Override + public int getMaxRowSize() { + // Per JDBC spec, returns 0 as there is no fixed limit or is unknown. + return 0; + } + + @Override + public boolean doesMaxRowSizeIncludeBlobs() { + return false; + } + + @Override + public int getMaxStatementLength() { + // Per JDBC spec, returns 0 as there is no fixed limit or is unknown. + // See: https://cloud.google.com/bigquery/quotas#query_jobs + return 0; + } + + @Override + public int getMaxStatements() { + // Per JDBC spec, returns 0 as there is no fixed limit or is unknown. + return 0; + } + + @Override + public int getMaxTableNameLength() { + // Table IDs can be up to 1024 characters long. + // See: https://cloud.google.com/bigquery/docs/tables#table-naming + return 1024; + } + + @Override + public int getMaxTablesInSelect() { + // BigQuery allows up to 1,000 tables to be referenced per query. 
+ // See: https://cloud.google.com/bigquery/quotas#query_jobs + return 1000; + } + + @Override + public int getMaxUserNameLength() { + return 0; + } + + @Override + public int getDefaultTransactionIsolation() { + return Connection.TRANSACTION_SERIALIZABLE; + } + + @Override + public boolean supportsTransactions() { + return true; + } + + @Override + public boolean supportsTransactionIsolationLevel(int level) { + return level == Connection.TRANSACTION_SERIALIZABLE; + } + + @Override + public boolean supportsDataDefinitionAndDataManipulationTransactions() { + return false; + } + + @Override + public boolean supportsDataManipulationTransactionsOnly() { + return false; + } + + @Override + public boolean dataDefinitionCausesTransactionCommit() { + return false; + } + + @Override + public boolean dataDefinitionIgnoredInTransactions() { + return false; + } + + @Override + public ResultSet getProcedures( + String catalog, String schemaPattern, String procedureNamePattern) { + if ((catalog == null || catalog.isEmpty()) + || (schemaPattern != null && schemaPattern.isEmpty()) + || (procedureNamePattern != null && procedureNamePattern.isEmpty())) { + LOG.warning("Returning empty ResultSet as catalog is null/empty or a pattern is empty."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + "getProcedures called for catalog: %s, schemaPattern: %s, procedureNamePattern: %s", + catalog, schemaPattern, procedureNamePattern); + + final Pattern schemaRegex = compileSqlLikePattern(schemaPattern); + final Pattern procedureNameRegex = compileSqlLikePattern(procedureNamePattern); + final Schema resultSchema = defineGetProceduresSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = + new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final List> processingTaskFutures = new ArrayList<>(); + final String catalogParam = catalog; + + Runnable procedureFetcher = + () -> { + ExecutorService apiExecutor = null; + ExecutorService routineProcessorExecutor = null; + final FieldList localResultSchemaFields = resultSchemaFields; + final List>> apiFutures = new ArrayList<>(); + + try { + List datasetsToScan = + findMatchingBigQueryObjects( + "Dataset", + () -> + bigquery.listDatasets( + catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)), + (ds) -> ds.getDatasetId().getDataset(), + schemaPattern, + schemaRegex, + LOG); + + if (datasetsToScan.isEmpty()) { + LOG.info("Fetcher thread found no matching datasets. 
Finishing."); + return; + } + + apiExecutor = Executors.newFixedThreadPool(API_EXECUTOR_POOL_SIZE); + routineProcessorExecutor = Executors.newFixedThreadPool(this.metadataFetchThreadCount); + + LOG.fine("Submitting parallel findMatchingRoutines tasks..."); + for (Dataset dataset : datasetsToScan) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Fetcher interrupted during dataset iteration submission."); + break; + } + + final DatasetId currentDatasetId = dataset.getDatasetId(); + Callable> apiCallable = + () -> + findMatchingBigQueryObjects( + "Routine", + () -> + bigquery.listRoutines( + currentDatasetId, RoutineListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> + bigquery.getRoutine( + RoutineId.of( + currentDatasetId.getProject(), + currentDatasetId.getDataset(), + name)), + (rt) -> rt.getRoutineId().getRoutine(), + procedureNamePattern, + procedureNameRegex, + LOG); + Future> apiFuture = apiExecutor.submit(apiCallable); + apiFutures.add(apiFuture); + } + LOG.fine("Finished submitting " + apiFutures.size() + " findMatchingRoutines tasks."); + apiExecutor.shutdown(); + + LOG.fine("Processing results from findMatchingRoutines tasks..."); + for (Future> apiFuture : apiFutures) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Fetcher interrupted while processing API futures."); + break; + } + try { + List routinesResult = apiFuture.get(); + if (routinesResult != null) { + for (Routine routine : routinesResult) { + if (Thread.currentThread().isInterrupted()) break; + + if ("PROCEDURE".equalsIgnoreCase(routine.getRoutineType())) { + LOG.fine( + "Submitting processing task for procedure: " + routine.getRoutineId()); + final Routine finalRoutine = routine; + Future processFuture = + routineProcessorExecutor.submit( + () -> + processProcedureInfo( + finalRoutine, collectedResults, localResultSchemaFields)); + processingTaskFutures.add(processFuture); + } else { + LOG.finer("Skipping non-procedure routine: " + routine.getRoutineId()); + } + } + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning("Fetcher thread interrupted while waiting for API future result."); + break; + } catch (ExecutionException e) { + LOG.warning( + "Error executing findMatchingRoutines task: " + + e.getMessage() + + ". 
Cause: " + + e.getCause()); + } catch (CancellationException e) { + LOG.warning("A findMatchingRoutines task was cancelled."); + } + } + + LOG.fine( + "Finished submitting " + + processingTaskFutures.size() + + " processProcedureInfo tasks."); + + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Fetcher interrupted before waiting for processing tasks; cancelling remaining."); + processingTaskFutures.forEach(f -> f.cancel(true)); + } else { + LOG.fine("Waiting for processProcedureInfo tasks to complete..."); + waitForTasksCompletion(processingTaskFutures); + LOG.fine("All processProcedureInfo tasks completed or handled."); + } + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetProceduresComparator(localResultSchemaFields); + sortResults(collectedResults, comparator, "getProcedures", LOG); + } + + if (!Thread.currentThread().isInterrupted()) { + populateQueue(collectedResults, queue, localResultSchemaFields); + } + + } catch (Throwable t) { + LOG.severe("Unexpected error in procedure fetcher runnable: " + t.getMessage()); + apiFutures.forEach(f -> f.cancel(true)); + processingTaskFutures.forEach(f -> f.cancel(true)); + } finally { + signalEndOfData(queue, localResultSchemaFields); + shutdownExecutor(apiExecutor); + shutdownExecutor(routineProcessorExecutor); + LOG.info("Procedure fetcher thread finished."); + } + }; + + Thread fetcherThread = new Thread(procedureFetcher, "getProcedures-fetcher-" + catalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getProcedures"); + return resultSet; + } + + Schema defineGetProceduresSchema() { + List fields = new ArrayList<>(9); + fields.add( + Field.newBuilder("PROCEDURE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PROCEDURE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PROCEDURE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("reserved1", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("reserved2", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("reserved3", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PROCEDURE_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("SPECIFIC_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + void processProcedureInfo( + Routine routine, List collectedResults, FieldList resultSchemaFields) { + + RoutineId routineId = routine.getRoutineId(); + LOG.fine("Processing procedure info for: " + routineId); + + try { + if (!"PROCEDURE".equalsIgnoreCase(routine.getRoutineType())) { + LOG.warning( + "processProcedureInfo called with non-procedure type: " + + routine.getRoutineType() + + " for " + + routineId); + return; + } + + String catalogName = routineId.getProject(); + String schemaName = routineId.getDataset(); + String procedureName = routineId.getRoutine(); + String remarks = routine.getDescription(); + + 
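// Assemble one row of the getProcedures result. The java.sql.DatabaseMetaData + // contract defines nine columns for this ResultSet: PROCEDURE_CAT, PROCEDURE_SCHEM, + // PROCEDURE_NAME, three reserved columns, REMARKS, PROCEDURE_TYPE and SPECIFIC_NAME. + // BigQuery does not report whether a procedure returns a value, so PROCEDURE_TYPE is + // always procedureResultUnknown, and SPECIFIC_NAME repeats the routine name, which is + // unique within a dataset. + 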
List values = new ArrayList<>(resultSchemaFields.size()); + + values.add(createStringFieldValue(catalogName)); // 1. PROCEDURE_CAT + values.add(createStringFieldValue(schemaName)); // 2. PROCEDURE_SCHEM + values.add(createStringFieldValue(procedureName)); // 3. PROCEDURE_NAME + values.add(createNullFieldValue()); // 4. reserved1 + values.add(createNullFieldValue()); // 5. reserved2 + values.add(createNullFieldValue()); // 6. reserved3 + values.add(createStringFieldValue(remarks)); // 7. REMARKS + values.add( + createLongFieldValue( + (long) DatabaseMetaData.procedureResultUnknown)); // 8. PROCEDURE_TYPE + values.add(createStringFieldValue(procedureName)); // 9. SPECIFIC_NAME + + FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields); + collectedResults.add(rowFvl); + + LOG.fine("Processed and added procedure info row for: " + routineId); + + } catch (Exception e) { + LOG.warning( + "Error processing procedure info for %s: %s. Skipping this procedure.", + routineId, e.getMessage()); + } + } + + Comparator defineGetProceduresComparator(FieldList resultSchemaFields) { + final int PROC_CAT_IDX = resultSchemaFields.getIndex("PROCEDURE_CAT"); + final int PROC_SCHEM_IDX = resultSchemaFields.getIndex("PROCEDURE_SCHEM"); + final int PROC_NAME_IDX = resultSchemaFields.getIndex("PROCEDURE_NAME"); + final int SPEC_NAME_IDX = resultSchemaFields.getIndex("SPECIFIC_NAME"); + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_CAT_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_SCHEM_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_NAME_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, SPEC_NAME_IDX), + Comparator.nullsFirst(String::compareTo)); + } + + @Override + public ResultSet getProcedureColumns( + String catalog, String schemaPattern, String procedureNamePattern, String columnNamePattern) { + + if (catalog == null || catalog.isEmpty()) { + LOG.warning("Returning empty ResultSet because catalog (project) is null or empty."); + return new BigQueryJsonResultSet(); + } + if ((schemaPattern != null && schemaPattern.isEmpty()) + || (procedureNamePattern != null && procedureNamePattern.isEmpty()) + || (columnNamePattern != null && columnNamePattern.isEmpty())) { + LOG.warning("Returning empty ResultSet because an explicit empty pattern was provided."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + "getProcedureColumns called for catalog: %s, schemaPattern: %s, procedureNamePattern:" + + " %s, columnNamePattern: %s", + catalog, schemaPattern, procedureNamePattern, columnNamePattern); + + final Pattern schemaRegex = compileSqlLikePattern(schemaPattern); + final Pattern procedureNameRegex = compileSqlLikePattern(procedureNamePattern); + final Pattern columnNameRegex = compileSqlLikePattern(columnNamePattern); + + final Schema resultSchema = defineGetProcedureColumnsSchema(); + final BlockingQueue queue = + new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final List> processingTaskFutures = new ArrayList<>(); + final String catalogParam = catalog; + + Runnable procedureColumnFetcher = + () -> { + ExecutorService listRoutinesExecutor = null; + ExecutorService getRoutineDetailsExecutor = null; + ExecutorService processArgsExecutor = null; + + final 
String fetcherThreadNameSuffix = + "-" + catalogParam.substring(0, Math.min(10, catalogParam.length())); + + try { + List datasetsToScan = + fetchMatchingDatasetsForProcedureColumns(catalogParam, schemaPattern, schemaRegex); + if (datasetsToScan.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info( + "Fetcher: No matching datasets or interrupted early. Catalog: " + catalogParam); + return; + } + + listRoutinesExecutor = + Executors.newFixedThreadPool( + API_EXECUTOR_POOL_SIZE, + runnable -> new Thread(runnable, "pcol-list-rout" + fetcherThreadNameSuffix)); + List procedureIdsToGet = + listMatchingProcedureIdsFromDatasets( + datasetsToScan, + procedureNamePattern, + procedureNameRegex, + listRoutinesExecutor, + catalogParam, + LOG); + shutdownExecutor(listRoutinesExecutor); + listRoutinesExecutor = null; + + if (procedureIdsToGet.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info("Fetcher: No procedure IDs found or interrupted. Catalog: " + catalogParam); + return; + } + + getRoutineDetailsExecutor = + Executors.newFixedThreadPool( + 100, + runnable -> new Thread(runnable, "pcol-get-details" + fetcherThreadNameSuffix)); + List fullRoutines = + fetchFullRoutineDetailsForIds(procedureIdsToGet, getRoutineDetailsExecutor, LOG); + shutdownExecutor(getRoutineDetailsExecutor); + getRoutineDetailsExecutor = null; + + if (fullRoutines.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info( + "Fetcher: No full routines fetched or interrupted. Catalog: " + catalogParam); + return; + } + + processArgsExecutor = + Executors.newFixedThreadPool( + this.metadataFetchThreadCount, + runnable -> new Thread(runnable, "pcol-proc-args" + fetcherThreadNameSuffix)); + submitProcedureArgumentProcessingJobs( + fullRoutines, + columnNameRegex, + collectedResults, + resultSchema.getFields(), + processArgsExecutor, + processingTaskFutures, + LOG); + + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Fetcher: Interrupted before waiting for argument processing. Catalog: " + + catalogParam); + processingTaskFutures.forEach(f -> f.cancel(true)); + } else { + LOG.fine( + "Fetcher: Waiting for " + + processingTaskFutures.size() + + " argument processing tasks. Catalog: " + + catalogParam); + waitForTasksCompletion(processingTaskFutures); + LOG.fine( + "Fetcher: All argument processing tasks completed or handled. Catalog: " + + catalogParam); + } + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetProcedureColumnsComparator(resultSchema.getFields()); + sortResults(collectedResults, comparator, "getProcedureColumns", LOG); + populateQueue(collectedResults, queue, resultSchema.getFields()); + } + + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning( + "Fetcher: Interrupted in main try block for catalog " + + catalogParam + + ". Error: " + + e.getMessage()); + processingTaskFutures.forEach(f -> f.cancel(true)); + } catch (Throwable t) { + LOG.severe( + "Fetcher: Unexpected error in main try block for catalog " + + catalogParam + + ". 
Error: " + + t.getMessage()); + processingTaskFutures.forEach(f -> f.cancel(true)); + } finally { + signalEndOfData(queue, resultSchema.getFields()); + if (listRoutinesExecutor != null) shutdownExecutor(listRoutinesExecutor); + if (getRoutineDetailsExecutor != null) shutdownExecutor(getRoutineDetailsExecutor); + if (processArgsExecutor != null) shutdownExecutor(processArgsExecutor); + LOG.info("Procedure column fetcher thread finished for catalog: " + catalogParam); + } + }; + + Thread fetcherThread = + new Thread(procedureColumnFetcher, "getProcedureColumns-fetcher-" + catalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getProcedureColumns for catalog: " + catalog); + return resultSet; + } + + private List fetchMatchingDatasetsForProcedureColumns( + String catalogParam, String schemaPattern, Pattern schemaRegex) throws InterruptedException { + LOG.fine( + "Fetching matching datasets for catalog '%s', schemaPattern '%s'", + catalogParam, schemaPattern); + List datasetsToScan = + findMatchingBigQueryObjects( + "Dataset", + () -> + bigquery.listDatasets(catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)), + (ds) -> ds.getDatasetId().getDataset(), + schemaPattern, + schemaRegex, + LOG); + LOG.info( + "Found %d datasets to scan for procedures in catalog '%s'.", + datasetsToScan.size(), catalogParam); + return datasetsToScan; + } + + List listMatchingProcedureIdsFromDatasets( + List datasetsToScan, + String procedureNamePattern, + Pattern procedureNameRegex, + ExecutorService listRoutinesExecutor, + String catalogParam, + BigQueryJdbcCustomLogger logger) + throws InterruptedException { + + logger.fine( + "Listing matching procedure IDs from %d datasets for catalog '%s'.", + datasetsToScan.size(), catalogParam); + final List>> listRoutineFutures = new ArrayList<>(); + final List procedureIdsToGet = Collections.synchronizedList(new ArrayList<>()); + + for (Dataset dataset : datasetsToScan) { + if (Thread.currentThread().isInterrupted()) { + logger.warning( + "Interrupted during submission of routine listing tasks for catalog: " + catalogParam); + throw new InterruptedException("Interrupted while listing routines"); + } + final DatasetId currentDatasetId = dataset.getDatasetId(); + Callable> listCallable = + () -> + findMatchingBigQueryObjects( + "Routine", + () -> + bigquery.listRoutines( + currentDatasetId, RoutineListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> + bigquery.getRoutine( + RoutineId.of( + currentDatasetId.getProject(), currentDatasetId.getDataset(), name)), + (rt) -> rt.getRoutineId().getRoutine(), + procedureNamePattern, + procedureNameRegex, + logger); + listRoutineFutures.add(listRoutinesExecutor.submit(listCallable)); + } + logger.fine( + "Submitted " + + listRoutineFutures.size() + + " routine list tasks for catalog: " + + catalogParam); + + for (Future> listFuture : listRoutineFutures) { + if (Thread.currentThread().isInterrupted()) { + logger.warning( + "Interrupted while collecting routine list results for catalog: " + catalogParam); + listRoutineFutures.forEach(f -> f.cancel(true)); + throw new InterruptedException("Interrupted while collecting routine lists"); + } + try { + List listedRoutines = listFuture.get(); + if (listedRoutines != null) { + for (Routine listedRoutine : listedRoutines) { + if (listedRoutine != null + && 
"PROCEDURE".equalsIgnoreCase(listedRoutine.getRoutineType())) { + if (listedRoutine.getRoutineId() != null) { + procedureIdsToGet.add(listedRoutine.getRoutineId()); + } else { + logger.warning( + "Found a procedure type routine with a null ID during listing phase for" + + " catalog: " + + catalogParam); + } + } + } + } + } catch (ExecutionException e) { + logger.warning( + "Error getting routine list result for catalog " + catalogParam + ": " + e.getCause()); + } catch (CancellationException e) { + logger.warning("Routine list task cancelled for catalog: " + catalogParam); + } + } + logger.info( + "Found %d procedure IDs to fetch details for in catalog '%s'.", + procedureIdsToGet.size(), catalogParam); + return procedureIdsToGet; + } + + List fetchFullRoutineDetailsForIds( + List procedureIdsToGet, + ExecutorService getRoutineDetailsExecutor, + BigQueryJdbcCustomLogger logger) + throws InterruptedException { + logger.fine("Fetching full details for %d procedure IDs.", procedureIdsToGet.size()); + final List> getRoutineFutures = new ArrayList<>(); + final List fullRoutines = Collections.synchronizedList(new ArrayList<>()); + + for (RoutineId procId : procedureIdsToGet) { + if (Thread.currentThread().isInterrupted()) { + logger.warning("Interrupted during submission of getRoutine detail tasks."); + throw new InterruptedException("Interrupted while submitting getRoutine tasks"); + } + final RoutineId currentProcId = procId; + Callable getCallable = + () -> { + try { + return bigquery.getRoutine(currentProcId); + } catch (Exception e) { + logger.warning( + "Failed to get full details for routine " + + currentProcId + + ": " + + e.getMessage()); + return null; + } + }; + getRoutineFutures.add(getRoutineDetailsExecutor.submit(getCallable)); + } + logger.fine("Submitted " + getRoutineFutures.size() + " getRoutine detail tasks."); + + for (Future getFuture : getRoutineFutures) { + if (Thread.currentThread().isInterrupted()) { + logger.warning("Interrupted while collecting getRoutine detail results."); + getRoutineFutures.forEach(f -> f.cancel(true)); // Cancel remaining + throw new InterruptedException("Interrupted while collecting Routine details"); + } + try { + Routine fullRoutine = getFuture.get(); + if (fullRoutine != null) { + fullRoutines.add(fullRoutine); + } + } catch (ExecutionException e) { + logger.warning("Error processing getRoutine future result: " + e.getCause()); + } catch (CancellationException e) { + logger.warning("getRoutine detail task cancelled."); + } + } + logger.info("Successfully fetched full details for %d routines.", fullRoutines.size()); + return fullRoutines; + } + + void submitProcedureArgumentProcessingJobs( + List fullRoutines, + Pattern columnNameRegex, + List collectedResults, + FieldList resultSchemaFields, + ExecutorService processArgsExecutor, + List> outArgumentProcessingFutures, + BigQueryJdbcCustomLogger logger) + throws InterruptedException { + logger.fine("Submitting argument processing jobs for %d routines.", fullRoutines.size()); + + for (Routine fullRoutine : fullRoutines) { + if (Thread.currentThread().isInterrupted()) { + logger.warning("Interrupted during submission of argument processing tasks."); + throw new InterruptedException("Interrupted while submitting argument processing jobs"); + } + if (fullRoutine != null) { + if ("PROCEDURE".equalsIgnoreCase(fullRoutine.getRoutineType())) { + final Routine finalFullRoutine = fullRoutine; + Future processFuture = + processArgsExecutor.submit( + () -> + processProcedureArguments( + finalFullRoutine, 
columnNameRegex, collectedResults, resultSchemaFields)); + outArgumentProcessingFutures.add(processFuture); + } else { + logger.warning( + "Routine " + + (fullRoutine.getRoutineId() != null + ? fullRoutine.getRoutineId().toString() + : "UNKNOWN_ID") + + " fetched via getRoutine was not of type PROCEDURE (Type: " + + fullRoutine.getRoutineType() + + "). Skipping argument processing."); + } + } + } + logger.fine( + "Finished submitting " + + outArgumentProcessingFutures.size() + + " processProcedureArguments tasks."); + } + + Schema defineGetProcedureColumnsSchema() { + List fields = new ArrayList<>(20); + fields.add( + Field.newBuilder("PROCEDURE_CAT", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PROCEDURE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PROCEDURE_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING).setMode(Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("COLUMN_TYPE", StandardSQLTypeName.INT64).setMode(Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64).setMode(Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING).setMode(Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("PRECISION", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("LENGTH", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); + fields.add(Field.newBuilder("SCALE", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); + fields.add(Field.newBuilder("RADIX", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("NULLABLE", StandardSQLTypeName.INT64).setMode(Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING).setMode(Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("COLUMN_DEF", StandardSQLTypeName.STRING).setMode(Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("SQL_DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SQL_DATETIME_SUB", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("CHAR_OCTET_LENGTH", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("ORDINAL_POSITION", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("IS_NULLABLE", StandardSQLTypeName.STRING).setMode(Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("SPECIFIC_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + void processProcedureArguments( + Routine routine, + Pattern columnNameRegex, + List collectedResults, + FieldList resultSchemaFields) { + + RoutineId routineId = routine.getRoutineId(); + List arguments; + try { + arguments = routine.getArguments(); + } catch (Exception e) { + LOG.warning( + "Could not retrieve arguments list for procedure %s: %s. 
No arguments will be" + + " processed.", + routineId, e.getMessage()); + return; + } + + if (arguments == null || arguments.isEmpty()) { + LOG.fine("Procedure " + routineId + " has no arguments."); + return; + } + + String catalogName = routineId.getProject(); + String schemaName = routineId.getDataset(); + String procedureName = routineId.getRoutine(); + String specificName = procedureName; + + for (int i = 0; i < arguments.size(); i++) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Argument processing task interrupted for " + routineId); + break; + } + + int ordinalPosition = i + 1; + RoutineArgument arg; + String argName; + + try { + arg = arguments.get(i); + argName = arg.getName(); + } catch (Exception listAccessException) { + LOG.warning( + "Exception during arguments.get(%d) for Proc: %s. Ordinal: %d. Message: %s." + + " Generating fallback row.", + i, routineId, ordinalPosition, listAccessException.getMessage()); + argName = "arg_retrieval_err_" + ordinalPosition; + arg = null; + } + + // Filter by columnNamePattern, but not by generated fallback name + if (columnNameRegex != null) { + assert argName != null; + if (!argName.startsWith("arg_")) { + if (!columnNameRegex.matcher(argName).matches()) { + continue; + } + } + } + + List values = + createProcedureColumnRow( + catalogName, schemaName, procedureName, specificName, arg, ordinalPosition, argName); + + FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields); + collectedResults.add(rowFvl); + } + } + + List createProcedureColumnRow( + String catalog, + String schemaName, + String procedureName, + String specificName, + @Nullable RoutineArgument argument, + int ordinalPosition, + String columnName) { + + List values = new ArrayList<>(20); + ColumnTypeInfo typeInfo; + + if (argument == null) { + LOG.warning( + "Proc: %s, Arg: %s (Pos %d) - RoutineArgument object is null. Defaulting type to" + + " VARCHAR.", + procedureName, columnName, ordinalPosition); + typeInfo = new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null); + } else { + try { + StandardSQLDataType argumentDataType = argument.getDataType(); + if (argumentDataType == null) { + LOG.warning( + "Proc: %s, Arg: %s (Pos %d) - argument.getDataType() returned null. Defaulting" + + " type to VARCHAR.", + procedureName, columnName, ordinalPosition); + typeInfo = new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null); + } else { + typeInfo = + determineTypeInfoFromDataType( + argumentDataType, procedureName, columnName, ordinalPosition); + } + } catch (Exception e) { + LOG.warning( + "Proc: %s, Arg: %s (Pos %d) - Unexpected Exception during type processing." + + " Defaulting type to VARCHAR. Error: %s", + procedureName, columnName, ordinalPosition, e.getMessage()); + typeInfo = new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null); + } + } + + String argumentModeStr = null; + if (argument != null) { + try { + argumentModeStr = argument.getMode(); + } catch (Exception e) { + LOG.warning( + "Proc: %s, Arg: %s (Pos %d) - Could not get argument mode. Error: %s", + procedureName, columnName, ordinalPosition, e.getMessage()); + } + } + + values.add(createStringFieldValue(catalog)); // 1. PROCEDURE_CAT + values.add(createStringFieldValue(schemaName)); // 2. PROCEDURE_SCHEM + values.add(createStringFieldValue(procedureName)); // 3. PROCEDURE_NAME + values.add(createStringFieldValue(columnName)); // 4. 
COLUMN_NAME + long columnTypeJdbc = DatabaseMetaData.procedureColumnUnknown; + if ("IN".equalsIgnoreCase(argumentModeStr)) { + columnTypeJdbc = DatabaseMetaData.procedureColumnIn; + } else if ("OUT".equalsIgnoreCase(argumentModeStr)) { + columnTypeJdbc = DatabaseMetaData.procedureColumnOut; + } else if ("INOUT".equalsIgnoreCase(argumentModeStr)) { + columnTypeJdbc = DatabaseMetaData.procedureColumnInOut; + } + values.add(createLongFieldValue(columnTypeJdbc)); // 5. COLUMN_TYPE + values.add(createLongFieldValue((long) typeInfo.jdbcType)); // 6. DATA_TYPE (java.sql.Types) + values.add(createStringFieldValue(typeInfo.typeName)); // 7. TYPE_NAME (DB type name) + values.add( + createLongFieldValue( + typeInfo.columnSize == null ? null : typeInfo.columnSize.longValue())); // 8. PRECISION + values.add( + createNullFieldValue()); // 9. LENGTH (length in bytes - often null for procedure params) + values.add( + createLongFieldValue( + typeInfo.decimalDigits == null + ? null + : typeInfo.decimalDigits.longValue())); // 10. SCALE + values.add( + createLongFieldValue( + typeInfo.numPrecRadix == null ? null : typeInfo.numPrecRadix.longValue())); // 11. RADIX + values.add(createLongFieldValue((long) DatabaseMetaData.procedureNullable)); // 12. NULLABLE + values.add( + createNullFieldValue()); // 13. REMARKS (Can be argument.getDescription() if available and + // needed) + values.add(createNullFieldValue()); // 14. COLUMN_DEF (Default value - typically null) + values.add(createNullFieldValue()); // 15. SQL_DATA_TYPE (reserved) + values.add(createNullFieldValue()); // 16. SQL_DATETIME_SUB (reserved) + values.add(createNullFieldValue()); // 17. CHAR_OCTET_LENGTH (null for non-char/binary) + values.add(createLongFieldValue((long) ordinalPosition)); // 18. ORDINAL_POSITION + values.add(createStringFieldValue("YES")); // 19. IS_NULLABLE (Default to "YES") + values.add(createStringFieldValue(specificName)); // 20. SPECIFIC_NAME + + return values; + } + + ColumnTypeInfo determineTypeInfoFromDataType( + StandardSQLDataType argumentDataType, + String procedureName, + String columnName, + int ordinalPosition) { + + ColumnTypeInfo defaultVarcharTypeInfo = + new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null); + try { + String typeKind = argumentDataType.getTypeKind(); + if (typeKind != null && !typeKind.isEmpty()) { + if ("ARRAY".equalsIgnoreCase(typeKind)) { + return new ColumnTypeInfo(Types.ARRAY, "ARRAY", null, null, null); + } + StandardSQLTypeName determinedType = StandardSQLTypeName.valueOf(typeKind.toUpperCase()); + return getColumnTypeInfoForSqlType(determinedType); + } + } catch (Exception e) { + LOG.warning( + "Proc: %s, Arg: %s (Pos %d) - Caught an unexpected Exception during type" + + " determination. Defaulting type to VARCHAR. 
Error: %s", + procedureName, columnName, ordinalPosition, e.getMessage()); + } + return defaultVarcharTypeInfo; + } + + Comparator defineGetProcedureColumnsComparator(FieldList resultSchemaFields) { + final int PROC_CAT_IDX = resultSchemaFields.getIndex("PROCEDURE_CAT"); + final int PROC_SCHEM_IDX = resultSchemaFields.getIndex("PROCEDURE_SCHEM"); + final int PROC_NAME_IDX = resultSchemaFields.getIndex("PROCEDURE_NAME"); + final int SPEC_NAME_IDX = resultSchemaFields.getIndex("SPECIFIC_NAME"); + final int COL_NAME_IDX = resultSchemaFields.getIndex("COLUMN_NAME"); + + if (PROC_CAT_IDX < 0 + || PROC_SCHEM_IDX < 0 + || PROC_NAME_IDX < 0 + || SPEC_NAME_IDX < 0 + || COL_NAME_IDX < 0) { + LOG.severe( + "Could not find required columns (PROCEDURE_CAT, SCHEM, NAME, SPECIFIC_NAME, COLUMN_NAME)" + + " in getProcedureColumns schema for sorting. Returning null comparator."); + return null; + } + + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_CAT_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_SCHEM_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, PROC_NAME_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, SPEC_NAME_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, COL_NAME_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)); + } + + @Override + public ResultSet getTables( + String catalog, String schemaPattern, String tableNamePattern, String[] types) { + + Tuple effectiveIdentifiers = + determineEffectiveCatalogAndSchema(catalog, schemaPattern); + String effectiveCatalog = effectiveIdentifiers.x(); + String effectiveSchemaPattern = effectiveIdentifiers.y(); + + if ((effectiveCatalog == null || effectiveCatalog.isEmpty()) + || (effectiveSchemaPattern != null && effectiveSchemaPattern.isEmpty()) + || (tableNamePattern != null && tableNamePattern.isEmpty())) { + LOG.warning( + "Returning empty ResultSet as one or more patterns are empty or catalog is null."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + "getTables called for catalog: %s, schemaPattern: %s, tableNamePattern: %s, types: %s", + effectiveCatalog, effectiveSchemaPattern, tableNamePattern, Arrays.toString(types)); + + final Pattern schemaRegex = compileSqlLikePattern(effectiveSchemaPattern); + final Pattern tableNameRegex = compileSqlLikePattern(tableNamePattern); + final Set requestedTypes = + (types == null || types.length == 0) ? 
null : new HashSet<>(Arrays.asList(types)); + + final Schema resultSchema = defineGetTablesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + + final BlockingQueue queue = + new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final String catalogParam = effectiveCatalog; + final String schemaParam = effectiveSchemaPattern; + + Runnable tableFetcher = + () -> { + ExecutorService apiExecutor = null; + ExecutorService tableProcessorExecutor = null; + final FieldList localResultSchemaFields = resultSchemaFields; + final List>> apiFutures = new ArrayList<>(); + final List> processingFutures = new ArrayList<>(); + + try { + List datasetsToScan = + findMatchingBigQueryObjects( + "Dataset", + () -> + bigquery.listDatasets( + catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)), + (ds) -> ds.getDatasetId().getDataset(), + schemaParam, + schemaRegex, + LOG); + + if (datasetsToScan.isEmpty()) { + LOG.info("Fetcher thread found no matching datasets. Returning empty resultset."); + return; + } + + apiExecutor = Executors.newFixedThreadPool(API_EXECUTOR_POOL_SIZE); + tableProcessorExecutor = Executors.newFixedThreadPool(this.metadataFetchThreadCount); + + LOG.fine("Submitting parallel findMatchingTables tasks..."); + for (Dataset dataset : datasetsToScan) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Table fetcher interrupted during dataset iteration."); + break; + } + + final DatasetId currentDatasetId = dataset.getDatasetId(); + Callable> apiCallable = + () -> + findMatchingBigQueryObjects( + "Table", + () -> + bigquery.listTables( + currentDatasetId, TableListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> + bigquery.getTable( + TableId.of( + currentDatasetId.getProject(), + currentDatasetId.getDataset(), + name)), + (tbl) -> tbl.getTableId().getTable(), + tableNamePattern, + tableNameRegex, + LOG); + Future> apiFuture = apiExecutor.submit(apiCallable); + apiFutures.add(apiFuture); + } + LOG.fine("Finished submitting " + apiFutures.size() + " findMatchingTables tasks."); + apiExecutor.shutdown(); + + LOG.fine("Processing results from findMatchingTables tasks..."); + for (Future> apiFuture : apiFutures) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Table fetcher interrupted while processing API futures."); + break; + } + try { + List tablesResult = apiFuture.get(); + if (tablesResult != null) { + for (Table table : tablesResult) { + if (Thread.currentThread().isInterrupted()) break; + + final Table currentTable = table; + Future processFuture = + tableProcessorExecutor.submit( + () -> + processTableInfo( + currentTable, + requestedTypes, + collectedResults, + localResultSchemaFields)); + processingFutures.add(processFuture); + } + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning("Fetcher thread interrupted while waiting for API future result."); + break; + } catch (ExecutionException e) { + LOG.warning( + "Error executing findMatchingTables task: " + + e.getMessage() + + ". 
Cause: " + + e.getCause()); + } catch (CancellationException e) { + LOG.warning("A findMatchingTables task was cancelled."); + } + } + + LOG.fine( + "Finished submitting " + processingFutures.size() + " processTableInfo tasks."); + + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Fetcher interrupted before waiting for processing tasks; cancelling remaining."); + processingFutures.forEach(f -> f.cancel(true)); + } else { + LOG.fine("Waiting for processTableInfo tasks to complete..."); + waitForTasksCompletion(processingFutures); + LOG.fine("All processTableInfo tasks completed."); + } + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetTablesComparator(localResultSchemaFields); + sortResults(collectedResults, comparator, "getTables", LOG); + } + + if (!Thread.currentThread().isInterrupted()) { + populateQueue(collectedResults, queue, localResultSchemaFields); + } + + } catch (Throwable t) { + LOG.severe("Unexpected error in table fetcher runnable: " + t.getMessage()); + apiFutures.forEach(f -> f.cancel(true)); + processingFutures.forEach(f -> f.cancel(true)); + } finally { + signalEndOfData(queue, localResultSchemaFields); + shutdownExecutor(apiExecutor); + shutdownExecutor(tableProcessorExecutor); + LOG.info("Table fetcher thread finished."); + } + }; + + Thread fetcherThread = new Thread(tableFetcher, "getTables-fetcher-" + effectiveCatalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getTables"); + return resultSet; + } + + Schema defineGetTablesSchema() { + List fields = new ArrayList<>(10); + fields.add( + Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("TABLE_TYPE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SELF_REFERENCING_COL_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("REF_GENERATION", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + return Schema.of(fields); + } + + void processTableInfo( + Table table, + Set requestedTypes, + List collectedResults, + FieldList resultSchemaFields) { + + TableId tableId = table.getTableId(); + LOG.fine("Processing table info for: " + tableId); + + try { + String catalogName = tableId.getProject(); + String schemaName = tableId.getDataset(); + String tableName = tableId.getTable(); + TableDefinition definition = table.getDefinition(); + String bqTableType = definition.getType().toString(); + String remarks = table.getDescription(); + + if (requestedTypes != null && 
!requestedTypes.contains(bqTableType)) {
+        LOG.finer(
+            "Skipping table %s as its type '%s' is not in the requested types %s",
+            tableId, bqTableType, requestedTypes);
+        return;
+      }
+
+      List<FieldValue> values = new ArrayList<>(resultSchemaFields.size());
+      values.add(createStringFieldValue(catalogName)); // 1. TABLE_CAT
+      values.add(createStringFieldValue(schemaName)); // 2. TABLE_SCHEM
+      values.add(createStringFieldValue(tableName)); // 3. TABLE_NAME
+      values.add(createStringFieldValue(bqTableType)); // 4. TABLE_TYPE
+      values.add(createStringFieldValue(remarks)); // 5. REMARKS
+      values.add(createNullFieldValue()); // 6. TYPE_CAT (always null)
+      values.add(createNullFieldValue()); // 7. TYPE_SCHEM (always null)
+      values.add(createNullFieldValue()); // 8. TYPE_NAME (always null)
+      values.add(createNullFieldValue()); // 9. SELF_REFERENCING_COL_NAME (always null)
+      values.add(createNullFieldValue()); // 10. REF_GENERATION (always null)
+
+      FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields);
+      collectedResults.add(rowFvl);
+
+      LOG.fine("Processed and added table info row for: " + tableId);
+    } catch (Exception e) {
+      LOG.warning(
+          "Error processing table info for %s: %s. Skipping this table.", tableId, e.getMessage());
+    }
+  }
+
+  Comparator<FieldValueList> defineGetTablesComparator(FieldList resultSchemaFields) {
+    final int TABLE_TYPE_IDX = resultSchemaFields.getIndex("TABLE_TYPE");
+    final int TABLE_CAT_IDX = resultSchemaFields.getIndex("TABLE_CAT");
+    final int TABLE_SCHEM_IDX = resultSchemaFields.getIndex("TABLE_SCHEM");
+    final int TABLE_NAME_IDX = resultSchemaFields.getIndex("TABLE_NAME");
+    return Comparator.comparing(
+            (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_TYPE_IDX),
+            Comparator.nullsFirst(String::compareTo))
+        .thenComparing(
+            (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_CAT_IDX),
+            Comparator.nullsFirst(String::compareTo))
+        .thenComparing(
+            (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_SCHEM_IDX),
+            Comparator.nullsFirst(String::compareTo))
+        .thenComparing(
+            (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_NAME_IDX),
+            Comparator.nullsFirst(String::compareTo));
+  }
+
+  @Override
+  public ResultSet getSchemas() {
+    LOG.info("getSchemas() called");
+
+    return getSchemas(null, null);
+  }
+
+  @Override
+  public ResultSet getCatalogs() {
+    LOG.info("getCatalogs() called");
+
+    final List<String> accessibleCatalogs = getAccessibleCatalogNames();
+    final Schema catalogsSchema = defineGetCatalogsSchema();
+    final FieldList schemaFields = catalogsSchema.getFields();
+    final List<FieldValueList> catalogRows =
+        prepareGetCatalogsRows(schemaFields, accessibleCatalogs);
+
+    final BlockingQueue<FieldValueList> queue =
+        new LinkedBlockingQueue<>(catalogRows.isEmpty() ? 1 : catalogRows.size() + 1);
+
+    populateQueue(catalogRows, queue, schemaFields);
+    signalEndOfData(queue, schemaFields);
+
+    return BigQueryJsonResultSet.of(
+        catalogsSchema, catalogRows.size(), queue, this.statement, new Thread[0]);
+  }
+
+  Schema defineGetCatalogsSchema() {
+    return Schema.of(
+        Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING).setMode(Mode.REQUIRED).build());
+  }
+
+  List<FieldValueList> prepareGetCatalogsRows(
+      FieldList schemaFields, List<String> accessibleCatalogs) {
+    List<FieldValueList> catalogRows = new ArrayList<>();
+    for (String catalogName : accessibleCatalogs) {
+      FieldValue fieldValue = FieldValue.of(FieldValue.Attribute.PRIMITIVE, catalogName);
+      catalogRows.add(FieldValueList.of(Collections.singletonList(fieldValue), schemaFields));
+    }
+    return catalogRows;
+  }
+
+  @Override
+  public ResultSet getTableTypes() {
+    LOG.info("getTableTypes() called");
+
+    final Schema tableTypesSchema = defineGetTableTypesSchema();
+    final List<FieldValueList> tableTypeRows = prepareGetTableTypesRows(tableTypesSchema);
+
+    BlockingQueue<FieldValueList> queue = new LinkedBlockingQueue<>(tableTypeRows.size() + 1);
+
+    populateQueue(tableTypeRows, queue, tableTypesSchema.getFields());
+    signalEndOfData(queue, tableTypesSchema.getFields());
+
+    return BigQueryJsonResultSet.of(
+        tableTypesSchema, tableTypeRows.size(), queue, this.statement, new Thread[0]);
+  }
+
+  static Schema defineGetTableTypesSchema() {
+    return Schema.of(
+        Field.newBuilder("TABLE_TYPE", StandardSQLTypeName.STRING)
+            .setMode(Field.Mode.REQUIRED)
+            .build());
+  }
+
+  static List<FieldValueList> prepareGetTableTypesRows(Schema schema) {
+    final String[] tableTypes = {"EXTERNAL", "MATERIALIZED VIEW", "SNAPSHOT", "TABLE", "VIEW"};
+    List<FieldValueList> rows = new ArrayList<>(tableTypes.length);
+    FieldList schemaFields = schema.getFields();
+
+    for (String typeName : tableTypes) {
+      FieldValue fieldValue = FieldValue.of(FieldValue.Attribute.PRIMITIVE, typeName);
+      rows.add(FieldValueList.of(Collections.singletonList(fieldValue), schemaFields));
+    }
+    return rows;
+  }
+
+  @Override
+  public ResultSet getColumns(
+      String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) {
+
+    Tuple<String, String> effectiveIdentifiers =
+        determineEffectiveCatalogAndSchema(catalog, schemaPattern);
+    String effectiveCatalog = effectiveIdentifiers.x();
+    String effectiveSchemaPattern = effectiveIdentifiers.y();
+
+    if ((effectiveCatalog == null || effectiveCatalog.isEmpty())
+        || (effectiveSchemaPattern != null && effectiveSchemaPattern.isEmpty())
+        || (tableNamePattern != null && tableNamePattern.isEmpty())
+        || (columnNamePattern != null && columnNamePattern.isEmpty())) {
+      LOG.warning(
+          "Returning empty ResultSet as one or more patterns are empty or catalog is null.");
+      return new BigQueryJsonResultSet();
+    }
+
+    LOG.info(
+        "getColumns called for catalog: %s, schemaPattern: %s, tableNamePattern: %s,"
+            + " columnNamePattern: %s",
+        effectiveCatalog, effectiveSchemaPattern, tableNamePattern, columnNamePattern);
+
+    Pattern schemaRegex = compileSqlLikePattern(effectiveSchemaPattern);
+    Pattern tableNameRegex = compileSqlLikePattern(tableNamePattern);
+    Pattern columnNameRegex = compileSqlLikePattern(columnNamePattern);
+
+    final Schema resultSchema = defineGetColumnsSchema();
+    final FieldList resultSchemaFields = resultSchema.getFields();
+    final BlockingQueue<FieldValueList> queue = new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY);
+    final List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+    final String catalogParam = effectiveCatalog;
+    final String schemaParam = effectiveSchemaPattern;
+
+    Runnable columnFetcher =
+        () -> {
+          ExecutorService columnExecutor = null;
+          final List<Future<?>> taskFutures = new ArrayList<>();
+          final FieldList localResultSchemaFields = resultSchemaFields;
+
+          try {
+            List<Dataset> datasetsToScan =
+                findMatchingBigQueryObjects(
+                    "Dataset",
+                    () ->
+                        bigquery.listDatasets(
+                            catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)),
+                    (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)),
+                    (ds) -> ds.getDatasetId().getDataset(),
+                    schemaParam,
+                    schemaRegex,
+                    LOG);
+
+            if (datasetsToScan.isEmpty()) {
+              LOG.info("Fetcher thread found no matching datasets. Returning empty resultset.");
+              return;
+            }
+
+            columnExecutor = Executors.newFixedThreadPool(this.metadataFetchThreadCount);
+
+            for (Dataset dataset : datasetsToScan) {
+              if (Thread.currentThread().isInterrupted()) {
+                LOG.warning("Fetcher interrupted during dataset iteration.");
+                break;
+              }
+
+              DatasetId datasetId = dataset.getDatasetId();
+              LOG.info("Processing dataset: " + datasetId.getDataset());
+
+              List<Table>
tablesToScan = + findMatchingBigQueryObjects( + "Table", + () -> + bigquery.listTables( + datasetId, TableListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> + bigquery.getTable( + TableId.of(datasetId.getProject(), datasetId.getDataset(), name)), + (tbl) -> tbl.getTableId().getTable(), + tableNamePattern, + tableNameRegex, + LOG); + + for (Table table : tablesToScan) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Fetcher interrupted during table iteration for dataset " + + datasetId.getDataset()); + break; + } + + TableId tableId = table.getTableId(); + LOG.fine("Submitting task for table: " + tableId); + final Table finalTable = table; + Future future = + columnExecutor.submit( + () -> + processTableColumns( + finalTable, + columnNameRegex, + collectedResults, + localResultSchemaFields)); + taskFutures.add(future); + } + if (Thread.currentThread().isInterrupted()) break; + } + + waitForTasksCompletion(taskFutures); + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetColumnsComparator(localResultSchemaFields); + sortResults(collectedResults, comparator, "getColumns", LOG); + } + + if (!Thread.currentThread().isInterrupted()) { + populateQueue(collectedResults, queue, localResultSchemaFields); + } + + } catch (Throwable t) { + LOG.severe("Unexpected error in column fetcher runnable: " + t.getMessage()); + taskFutures.forEach(f -> f.cancel(true)); + } finally { + signalEndOfData(queue, localResultSchemaFields); + shutdownExecutor(columnExecutor); + LOG.info("Column fetcher thread finished."); + } + }; + + Thread fetcherThread = new Thread(columnFetcher, "getColumns-fetcher-" + effectiveCatalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of(resultSchema, -1, queue, null, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getColumns"); + return resultSet; + } + + private void processTableColumns( + Table table, + Pattern columnNameRegex, + List collectedResults, + FieldList resultSchemaFields) { + TableId tableId = table.getTableId(); + LOG.fine("Processing columns for table: " + tableId); + TableDefinition definition = table.getDefinition(); + Schema tableSchema = (definition != null) ? definition.getSchema() : null; + + try { + if (tableSchema == null) { + LOG.fine( + "Schema not included in table object for " + + tableId + + ", fetching full table details..."); + Table fullTable = bigquery.getTable(tableId); + if (fullTable != null) { + definition = fullTable.getDefinition(); + tableSchema = (definition != null) ? definition.getSchema() : null; + } else { + LOG.warning( + "Table " + tableId + " not found when fetching full details for columns. Skipping."); + return; + } + } + + if (tableSchema == null + || tableSchema.getFields() == null + || tableSchema.getFields().isEmpty()) { + LOG.warning( + "Schema not found or fields are null for table %s (Type: %s). 
Skipping columns.", + tableId, definition.getType()); + return; + } + + FieldList fields = tableSchema.getFields(); + String catalogName = tableId.getProject(); + String schemaName = tableId.getDataset(); + String tableName = tableId.getTable(); + + for (int i = 0; i < fields.size(); i++) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Task for table " + tableId + " interrupted during column iteration."); + break; + } + Field field = fields.get(i); + String currentColumnName = field.getName(); + if (columnNameRegex != null && !columnNameRegex.matcher(currentColumnName).matches()) + continue; + List values = createColumnRow(catalogName, schemaName, tableName, field, i + 1); + FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields); + collectedResults.add(rowFvl); + } + LOG.fine("Finished processing columns for table: " + tableId); + } catch (BigQueryException e) { + LOG.warning( + "BigQueryException processing table %s: %s (Code: %d)", + tableId, e.getMessage(), e.getCode()); + } catch (Exception e) { + LOG.severe("Unexpected error processing table %s: %s", tableId, e.getMessage()); + } + } + + private Schema defineGetColumnsSchema() { + List fields = new ArrayList<>(24); + fields.add( + Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 1 + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 2 + fields.add( + Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 3 + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 4 + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); // 5 + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 6 + fields.add( + Field.newBuilder("COLUMN_SIZE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 7 + fields.add( + Field.newBuilder("BUFFER_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 8 + fields.add( + Field.newBuilder("DECIMAL_DIGITS", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 9 + fields.add( + Field.newBuilder("NUM_PREC_RADIX", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 10 + fields.add( + Field.newBuilder("NULLABLE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); // 11 + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 12 + fields.add( + Field.newBuilder("COLUMN_DEF", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 13 + fields.add( + Field.newBuilder("SQL_DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 14 + fields.add( + Field.newBuilder("SQL_DATETIME_SUB", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 15 + fields.add( + Field.newBuilder("CHAR_OCTET_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 16 + fields.add( + Field.newBuilder("ORDINAL_POSITION", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); // 17 + fields.add( + Field.newBuilder("IS_NULLABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 18 + fields.add( + Field.newBuilder("SCOPE_CATALOG", 
StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 19 + fields.add( + Field.newBuilder("SCOPE_SCHEMA", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 20 + fields.add( + Field.newBuilder("SCOPE_TABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 21 + fields.add( + Field.newBuilder("SOURCE_DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); // 22 + fields.add( + Field.newBuilder("IS_AUTOINCREMENT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 23 + fields.add( + Field.newBuilder("IS_GENERATEDCOLUMN", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 24 + return Schema.of(fields); + } + + List createColumnRow( + String catalog, String schemaName, String tableName, Field field, int ordinalPosition) { + List values = new ArrayList<>(24); + Field.Mode mode = (field.getMode() == null) ? Field.Mode.NULLABLE : field.getMode(); + ColumnTypeInfo typeInfo = mapBigQueryTypeToJdbc(field); + + values.add(createStringFieldValue(catalog)); // 1. TABLE_CAT + values.add(createStringFieldValue(schemaName)); // 2. TABLE_SCHEM + values.add(createStringFieldValue(tableName)); // 3. TABLE_NAME + values.add(createStringFieldValue(field.getName())); // 4. COLUMN_NAME + values.add(createLongFieldValue((long) typeInfo.jdbcType)); // 5. DATA_TYPE + values.add(createStringFieldValue(typeInfo.typeName)); // 6. TYPE_NAME + values.add( + createLongFieldValue( + typeInfo.columnSize == null + ? null + : typeInfo.columnSize.longValue())); // 7. COLUMN_SIZE + values.add(createNullFieldValue()); // 8. BUFFER_LENGTH + values.add( + createLongFieldValue( + typeInfo.decimalDigits == null + ? null + : typeInfo.decimalDigits.longValue())); // 9. DECIMAL_DIGITS + values.add( + createLongFieldValue( + typeInfo.numPrecRadix == null + ? null + : typeInfo.numPrecRadix.longValue())); // 10. NUM_PREC_RADIX + int nullable = + (mode == Field.Mode.REQUIRED) + ? DatabaseMetaData.columnNoNulls + : DatabaseMetaData.columnNullable; + values.add(createLongFieldValue((long) nullable)); // 11. NULLABLE + values.add(createStringFieldValue(field.getDescription())); // 12. REMARKS + values.add(createNullFieldValue()); // 13. COLUMN_DEF + values.add(createNullFieldValue()); // 14. SQL_DATA_TYPE + values.add(createNullFieldValue()); // 15. SQL_DATETIME_SUB + values.add(createNullFieldValue()); // 16. CHAR_OCTET_LENGTH + values.add(createLongFieldValue((long) ordinalPosition)); // 17. ORDINAL_POSITION + String isNullable = ""; + switch (mode) { + case REQUIRED: + isNullable = "NO"; + break; + case NULLABLE: + case REPEATED: + isNullable = "YES"; + break; + } + values.add(createStringFieldValue(isNullable)); // 18. IS_NULLABLE + values.add(createNullFieldValue()); // 19. SCOPE_CATALOG + values.add(createNullFieldValue()); // 20. SCOPE_SCHEMA + values.add(createNullFieldValue()); // 21. SCOPE_TABLE + values.add(createNullFieldValue()); // 22. SOURCE_DATA_TYPE + values.add(createStringFieldValue("NO")); // 23. IS_AUTOINCREMENT + values.add(createStringFieldValue("NO")); // 24. 
IS_GENERATEDCOLUMN + + return values; + } + + static class ColumnTypeInfo { + final int jdbcType; + final String typeName; + final Integer columnSize; + final Integer decimalDigits; + final Integer numPrecRadix; + + ColumnTypeInfo( + int jdbcType, + String typeName, + Integer columnSize, + Integer decimalDigits, + Integer numPrecRadix) { + this.jdbcType = jdbcType; + this.typeName = typeName; + this.columnSize = columnSize; + this.decimalDigits = decimalDigits; + this.numPrecRadix = numPrecRadix; + } + } + + ColumnTypeInfo mapBigQueryTypeToJdbc(Field field) { + Mode mode = (field.getMode() == null) ? Mode.NULLABLE : field.getMode(); + if (mode == Mode.REPEATED) { + return new ColumnTypeInfo(Types.ARRAY, "ARRAY", null, null, null); + } + + StandardSQLTypeName bqType = null; + if (field.getType() != null && field.getType().getStandardType() != null) { + bqType = field.getType().getStandardType(); + } + return getColumnTypeInfoForSqlType(bqType); + } + + private Comparator defineGetColumnsComparator(FieldList resultSchemaFields) { + final int TABLE_CAT_IDX = resultSchemaFields.getIndex("TABLE_CAT"); + final int TABLE_SCHEM_IDX = resultSchemaFields.getIndex("TABLE_SCHEM"); + final int TABLE_NAME_IDX = resultSchemaFields.getIndex("TABLE_NAME"); + final int ORDINAL_POS_IDX = resultSchemaFields.getIndex("ORDINAL_POSITION"); + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_CAT_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_SCHEM_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_NAME_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getLongValueOrNull(fvl, ORDINAL_POS_IDX), + Comparator.nullsFirst(Long::compareTo)); + } + + @Override + public ResultSet getColumnPrivileges( + String catalog, String schema, String table, String columnNamePattern) { + LOG.info( + "getColumnPrivileges called for catalog: %s, schema: %s, table: %s, columnNamePattern:" + + " %s. BigQuery IAM model differs from SQL privileges; returning empty ResultSet.", + catalog, schema, table, columnNamePattern); + + final Schema resultSchema = defineGetColumnPrivilegesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetColumnPrivilegesSchema() { + List fields = defineBasePrivilegeFields(); + + Field columnNameField = + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build(); + fields.add(3, columnNameField); + + return Schema.of(fields); + } + + @Override + public ResultSet getTablePrivileges( + String catalog, String schemaPattern, String tableNamePattern) { + LOG.info( + "getTablePrivileges called for catalog: %s, schemaPattern: %s, tableNamePattern: %s. 
" + + "BigQuery IAM model differs from SQL privileges; returning empty ResultSet.", + catalog, schemaPattern, tableNamePattern); + + final Schema resultSchema = defineGetTablePrivilegesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetTablePrivilegesSchema() { + List fields = defineBasePrivilegeFields(); + return Schema.of(fields); + } + + @Override + public ResultSet getBestRowIdentifier( + String catalog, String schema, String table, int scope, boolean nullable) { + LOG.info( + "getBestRowIdentifier called for catalog: %s, schema: %s, table: %s, scope: %d," + + " nullable: %s. BigQuery does not support best row identifiers; returning empty" + + " ResultSet.", + catalog, schema, table, scope, nullable); + + final Schema resultSchema = defineGetBestRowIdentifierSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetBestRowIdentifierSchema() { + List fields = new ArrayList<>(8); + fields.add( + Field.newBuilder("SCOPE", StandardSQLTypeName.INT64).setMode(Field.Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_SIZE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("BUFFER_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("DECIMAL_DIGITS", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PSEUDO_COLUMN", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + @Override + public ResultSet getVersionColumns(String catalog, String schema, String table) { + LOG.info( + "getVersionColumns called for catalog: %s, schema: %s, table: %s. 
" + + "Automatic version columns not supported by BigQuery; returning empty ResultSet.", + catalog, schema, table); + + final Schema resultSchema = defineGetVersionColumnsSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetVersionColumnsSchema() { + List fields = new ArrayList<>(8); + fields.add( + Field.newBuilder("SCOPE", StandardSQLTypeName.INT64).setMode(Field.Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_SIZE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("BUFFER_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DECIMAL_DIGITS", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PSEUDO_COLUMN", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + @Override + public ResultSet getPrimaryKeys(String catalog, String schema, String table) throws SQLException { + String sql = readSqlFromFile(GET_PRIMARY_KEYS_SQL); + try { + String formattedSql = replaceSqlParameters(sql, catalog, schema, table); + return this.statement.executeQuery(formattedSql); + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Override + public ResultSet getImportedKeys(String catalog, String schema, String table) + throws SQLException { + String sql = readSqlFromFile(GET_IMPORTED_KEYS_SQL); + try { + String formattedSql = replaceSqlParameters(sql, catalog, schema, table); + return this.statement.executeQuery(formattedSql); + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Override + public ResultSet getExportedKeys(String catalog, String schema, String table) + throws SQLException { + String sql = readSqlFromFile(GET_EXPORTED_KEYS_SQL); + try { + String formattedSql = replaceSqlParameters(sql, catalog, schema, table); + return this.statement.executeQuery(formattedSql); + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Override + public ResultSet getCrossReference( + String parentCatalog, + String parentSchema, + String parentTable, + String foreignCatalog, + String foreignSchema, + String foreignTable) + throws SQLException { + String sql = readSqlFromFile(GET_CROSS_REFERENCE_SQL); + try { + String formattedSql = + replaceSqlParameters( + sql, + parentCatalog, + parentSchema, + parentTable, + foreignCatalog, + foreignSchema, + foreignTable); + return this.statement.executeQuery(formattedSql); + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Override + public ResultSet getTypeInfo() { + LOG.info("getTypeInfo() called"); + + final Schema typeInfoSchema = defineGetTypeInfoSchema(); + final FieldList schemaFields = typeInfoSchema.getFields(); + final List typeInfoRows = prepareGetTypeInfoRows(schemaFields); + + final Comparator comparator = 
defineGetTypeInfoComparator(schemaFields);
+    sortResults(typeInfoRows, comparator, "getTypeInfo", LOG);
+    final BlockingQueue<FieldValueList> queue =
+        new LinkedBlockingQueue<>(typeInfoRows.size() + 1);
+
+    populateQueue(typeInfoRows, queue, schemaFields);
+    signalEndOfData(queue, schemaFields);
+    return BigQueryJsonResultSet.of(
+        typeInfoSchema, typeInfoRows.size(), queue, this.statement, new Thread[0]);
+  }
+
+  Schema defineGetTypeInfoSchema() {
+    List<Field> fields = new ArrayList<>(18);
+    fields.add(
+        Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING)
+            .setMode(Mode.REQUIRED)
+            .build()); // 1
+    fields.add(
+        Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64)
+            .setMode(Mode.REQUIRED)
+            .build()); // 2
+    fields.add(
+        Field.newBuilder("PRECISION", StandardSQLTypeName.INT64)
+            .setMode(Mode.NULLABLE)
+            .build()); // 3
+    fields.add(
+        Field.newBuilder("LITERAL_PREFIX", StandardSQLTypeName.STRING)
+            .setMode(Mode.NULLABLE)
+            .build()); // 4
+    fields.add(
+        Field.newBuilder("LITERAL_SUFFIX", StandardSQLTypeName.STRING)
+            .setMode(Mode.NULLABLE)
+            .build()); // 5
+    fields.add(
+        Field.newBuilder("CREATE_PARAMS", StandardSQLTypeName.STRING)
+            .setMode(Mode.NULLABLE)
+            .build()); // 6
+    fields.add(
+        Field.newBuilder("NULLABLE", StandardSQLTypeName.INT64)
+            .setMode(Mode.REQUIRED)
+            .build()); // 7
+    fields.add(
+        Field.newBuilder("CASE_SENSITIVE", StandardSQLTypeName.BOOL)
+            .setMode(Mode.REQUIRED)
+            .build()); // 8
+    fields.add(
+        Field.newBuilder("SEARCHABLE", StandardSQLTypeName.INT64)
+            .setMode(Mode.REQUIRED)
+            .build()); // 9
+    fields.add(
+        Field.newBuilder("UNSIGNED_ATTRIBUTE", StandardSQLTypeName.BOOL)
+            .setMode(Mode.NULLABLE)
+            .build()); // 10
+    fields.add(
+        Field.newBuilder("FIXED_PREC_SCALE", StandardSQLTypeName.BOOL)
+            .setMode(Mode.REQUIRED)
+            .build()); // 11
+    fields.add(
+        Field.newBuilder("AUTO_INCREMENT", StandardSQLTypeName.BOOL)
+            .setMode(Mode.REQUIRED)
+            .build()); // 12
+    fields.add(
+        Field.newBuilder("LOCAL_TYPE_NAME", StandardSQLTypeName.STRING)
+            .setMode(Mode.NULLABLE)
+            .build()); // 13
+    fields.add(
+        Field.newBuilder("MINIMUM_SCALE", StandardSQLTypeName.INT64)
+            .setMode(Mode.NULLABLE)
+            .build()); // 14
+    fields.add(
+        Field.newBuilder("MAXIMUM_SCALE", StandardSQLTypeName.INT64)
+            .setMode(Mode.NULLABLE)
+            .build()); // 15
+    fields.add(
+        Field.newBuilder("SQL_DATA_TYPE", StandardSQLTypeName.INT64)
+            .setMode(Mode.NULLABLE)
+            .build()); // 16
+    fields.add(
+        Field.newBuilder("SQL_DATETIME_SUB", StandardSQLTypeName.INT64)
+            .setMode(Mode.NULLABLE)
+            .build()); // 17
+    fields.add(
+        Field.newBuilder("NUM_PREC_RADIX", StandardSQLTypeName.INT64)
+            .setMode(Mode.NULLABLE)
+            .build()); // 18
+    return Schema.of(fields);
+  }
+
+  List<FieldValueList> prepareGetTypeInfoRows(FieldList schemaFields) {
+    List<FieldValueList> rows = new ArrayList<>();
+
+    Function<TypeInfoRowData, FieldValueList> createRow =
+        (data) -> {
+          List<FieldValue> values = new ArrayList<>(18);
+          values.add(createStringFieldValue(data.typeName)); // 1. TYPE_NAME
+          values.add(createLongFieldValue((long) data.jdbcType)); // 2. DATA_TYPE
+          values.add(createLongFieldValue(data.precision)); // 3. PRECISION
+          values.add(createStringFieldValue(data.literalPrefix)); // 4. LITERAL_PREFIX
+          values.add(createStringFieldValue(data.literalSuffix)); // 5. LITERAL_SUFFIX
+          values.add(createStringFieldValue(data.createParams)); // 6. CREATE_PARAMS
+          values.add(createLongFieldValue((long) data.nullable)); // 7. NULLABLE
+          values.add(createBooleanFieldValue(data.caseSensitive)); // 8. CASE_SENSITIVE
+          values.add(createLongFieldValue((long) data.searchable)); // 9.
SEARCHABLE + values.add(createBooleanFieldValue(data.unsignedAttribute)); // 10. UNSIGNED_ATTRIBUTE + values.add(createBooleanFieldValue(data.fixedPrecScale)); // 11. FIXED_PREC_SCALE + values.add(createBooleanFieldValue(data.autoIncrement)); // 12. AUTO_INCREMENT + values.add(createStringFieldValue(data.localTypeName)); // 13. LOCAL_TYPE_NAME + values.add(createLongFieldValue(data.minimumScale)); // 14. MINIMUM_SCALE + values.add(createLongFieldValue(data.maximumScale)); // 15. MAXIMUM_SCALE + values.add(createNullFieldValue()); // 16. SQL_DATA_TYPE + values.add(createNullFieldValue()); // 17. SQL_DATETIME_SUB + values.add(createLongFieldValue(data.numPrecRadix)); // 18. NUM_PREC_RADIX + return FieldValueList.of(values, schemaFields); + }; + + rows.add( + createRow.apply( + new TypeInfoRowData( + "INT64", + Types.BIGINT, + 19L, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "INT64", + 0L, + 0L, + 10L))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "BOOL", + Types.BOOLEAN, + 1L, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typePredBasic, + false, + false, + false, + "BOOL", + 0L, + 0L, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "FLOAT64", + Types.DOUBLE, + 15L, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "FLOAT64", + null, + null, + 2L))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "NUMERIC", + Types.NUMERIC, + 38L, + null, + null, + "PRECISION,SCALE", + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + true, + false, + "NUMERIC", + 9L, + 9L, + 10L))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "BIGNUMERIC", + Types.NUMERIC, + 77L, + null, + null, + "PRECISION,SCALE", + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + true, + false, + "BIGNUMERIC", + 38L, + 38L, + 10L))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "STRING", + Types.NVARCHAR, + null, + "'", + "'", + "LENGTH", + DatabaseMetaData.typeNullable, + true, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "STRING", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "TIMESTAMP", + Types.TIMESTAMP, + 29L, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "TIMESTAMP", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "DATE", + Types.DATE, + 10L, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "DATE", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "TIME", + Types.TIME, + 15L, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "TIME", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "DATETIME", + Types.TIMESTAMP, + 29L, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "DATETIME", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "GEOGRAPHY", + Types.OTHER, + null, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + 
"GEOGRAPHY", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "JSON", + Types.OTHER, + null, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "JSON", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "INTERVAL", + Types.OTHER, + null, + "'", + "'", + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "INTERVAL", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "BYTES", + Types.VARBINARY, + null, + "0x", + null, + "LENGTH", + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "BYTES", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "STRUCT", + Types.STRUCT, + null, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typePredNone, + false, + false, + false, + "STRUCT", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "ARRAY", + Types.ARRAY, + null, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typePredNone, + false, + false, + false, + "ARRAY", + null, + null, + null))); + rows.add( + createRow.apply( + new TypeInfoRowData( + "RANGE", + Types.OTHER, + null, + null, + null, + null, + DatabaseMetaData.typeNullable, + false, + DatabaseMetaData.typeSearchable, + false, + false, + false, + "RANGE", + null, + null, + null))); + + return rows; + } + + Comparator defineGetTypeInfoComparator(FieldList schemaFields) { + final int DATA_TYPE_IDX = schemaFields.getIndex("DATA_TYPE"); + if (DATA_TYPE_IDX < 0) { + LOG.severe( + "Could not find DATA_TYPE column in getTypeInfo schema for sorting. Returning null" + + " comparator."); + return null; + } + Comparator comparator = + Comparator.comparing( + (FieldValueList fvl) -> getLongValueOrNull(fvl, DATA_TYPE_IDX), + Comparator.nullsFirst(Long::compareTo)); + return comparator; + } + + @Override + public ResultSet getIndexInfo( + String catalog, String schema, String table, boolean unique, boolean approximate) { + LOG.info( + "getIndexInfo called for catalog: %s, schema: %s, table: %s, unique: %s, approximate:" + + " %s. 
Traditional indexes not supported by BigQuery; returning empty ResultSet.", + catalog, schema, table, unique, approximate); + + final Schema resultSchema = defineGetIndexInfoSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetIndexInfoSchema() { + List fields = new ArrayList<>(13); + fields.add( + Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("NON_UNIQUE", StandardSQLTypeName.BOOL) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("INDEX_QUALIFIER", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("INDEX_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE", StandardSQLTypeName.INT64).setMode(Field.Mode.REQUIRED).build()); + fields.add( + Field.newBuilder("ORDINAL_POSITION", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("ASC_OR_DESC", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("CARDINALITY", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("PAGES", StandardSQLTypeName.INT64).setMode(Field.Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("FILTER_CONDITION", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + return Schema.of(fields); + } + + @Override + public boolean supportsResultSetType(int type) { + // BigQuery primarily supports forward-only result sets. + return type == ResultSet.TYPE_FORWARD_ONLY; + } + + @Override + public boolean supportsResultSetConcurrency(int type, int concurrency) { + // BigQuery primarily supports forward-only, read-only result sets. 
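+    // Illustrative caller-side check (a sketch, not part of this driver): the only
+    // combination that succeeds is, e.g.,
+    //   Statement stmt =
+    //       connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
+    // Any other type/concurrency pair reports false here.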
+ return type == ResultSet.TYPE_FORWARD_ONLY && concurrency == ResultSet.CONCUR_READ_ONLY; + } + + @Override + public boolean ownUpdatesAreVisible(int type) { + return false; + } + + @Override + public boolean ownDeletesAreVisible(int type) { + return false; + } + + @Override + public boolean ownInsertsAreVisible(int type) { + return false; + } + + @Override + public boolean othersUpdatesAreVisible(int type) { + return false; + } + + @Override + public boolean othersDeletesAreVisible(int type) { + return false; + } + + @Override + public boolean othersInsertsAreVisible(int type) { + return false; + } + + @Override + public boolean updatesAreDetected(int type) { + return false; + } + + @Override + public boolean deletesAreDetected(int type) { + return false; + } + + @Override + public boolean insertsAreDetected(int type) { + return false; + } + + @Override + public boolean supportsBatchUpdates() { + return false; + } + + @Override + public ResultSet getUDTs( + String catalog, String schemaPattern, String typeNamePattern, int[] types) { + LOG.info( + "getUDTs called for catalog: %s, schemaPattern: %s, typeNamePattern: %s, types: %s. " + + "Feature not supported by BigQuery; returning empty ResultSet.", + catalog, schemaPattern, typeNamePattern, (types == null ? "null" : Arrays.toString(types))); + + final Schema resultSchema = defineGetUDTsSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetUDTsSchema() { + List fields = new ArrayList<>(7); + fields.add( + Field.newBuilder("TYPE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("CLASS_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("BASE_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + return Schema.of(fields); + } + + @Override + public Connection getConnection() { + return connection; + } + + @Override + public boolean supportsSavepoints() { + return false; + } + + @Override + public boolean supportsNamedParameters() { + return false; + } + + @Override + public boolean supportsMultipleOpenResults() { + return false; + } + + @Override + public boolean supportsGetGeneratedKeys() { + return false; + } + + @Override + public ResultSet getSuperTables(String catalog, String schemaPattern, String tableNamePattern) { + LOG.info( + "getSuperTables called for catalog: %s, schemaPattern: %s, tableNamePattern: %s. 
" + + "BigQuery does not support super tables; returning empty ResultSet.", + catalog, schemaPattern, tableNamePattern); + + final Schema resultSchema = defineGetSuperTablesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetSuperTablesSchema() { + List fields = new ArrayList<>(4); + fields.add( + Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 1. TABLE_CAT + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 2. TABLE_SCHEM + fields.add( + Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 3. TABLE_NAME + fields.add( + Field.newBuilder("SUPERTABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 4. SUPERTABLE_NAME + return Schema.of(fields); + } + + @Override + public ResultSet getSuperTypes(String catalog, String schemaPattern, String typeNamePattern) { + LOG.info( + "getSuperTypes called for catalog: %s, schemaPattern: %s, typeNamePattern: %s. BigQuery" + + " does not support user-defined type hierarchies; returning empty ResultSet.", + catalog, schemaPattern, typeNamePattern); + + final Schema resultSchema = defineGetSuperTypesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetSuperTypesSchema() { + List fields = new ArrayList<>(6); + fields.add( + Field.newBuilder("TYPE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 1. TYPE_CAT + fields.add( + Field.newBuilder("TYPE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 2. TYPE_SCHEM + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 3. TYPE_NAME + fields.add( + Field.newBuilder("SUPERTYPE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 4. SUPERTYPE_CAT + fields.add( + Field.newBuilder("SUPERTYPE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); // 5. SUPERTYPE_SCHEM + fields.add( + Field.newBuilder("SUPERTYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); // 6. SUPERTYPE_NAME + return Schema.of(fields); + } + + @Override + public ResultSet getAttributes( + String catalog, String schemaPattern, String typeNamePattern, String attributeNamePattern) { + LOG.info( + "getAttributes called for catalog: %s, schemaPattern: %s, typeNamePattern: %s," + + " attributeNamePattern: %s. 
Feature not supported by BigQuery; returning empty" + + " ResultSet.", + catalog, schemaPattern, typeNamePattern, attributeNamePattern); + + final Schema resultSchema = defineGetAttributesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetAttributesSchema() { + List fields = new ArrayList<>(21); + fields.add( + Field.newBuilder("TYPE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("ATTR_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("ATTR_TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("ATTR_SIZE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("DECIMAL_DIGITS", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("NUM_PREC_RADIX", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("NULLABLE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("ATTR_DEF", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SQL_DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SQL_DATETIME_SUB", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("CHAR_OCTET_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("ORDINAL_POSITION", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("IS_NULLABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("SCOPE_CATALOG", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SCOPE_SCHEMA", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SCOPE_TABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("SOURCE_DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + return Schema.of(fields); + } + + @Override + public boolean supportsResultSetHoldability(int holdability) { + if (holdability == ResultSet.CLOSE_CURSORS_AT_COMMIT) { + return true; + } + return false; + } + + @Override + public int getResultSetHoldability() { + return ResultSet.CLOSE_CURSORS_AT_COMMIT; + } + + @Override + // Obtained from java libraries pom + // https://github.com/googleapis/java-bigquery/blob/main/pom.xml + public int getDatabaseMajorVersion() { + 
return 2; + } + + @Override + public int getDatabaseMinorVersion() { + return 0; + } + + @Override + public int getJDBCMajorVersion() { + return 4; + } + + @Override + public int getJDBCMinorVersion() { + return 2; + } + + @Override + public int getSQLStateType() { + return DatabaseMetaData.sqlStateSQL; + } + + @Override + public boolean locatorsUpdateCopy() { + return false; + } + + @Override + public boolean supportsStatementPooling() { + return false; + } + + @Override + public RowIdLifetime getRowIdLifetime() { + return null; + } + + @Override + public ResultSet getSchemas(String catalog, String schemaPattern) { + if ((catalog != null && catalog.isEmpty()) + || (schemaPattern != null && schemaPattern.isEmpty())) { + LOG.warning("Returning empty ResultSet as catalog or schemaPattern is an empty string."); + return new BigQueryJsonResultSet(); + } + + LOG.info("getSchemas called for catalog: %s, schemaPattern: %s", catalog, schemaPattern); + + final Pattern schemaRegex = compileSqlLikePattern(schemaPattern); + final Schema resultSchema = defineGetSchemasSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + + final BlockingQueue queue = + new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final String catalogParam = catalog; + + Runnable schemaFetcher = + () -> { + final FieldList localResultSchemaFields = resultSchemaFields; + List projectsToScanList = new ArrayList<>(); + + if (catalogParam != null) { + projectsToScanList.add(catalogParam); + } else { + projectsToScanList.addAll(getAccessibleCatalogNames()); + } + + if (projectsToScanList.isEmpty()) { + LOG.info( + "No valid projects to scan (primary, specified, or additional). Returning empty" + + " resultset."); + return; + } + + try { + for (String currentProjectToScan : projectsToScanList) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Schema fetcher interrupted during project iteration for project: " + + currentProjectToScan); + break; + } + LOG.info("Fetching schemas for project: " + currentProjectToScan); + List datasetsInProject = + findMatchingBigQueryObjects( + "Dataset", + () -> + bigquery.listDatasets( + currentProjectToScan, + BigQuery.DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> bigquery.getDataset(DatasetId.of(currentProjectToScan, name)), + (ds) -> ds.getDatasetId().getDataset(), + schemaPattern, + schemaRegex, + LOG); + + if (datasetsInProject.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info( + "Fetcher thread found no matching datasets in project: " + + currentProjectToScan); + continue; + } + + LOG.fine("Processing found datasets for project: " + currentProjectToScan); + for (Dataset dataset : datasetsInProject) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Schema fetcher interrupted during dataset iteration for project: " + + currentProjectToScan); + break; + } + processSchemaInfo(dataset, collectedResults, localResultSchemaFields); + } + } + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetSchemasComparator(localResultSchemaFields); + sortResults(collectedResults, comparator, "getSchemas", LOG); + } + + if (!Thread.currentThread().isInterrupted()) { + populateQueue(collectedResults, queue, localResultSchemaFields); + } + + } catch (Throwable t) { + LOG.severe("Unexpected error in schema fetcher runnable: " + t.getMessage()); + } finally { + signalEndOfData(queue, localResultSchemaFields); + 
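// End-of-data sentinel: signalEndOfData always runs here so the consuming
+              // BigQueryJsonResultSet sees a terminal marker row and stops draining the
+              // queue even when this fetcher exits early on interruption or error.
+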
LOG.info("Schema fetcher thread finished."); + } + }; + + Thread fetcherThread = new Thread(schemaFetcher, "getSchemas-fetcher-" + catalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getSchemas"); + return resultSet; + } + + Schema defineGetSchemasSchema() { + List fields = new ArrayList<>(2); + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("TABLE_CATALOG", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + return Schema.of(fields); + } + + void processSchemaInfo( + Dataset dataset, List collectedResults, FieldList resultSchemaFields) { + DatasetId datasetId = dataset.getDatasetId(); + LOG.finer("Processing schema info for dataset: " + datasetId); + try { + String schemaName = datasetId.getDataset(); + String catalogName = datasetId.getProject(); + List values = new ArrayList<>(resultSchemaFields.size()); + values.add(createStringFieldValue(schemaName)); + values.add(createStringFieldValue(catalogName)); + FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields); + collectedResults.add(rowFvl); + LOG.finer("Processed and added schema info row for: " + datasetId); + } catch (Exception e) { + LOG.warning( + "Error processing schema info for dataset %s: %s. Skipping this schema.", + datasetId, e.getMessage()); + } + } + + Comparator defineGetSchemasComparator(FieldList resultSchemaFields) { + final int TABLE_CATALOG_IDX = resultSchemaFields.getIndex("TABLE_CATALOG"); + final int TABLE_SCHEM_IDX = resultSchemaFields.getIndex("TABLE_SCHEM"); + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_CATALOG_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, TABLE_SCHEM_IDX), + Comparator.nullsFirst(String::compareTo)); + } + + @Override + public boolean supportsStoredFunctionsUsingCallSyntax() { + return false; + } + + @Override + public boolean autoCommitFailureClosesAllResultSets() { + return false; + } + + @Override + public ResultSet getClientInfoProperties() { + LOG.info("getClientInfoProperties() called."); + + final Schema resultSchema = defineGetClientInfoPropertiesSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(4); + final List collectedResults = new ArrayList<>(3); + + try { + collectedResults.add( + FieldValueList.of( + Arrays.asList( + createStringFieldValue("ApplicationName"), + createLongFieldValue(25L), + createNullFieldValue(), + createStringFieldValue( + "The name of the application currently utilizing the connection.")), + resultSchemaFields)); + + collectedResults.add( + FieldValueList.of( + Arrays.asList( + createStringFieldValue("ClientHostname"), + createLongFieldValue(25L), + createNullFieldValue(), + createStringFieldValue( + "The hostname of the computer the application using the connection is running" + + " on.")), + resultSchemaFields)); + + collectedResults.add( + FieldValueList.of( + Arrays.asList( + createStringFieldValue("ClientUser"), + createLongFieldValue(25L), + createNullFieldValue(), + createStringFieldValue( + "The name of the user that the application using the connection is performing" + + " work for.")), + resultSchemaFields)); + + Comparator comparator = + 
Comparator.comparing(
+              (FieldValueList fvl) ->
+                  getStringValueOrNull(fvl, resultSchemaFields.getIndex("NAME")),
+              Comparator.nullsFirst(String::compareToIgnoreCase));
+      sortResults(collectedResults, comparator, "getClientInfoProperties", LOG);
+      populateQueue(collectedResults, queue, resultSchemaFields);
+
+    } catch (Exception e) {
+      LOG.warning("Unexpected error processing client info properties: " + e.getMessage());
+      collectedResults.clear();
+      queue.clear();
+    } finally {
+      signalEndOfData(queue, resultSchemaFields);
+    }
+    return BigQueryJsonResultSet.of(
+        resultSchema, collectedResults.size(), queue, this.statement, new Thread[0]);
+  }
+
+  Schema defineGetClientInfoPropertiesSchema() {
+    List<Field> fields = new ArrayList<>(4);
+    fields.add(
+        Field.newBuilder("NAME", StandardSQLTypeName.STRING)
+            .setMode(Field.Mode.REQUIRED)
+            .build()); // 1
+    fields.add(
+        Field.newBuilder("MAX_LEN", StandardSQLTypeName.INT64)
+            .setMode(Field.Mode.NULLABLE)
+            .build()); // 2
+    fields.add(
+        Field.newBuilder("DEFAULT_VALUE", StandardSQLTypeName.STRING)
+            .setMode(Field.Mode.NULLABLE)
+            .build()); // 3
+    fields.add(
+        Field.newBuilder("DESCRIPTION", StandardSQLTypeName.STRING)
+            .setMode(Field.Mode.NULLABLE)
+            .build()); // 4
+    return Schema.of(fields);
+  }
+
+  @Override
+  public ResultSet getFunctions(String catalog, String schemaPattern, String functionNamePattern) {
+    if ((catalog == null || catalog.isEmpty())
+        || (schemaPattern != null && schemaPattern.isEmpty())
+        || (functionNamePattern != null && functionNamePattern.isEmpty())) {
+      LOG.warning(
+          "Returning empty ResultSet as catalog is null/empty or a pattern is empty for"
+              + " getFunctions.");
+      return new BigQueryJsonResultSet();
+    }
+
+    LOG.info(
+        "getFunctions called for catalog: %s, schemaPattern: %s, functionNamePattern: %s",
+        catalog, schemaPattern, functionNamePattern);
+
+    final Pattern schemaRegex = compileSqlLikePattern(schemaPattern);
+    final Pattern functionNameRegex = compileSqlLikePattern(functionNamePattern);
+    final Schema resultSchema = defineGetFunctionsSchema();
+    final FieldList resultSchemaFields = resultSchema.getFields();
+    final BlockingQueue<FieldValueList> queue =
+        new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY);
+    final List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+    final List<Future<?>> processingTaskFutures = new ArrayList<>();
+    final String catalogParam = catalog;
+
+    Runnable functionFetcher =
+        () -> {
+          ExecutorService apiExecutor = null;
+          ExecutorService routineProcessorExecutor = null;
+          final FieldList localResultSchemaFields = resultSchemaFields;
+          final List<Future<List<Routine>>> apiFutures = new ArrayList<>();
+
+          try {
+            List<Dataset> datasetsToScan =
+                findMatchingBigQueryObjects(
+                    "Dataset",
+                    () ->
+                        bigquery.listDatasets(
+                            catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)),
+                    (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)),
+                    (ds) -> ds.getDatasetId().getDataset(),
+                    schemaPattern,
+                    schemaRegex,
+                    LOG);
+
+            if (datasetsToScan.isEmpty()) {
+              LOG.info("Fetcher thread found no matching datasets. Returning empty resultset.");
+              return;
+            }
+
+            apiExecutor = Executors.newFixedThreadPool(API_EXECUTOR_POOL_SIZE);
+            routineProcessorExecutor = Executors.newFixedThreadPool(this.metadataFetchThreadCount);
+
+            for (Dataset dataset : datasetsToScan) {
+              if (Thread.currentThread().isInterrupted()) {
+                LOG.warning("Function fetcher interrupted during dataset iteration submission.");
+                break;
+              }
+
+              final DatasetId currentDatasetId = dataset.getDatasetId();
+
+              Callable<List<Routine>> apiCallable =
+                  () -> {
+                    LOG.fine(
+                        "Fetching all routines for dataset: %s, pattern: %s",
+                        currentDatasetId.getDataset(), functionNamePattern);
+                    return findMatchingBigQueryObjects(
+                        "Routine",
+                        () ->
+                            bigquery.listRoutines(
+                                currentDatasetId, RoutineListOption.pageSize(DEFAULT_PAGE_SIZE)),
+                        (name) ->
+                            bigquery.getRoutine(
+                                RoutineId.of(
+                                    currentDatasetId.getProject(),
+                                    currentDatasetId.getDataset(),
+                                    name)),
+                        (rt) -> rt.getRoutineId().getRoutine(),
+                        functionNamePattern,
+                        functionNameRegex,
+                        LOG);
+                  };
+              Future<List<Routine>> apiFuture = apiExecutor.submit(apiCallable);
+              apiFutures.add(apiFuture);
+            }
+            LOG.fine(
+                "Finished submitting "
+                    + apiFutures.size()
+                    + " findMatchingRoutines (for functions) tasks.");
+            apiExecutor.shutdown();
+
+            for (Future<List<Routine>> apiFuture : apiFutures) {
+              if (Thread.currentThread().isInterrupted()) {
+                LOG.warning("Function fetcher interrupted while processing API futures.");
+                break;
+              }
+              try {
+                List<Routine> routinesResult = apiFuture.get();
+                if (routinesResult != null) {
+                  for (Routine routine : routinesResult) {
+                    if (Thread.currentThread().isInterrupted()) {
+                      break;
+                    }
+                    String routineType = routine.getRoutineType();
+                    if ("SCALAR_FUNCTION".equalsIgnoreCase(routineType)
+                        || "TABLE_FUNCTION".equalsIgnoreCase(routineType)) {
+                      LOG.fine(
+                          "Submitting processing task for function: "
+                              + routine.getRoutineId()
+                              + " of type "
+                              + routineType);
+                      final Routine finalRoutine = routine;
+                      Future<?> processFuture =
+                          routineProcessorExecutor.submit(
+                              () ->
+                                  processFunctionInfo(
+                                      finalRoutine, collectedResults, localResultSchemaFields));
+                      processingTaskFutures.add(processFuture);
+                    }
+                  }
+                }
+              } catch (InterruptedException e) {
+                Thread.currentThread().interrupt();
+                LOG.warning(
+                    "Function fetcher thread interrupted while waiting for API future result.");
+                break;
+              } catch (ExecutionException | CancellationException e) {
+                LOG.warning(
+                    "Error or cancellation in findMatchingRoutines (for functions) task: "
+                        + e.getMessage());
+              }
+            }
+            waitForTasksCompletion(processingTaskFutures);
+            Comparator<FieldValueList> comparator =
+                defineGetFunctionsComparator(localResultSchemaFields);
+            sortResults(collectedResults, comparator, "getFunctions", LOG);
+            populateQueue(collectedResults, queue, localResultSchemaFields);
+          } catch (Throwable t) {
+            LOG.severe("Unexpected error in function fetcher runnable: " + t.getMessage());
+            apiFutures.forEach(f -> f.cancel(true));
+            processingTaskFutures.forEach(f -> f.cancel(true));
+          } finally {
+            signalEndOfData(queue, localResultSchemaFields);
+            shutdownExecutor(apiExecutor);
+            shutdownExecutor(routineProcessorExecutor);
+            LOG.info("Function fetcher thread finished.");
+          }
+        };
+
+    Thread fetcherThread = new Thread(functionFetcher, "getFunctions-fetcher-" + catalog);
+    BigQueryJsonResultSet resultSet =
+        BigQueryJsonResultSet.of(
+            resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread});
+
+    fetcherThread.start();
+    LOG.info("Started background thread for getFunctions");
+    return resultSet;
+  }
+
+  Schema defineGetFunctionsSchema() {
+    List<Field> fields = new ArrayList<>(6);
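+    // Column order below follows the JDBC DatabaseMetaData#getFunctions contract:
+    // FUNCTION_CAT, FUNCTION_SCHEM, FUNCTION_NAME, REMARKS, FUNCTION_TYPE, SPECIFIC_NAME.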
+ fields.add( + Field.newBuilder("FUNCTION_CAT", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("FUNCTION_SCHEM", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("FUNCTION_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING).setMode(Mode.NULLABLE).build()); + fields.add( + Field.newBuilder("FUNCTION_TYPE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("SPECIFIC_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + void processFunctionInfo( + Routine routine, List collectedResults, FieldList resultSchemaFields) { + RoutineId routineId = routine.getRoutineId(); + LOG.fine("Processing function info for: " + routineId); + + try { + String catalogName = routineId.getProject(); + String schemaName = routineId.getDataset(); + String functionName = routineId.getRoutine(); + String remarks = routine.getDescription(); + String specificName = functionName; + + short functionType; + String bqRoutineType = routine.getRoutineType(); + if ("SCALAR_FUNCTION".equalsIgnoreCase(bqRoutineType)) { + functionType = DatabaseMetaData.functionResultUnknown; + } else if ("TABLE_FUNCTION".equalsIgnoreCase(bqRoutineType)) { + functionType = DatabaseMetaData.functionReturnsTable; + } else { + functionType = DatabaseMetaData.functionResultUnknown; + } + + List values = new ArrayList<>(resultSchemaFields.size()); + values.add(createStringFieldValue(catalogName)); // 1. FUNCTION_CAT + values.add(createStringFieldValue(schemaName)); // 2. FUNCTION_SCHEM + values.add(createStringFieldValue(functionName)); // 3. FUNCTION_NAME + values.add(createStringFieldValue(remarks)); // 4. REMARKS + values.add(createLongFieldValue((long) functionType)); // 5. FUNCTION_TYPE + values.add(createStringFieldValue(specificName)); // 6. SPECIFIC_NAME + + FieldValueList rowFvl = FieldValueList.of(values, resultSchemaFields); + collectedResults.add(rowFvl); + LOG.fine("Processed and added function info row for: " + routineId); + + } catch (Exception e) { + LOG.warning( + "Error processing function info for %s: %s. 
Skipping this function.", + routineId, e.getMessage()); + } + } + + Comparator defineGetFunctionsComparator(FieldList resultSchemaFields) { + final int FUNC_CAT_IDX = resultSchemaFields.getIndex("FUNCTION_CAT"); + final int FUNC_SCHEM_IDX = resultSchemaFields.getIndex("FUNCTION_SCHEM"); + final int FUNC_NAME_IDX = resultSchemaFields.getIndex("FUNCTION_NAME"); + final int SPEC_NAME_IDX = resultSchemaFields.getIndex("SPECIFIC_NAME"); + + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_CAT_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_SCHEM_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_NAME_IDX), + Comparator.nullsFirst(String::compareTo)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, SPEC_NAME_IDX), + Comparator.nullsFirst(String::compareTo)); + } + + @Override + public ResultSet getFunctionColumns( + String catalog, String schemaPattern, String functionNamePattern, String columnNamePattern) { + if (catalog == null || catalog.isEmpty()) { + LOG.warning("Returning empty ResultSet catalog (project) is null or empty."); + return new BigQueryJsonResultSet(); + } + if ((schemaPattern != null && schemaPattern.isEmpty()) + || (functionNamePattern != null && functionNamePattern.isEmpty()) + || (columnNamePattern != null && columnNamePattern.isEmpty())) { + LOG.warning("Returning empty ResultSet because an explicit empty pattern was provided."); + return new BigQueryJsonResultSet(); + } + + LOG.info( + "getFunctionColumns called for catalog: %s, schemaPattern: %s, functionNamePattern: %s," + + " columnNamePattern: %s", + catalog, schemaPattern, functionNamePattern, columnNamePattern); + + final Pattern schemaRegex = compileSqlLikePattern(schemaPattern); + final Pattern functionNameRegex = compileSqlLikePattern(functionNamePattern); + final Pattern columnNameRegex = compileSqlLikePattern(columnNamePattern); + + final Schema resultSchema = defineGetFunctionColumnsSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = + new LinkedBlockingQueue<>(DEFAULT_QUEUE_CAPACITY); + final List collectedResults = Collections.synchronizedList(new ArrayList<>()); + final List> processingTaskFutures = new ArrayList<>(); + final String catalogParam = catalog; + + Runnable functionColumnFetcher = + () -> { + ExecutorService listRoutinesExecutor = null; + ExecutorService getRoutineDetailsExecutor = null; + ExecutorService processParamsExecutor = null; + final String fetcherThreadNameSuffix = + "-" + catalogParam.substring(0, Math.min(10, catalogParam.length())); + + try { + List datasetsToScan = + findMatchingBigQueryObjects( + "Dataset", + () -> + bigquery.listDatasets( + catalogParam, DatasetListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> bigquery.getDataset(DatasetId.of(catalogParam, name)), + (ds) -> ds.getDatasetId().getDataset(), + schemaPattern, + schemaRegex, + LOG); + + if (datasetsToScan.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info( + "Fetcher: No matching datasets or interrupted early. 
Catalog: " + catalogParam); + return; + } + + listRoutinesExecutor = + Executors.newFixedThreadPool( + API_EXECUTOR_POOL_SIZE, + runnable -> new Thread(runnable, "funcol-list-rout" + fetcherThreadNameSuffix)); + List functionIdsToGet = + listMatchingFunctionIdsFromDatasets( + datasetsToScan, + functionNamePattern, + functionNameRegex, + listRoutinesExecutor, + catalogParam, + LOG); + shutdownExecutor(listRoutinesExecutor); + listRoutinesExecutor = null; + + if (functionIdsToGet.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info("Fetcher: No function IDs found or interrupted. Catalog: " + catalogParam); + return; + } + + getRoutineDetailsExecutor = + Executors.newFixedThreadPool( + this.metadataFetchThreadCount, + runnable -> + new Thread(runnable, "funcol-get-details" + fetcherThreadNameSuffix)); + List fullFunctions = + fetchFullRoutineDetailsForIds(functionIdsToGet, getRoutineDetailsExecutor, LOG); + shutdownExecutor(getRoutineDetailsExecutor); + getRoutineDetailsExecutor = null; + + if (fullFunctions.isEmpty() || Thread.currentThread().isInterrupted()) { + LOG.info( + "Fetcher: No full functions fetched or interrupted. Catalog: " + catalogParam); + return; + } + + processParamsExecutor = + Executors.newFixedThreadPool( + this.metadataFetchThreadCount, + runnable -> + new Thread(runnable, "funcol-proc-params" + fetcherThreadNameSuffix)); + submitFunctionParameterProcessingJobs( + fullFunctions, + columnNameRegex, + collectedResults, + resultSchemaFields, + processParamsExecutor, + processingTaskFutures, + LOG); + + if (Thread.currentThread().isInterrupted()) { + LOG.warning( + "Fetcher: Interrupted before waiting for parameter processing. Catalog: " + + catalogParam); + processingTaskFutures.forEach(f -> f.cancel(true)); + } else { + LOG.fine( + "Fetcher: Waiting for " + + processingTaskFutures.size() + + " parameter processing tasks. Catalog: " + + catalogParam); + waitForTasksCompletion(processingTaskFutures); + LOG.fine( + "Fetcher: All parameter processing tasks completed or handled. Catalog: " + + catalogParam); + } + + if (!Thread.currentThread().isInterrupted()) { + Comparator comparator = + defineGetFunctionColumnsComparator(resultSchemaFields); + sortResults(collectedResults, comparator, "getFunctionColumns", LOG); + populateQueue(collectedResults, queue, resultSchemaFields); + } + + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning( + "Fetcher: Interrupted in main try block for catalog " + + catalogParam + + ". Error: " + + e.getMessage()); + processingTaskFutures.forEach(f -> f.cancel(true)); + } catch (Throwable t) { + LOG.severe( + "Fetcher: Unexpected error in main try block for catalog " + + catalogParam + + ". 
Error: " + + t.getMessage()); + processingTaskFutures.forEach(f -> f.cancel(true)); + } finally { + signalEndOfData(queue, resultSchemaFields); + if (listRoutinesExecutor != null) shutdownExecutor(listRoutinesExecutor); + if (getRoutineDetailsExecutor != null) shutdownExecutor(getRoutineDetailsExecutor); + if (processParamsExecutor != null) shutdownExecutor(processParamsExecutor); + LOG.info("Function column fetcher thread finished for catalog: " + catalogParam); + } + }; + + Thread fetcherThread = + new Thread(functionColumnFetcher, "getFunctionColumns-fetcher-" + catalog); + BigQueryJsonResultSet resultSet = + BigQueryJsonResultSet.of( + resultSchema, -1, queue, this.statement, new Thread[] {fetcherThread}); + + fetcherThread.start(); + LOG.info("Started background thread for getFunctionColumns for catalog: " + catalog); + return resultSet; + } + + Schema defineGetFunctionColumnsSchema() { + List fields = new ArrayList<>(17); + fields.add( + Field.newBuilder("FUNCTION_CAT", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 1 + fields.add( + Field.newBuilder("FUNCTION_SCHEM", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 2 + fields.add( + Field.newBuilder("FUNCTION_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 3 + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 4 + fields.add( + Field.newBuilder("COLUMN_TYPE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 5 + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 6 + fields.add( + Field.newBuilder("TYPE_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 7 + fields.add( + Field.newBuilder("PRECISION", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 8 + fields.add( + Field.newBuilder("LENGTH", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); // 9 + fields.add( + Field.newBuilder("SCALE", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); // 10 + fields.add( + Field.newBuilder("RADIX", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); // 11 + fields.add( + Field.newBuilder("NULLABLE", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 12 + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build()); // 13 + fields.add( + Field.newBuilder("CHAR_OCTET_LENGTH", StandardSQLTypeName.INT64) + .setMode(Mode.NULLABLE) + .build()); // 14 + fields.add( + Field.newBuilder("ORDINAL_POSITION", StandardSQLTypeName.INT64) + .setMode(Mode.REQUIRED) + .build()); // 15 + fields.add( + Field.newBuilder("IS_NULLABLE", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 16 + fields.add( + Field.newBuilder("SPECIFIC_NAME", StandardSQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .build()); // 17 + return Schema.of(fields); + } + + List listMatchingFunctionIdsFromDatasets( + List datasetsToScan, + String functionNamePattern, + Pattern functionNameRegex, + ExecutorService listRoutinesExecutor, + String catalogParam, + BigQueryJdbcCustomLogger logger) + throws InterruptedException { + + logger.fine( + "Listing matching function IDs from %d datasets for catalog '%s'.", + datasetsToScan.size(), catalogParam); + final List>> listRoutineFutures = new ArrayList<>(); + final List functionIdsToGet = Collections.synchronizedList(new ArrayList<>()); + + for (Dataset dataset : datasetsToScan) { + if 
(Thread.currentThread().isInterrupted()) { + logger.warning( + "Interrupted during submission of routine (function) listing tasks for catalog: " + + catalogParam); + throw new InterruptedException("Interrupted while listing functions"); + } + final DatasetId currentDatasetId = dataset.getDatasetId(); + Callable> listCallable = + () -> + findMatchingBigQueryObjects( + "Routine", + () -> + bigquery.listRoutines( + currentDatasetId, RoutineListOption.pageSize(DEFAULT_PAGE_SIZE)), + (name) -> + bigquery.getRoutine( + RoutineId.of( + currentDatasetId.getProject(), currentDatasetId.getDataset(), name)), + (rt) -> rt.getRoutineId().getRoutine(), + functionNamePattern, + functionNameRegex, + logger); + listRoutineFutures.add(listRoutinesExecutor.submit(listCallable)); + } + logger.fine( + "Submitted " + + listRoutineFutures.size() + + " routine (function) list tasks for catalog: " + + catalogParam); + + for (Future> listFuture : listRoutineFutures) { + if (Thread.currentThread().isInterrupted()) { + logger.warning( + "Interrupted while collecting routine (function) list results for catalog: " + + catalogParam); + listRoutineFutures.forEach(f -> f.cancel(true)); + throw new InterruptedException("Interrupted while collecting function lists"); + } + try { + List listedRoutines = listFuture.get(); + if (listedRoutines != null) { + for (Routine listedRoutine : listedRoutines) { + if (listedRoutine != null + && ("SCALAR_FUNCTION".equalsIgnoreCase(listedRoutine.getRoutineType()) + || "TABLE_FUNCTION".equalsIgnoreCase(listedRoutine.getRoutineType()))) { + if (listedRoutine.getRoutineId() != null) { + functionIdsToGet.add(listedRoutine.getRoutineId()); + } else { + logger.warning( + "Found a function type routine with a null ID during listing phase for catalog:" + + " " + + catalogParam); + } + } + } + } + } catch (ExecutionException e) { + logger.warning( + "Error getting routine (function) list result for catalog " + + catalogParam + + ": " + + e.getCause()); + } catch (CancellationException e) { + logger.warning("Routine (function) list task cancelled for catalog: " + catalogParam); + } + } + logger.info( + "Found %d function IDs to fetch details for in catalog '%s'.", + functionIdsToGet.size(), catalogParam); + return functionIdsToGet; + } + + void submitFunctionParameterProcessingJobs( + List fullFunctions, + Pattern columnNameRegex, + List collectedResults, + FieldList resultSchemaFields, + ExecutorService processParamsExecutor, + List> outParameterProcessingFutures, + BigQueryJdbcCustomLogger logger) + throws InterruptedException { + logger.fine("Submitting parameter processing jobs for %d functions.", fullFunctions.size()); + + for (Routine fullFunction : fullFunctions) { + if (Thread.currentThread().isInterrupted()) { + logger.warning("Interrupted during submission of function parameter processing tasks."); + throw new InterruptedException( + "Interrupted while submitting function parameter processing jobs"); + } + if (fullFunction != null) { + String routineType = fullFunction.getRoutineType(); + if ("SCALAR_FUNCTION".equalsIgnoreCase(routineType) + || "TABLE_FUNCTION".equalsIgnoreCase(routineType)) { + final Routine finalFullFunction = fullFunction; + Future processFuture = + processParamsExecutor.submit( + () -> + processFunctionParametersAndReturnValue( + finalFullFunction, + columnNameRegex, + collectedResults, + resultSchemaFields)); + outParameterProcessingFutures.add(processFuture); + } else { + logger.warning( + "Routine " + + (fullFunction.getRoutineId() != null + ? 
fullFunction.getRoutineId().toString() + : "UNKNOWN_ID") + + " fetched for getFunctionColumns was not of a function type (Type: " + + routineType + + "). Skipping parameter processing."); + } + } + } + logger.fine( + "Finished submitting " + + outParameterProcessingFutures.size() + + " processFunctionParametersAndReturnValue tasks."); + } + + void processFunctionParametersAndReturnValue( + Routine routine, + Pattern columnNameRegex, + List collectedResults, + FieldList resultSchemaFields) { + RoutineId routineId = routine.getRoutineId(); + if (routineId == null) { + LOG.warning("Processing a routine with a null ID. Skipping."); + return; + } + LOG.finer("Processing function parameters and return value for: " + routineId); + + String functionCatalog = routineId.getProject(); + String functionSchema = routineId.getDataset(); + String functionName = routineId.getRoutine(); + String specificName = functionName; + + if (routine.getReturnTableType() != null) { + StandardSQLTableType returnTableType = routine.getReturnTableType(); + if (returnTableType != null && returnTableType.getColumns() != null) { + List tableColumns = returnTableType.getColumns(); + for (int i = 0; i < tableColumns.size(); i++) { + StandardSQLField tableColumn = tableColumns.get(i); + String columnName = tableColumn.getName(); + if (columnNameRegex != null + && (columnName == null || !columnNameRegex.matcher(columnName).matches())) { + continue; + } + List rowValues = + createFunctionColumnRow( + functionCatalog, + functionSchema, + functionName, + specificName, + columnName, + DatabaseMetaData.functionColumnResult, + tableColumn.getDataType(), + i + 1); + collectedResults.add(FieldValueList.of(rowValues, resultSchemaFields)); + } + } + } + + List arguments = routine.getArguments(); + if (arguments != null) { + for (int i = 0; i < arguments.size(); i++) { + RoutineArgument arg = arguments.get(i); + String argName = arg.getName(); + + if (columnNameRegex != null + && (argName == null || !columnNameRegex.matcher(argName).matches())) { + continue; + } + + short columnType; + String originalMode = arg.getMode(); + + if ("IN".equalsIgnoreCase(originalMode)) { + columnType = DatabaseMetaData.functionColumnIn; + } else if ("OUT".equalsIgnoreCase(originalMode)) { + columnType = DatabaseMetaData.functionColumnOut; + } else if ("INOUT".equalsIgnoreCase(originalMode)) { + columnType = DatabaseMetaData.functionColumnInOut; + } else { + columnType = DatabaseMetaData.functionColumnUnknown; + } + + List rowValues = + createFunctionColumnRow( + functionCatalog, + functionSchema, + functionName, + specificName, + argName, + columnType, + arg.getDataType(), + i + 1); + collectedResults.add(FieldValueList.of(rowValues, resultSchemaFields)); + } + } + } + + List createFunctionColumnRow( + String functionCatalog, + String functionSchema, + String functionName, + String specificName, + String columnName, + int columnType, + StandardSQLDataType dataType, + int ordinalPosition) { + + List values = new ArrayList<>(17); + ColumnTypeInfo typeInfo = + determineTypeInfoFromDataType(dataType, functionName, columnName, ordinalPosition); + + values.add(createStringFieldValue(functionCatalog)); // 1. FUNCTION_CAT + values.add(createStringFieldValue(functionSchema)); // 2. FUNCTION_SCHEM + values.add(createStringFieldValue(functionName)); // 3. FUNCTION_NAME + values.add(createStringFieldValue(columnName)); // 4. COLUMN_NAME + values.add(createLongFieldValue((long) columnType)); // 5. 
COLUMN_TYPE + + values.add(createLongFieldValue((long) typeInfo.jdbcType)); // 6. DATA_TYPE + values.add(createStringFieldValue(typeInfo.typeName)); // 7. TYPE_NAME + values.add( + createLongFieldValue( + typeInfo.columnSize == null ? null : typeInfo.columnSize.longValue())); // 8. PRECISION + if (typeInfo.typeName != null + && (typeInfo.typeName.equalsIgnoreCase("STRING") + || typeInfo.typeName.equalsIgnoreCase("NVARCHAR") + || typeInfo.typeName.equalsIgnoreCase("BYTES") + || typeInfo.typeName.equalsIgnoreCase("VARBINARY"))) { + values.add( + createLongFieldValue( + typeInfo.columnSize == null ? null : typeInfo.columnSize.longValue())); // 9. LENGTH + values.add( + createLongFieldValue( + typeInfo.decimalDigits == null + ? null + : typeInfo.decimalDigits.longValue())); // 10. SCALE + values.add( + createLongFieldValue( + typeInfo.numPrecRadix == null + ? null + : typeInfo.numPrecRadix.longValue())); // 11. RADIX + values.add( + createLongFieldValue((long) DatabaseMetaData.functionNullableUnknown)); // 12. NULLABLE + values.add(createStringFieldValue(null)); // 13. REMARKS + values.add( + createLongFieldValue( + typeInfo.columnSize == null + ? null + : typeInfo.columnSize.longValue())); // 14. CHAR_OCTET_LENGTH + } else { + values.add(createNullFieldValue()); // 9. LENGTH + values.add( + createLongFieldValue( + typeInfo.decimalDigits == null + ? null + : typeInfo.decimalDigits.longValue())); // 10. SCALE + values.add( + createLongFieldValue( + typeInfo.numPrecRadix == null + ? null + : typeInfo.numPrecRadix.longValue())); // 11. RADIX + values.add( + createLongFieldValue((long) DatabaseMetaData.functionNullableUnknown)); // 12. NULLABLE + values.add(createStringFieldValue(null)); // 13. REMARKS + values.add(createNullFieldValue()); // 14. CHAR_OCTET_LENGTH + } + + values.add(createLongFieldValue((long) ordinalPosition)); // 15. ORDINAL_POSITION + values.add(createStringFieldValue("")); // 16. IS_NULLABLE + values.add(createStringFieldValue(specificName)); // 17. SPECIFIC_NAME + + return values; + } + + Comparator defineGetFunctionColumnsComparator(FieldList resultSchemaFields) { + final int FUNC_CAT_IDX = resultSchemaFields.getIndex("FUNCTION_CAT"); + final int FUNC_SCHEM_IDX = resultSchemaFields.getIndex("FUNCTION_SCHEM"); + final int FUNC_NAME_IDX = resultSchemaFields.getIndex("FUNCTION_NAME"); + final int SPEC_NAME_IDX = resultSchemaFields.getIndex("SPECIFIC_NAME"); + final int ORDINAL_POS_IDX = resultSchemaFields.getIndex("ORDINAL_POSITION"); + + return Comparator.comparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_CAT_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_SCHEM_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, FUNC_NAME_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getStringValueOrNull(fvl, SPEC_NAME_IDX), + Comparator.nullsFirst(String::compareToIgnoreCase)) + .thenComparing( + (FieldValueList fvl) -> getLongValueOrNull(fvl, ORDINAL_POS_IDX), + Comparator.nullsFirst(Long::compareTo)); + } + + @Override + public ResultSet getPseudoColumns( + String catalog, String schemaPattern, String tableNamePattern, String columnNamePattern) { + LOG.info( + "getPseudoColumns called for catalog: %s, schemaPattern: %s, tableNamePattern: %s," + + " columnNamePattern: %s. 
Pseudo columns not supported by BigQuery; returning" + + " empty ResultSet.", + catalog, schemaPattern, tableNamePattern, columnNamePattern); + + final Schema resultSchema = defineGetPseudoColumnsSchema(); + final FieldList resultSchemaFields = resultSchema.getFields(); + final BlockingQueue queue = new LinkedBlockingQueue<>(1); + signalEndOfData(queue, resultSchemaFields); + + return BigQueryJsonResultSet.of(resultSchema, 0, queue, this.statement, null); + } + + Schema defineGetPseudoColumnsSchema() { + List fields = new ArrayList<>(12); + fields.add( + Field.newBuilder("TABLE_CAT", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DATA_TYPE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("COLUMN_SIZE", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("DECIMAL_DIGITS", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("NUM_PREC_RADIX", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("COLUMN_USAGE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("REMARKS", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("CHAR_OCTET_LENGTH", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("IS_NULLABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + return Schema.of(fields); + } + + @Override + public boolean generatedKeyAlwaysReturned() { + return false; + } + + @Override + public T unwrap(Class iface) { + return null; + } + + @Override + public boolean isWrapperFor(Class iface) { + return false; + } + + // --- Helper Methods --- + + /** + * Determines the effective catalog and schema pattern to use for metadata retrieval. + * + *

<p>This method applies the logic for the {@code FilterTablesOnDefaultDataset} connection
+   * property. If this property is enabled and the provided {@code catalog} or {@code schemaPattern}
+   * are null, empty, or wildcard ('%'), they may be overridden by the default catalog (project) and
+   * default dataset (schema) configured in the {@link BigQueryConnection}.
+   *
+   * @param catalog The catalog name provided by the user; may be {@code null}.
+   * @param schemaPattern The schema name pattern provided by the user; may be {@code null}.
+   * @return A {@link Tuple} where {@code Tuple.x()} is the effective catalog string and {@code
+   *     Tuple.y()} is the effective schema pattern string. These are the values that should be used
+   *     for querying BigQuery's metadata.
+   * @see BigQueryConnection#isFilterTablesOnDefaultDataset()
+   */
+  private Tuple<String, String> determineEffectiveCatalogAndSchema(
+      String catalog, String schemaPattern) {
+    String effectiveCatalog = catalog;
+    String effectiveSchemaPattern = schemaPattern;
+
+    if (this.connection.isFilterTablesOnDefaultDataset()
+        && this.connection.getDefaultDataset() != null
+        && this.connection.getDefaultDataset().getDataset() != null
+        && !this.connection.getDefaultDataset().getDataset().isEmpty()) {
+
+      String defaultProjectFromConnection = this.connection.getCatalog();
+      // We only use the dataset part of the DefaultDataset for schema filtering
+      String defaultSchemaFromConnection = this.connection.getDefaultDataset().getDataset();
+
+      boolean catalogIsNullOrEmptyOrWildcard =
+          (catalog == null || catalog.isEmpty() || catalog.equals("%"));
+      boolean schemaPatternIsNullOrEmptyOrWildcard =
+          (schemaPattern == null || schemaPattern.isEmpty() || schemaPattern.equals("%"));
+
+      final String logPrefix = "FilterTablesOnDefaultDatasetTrue: ";
+      if (catalogIsNullOrEmptyOrWildcard && schemaPatternIsNullOrEmptyOrWildcard) {
+        effectiveCatalog = defaultProjectFromConnection;
+        effectiveSchemaPattern = defaultSchemaFromConnection;
+        LOG.info(
+            logPrefix + "Using default catalog '%s' and default dataset '%s'.",
+            effectiveCatalog,
+            effectiveSchemaPattern);
+      } else if (catalogIsNullOrEmptyOrWildcard) {
+        effectiveCatalog = defaultProjectFromConnection;
+        LOG.info(
+            logPrefix
+                + "Using default catalog '%s' with user dataset '%s'. Default dataset '%s' ignored.",
+            effectiveCatalog,
+            effectiveSchemaPattern,
+            defaultSchemaFromConnection);
+      } else if (schemaPatternIsNullOrEmptyOrWildcard) {
+        effectiveSchemaPattern = defaultSchemaFromConnection;
+        LOG.info(
+            logPrefix + "Using user catalog '%s' and default dataset '%s'.",
+            effectiveCatalog,
+            effectiveSchemaPattern);
+      } else {
+        LOG.info(
+            logPrefix + "Using user catalog '%s' and schema '%s'. Default dataset '%s' ignored.",
+            effectiveCatalog,
+            effectiveSchemaPattern,
+            defaultSchemaFromConnection);
+      }
+    }
+    return Tuple.of(effectiveCatalog, effectiveSchemaPattern);
+  }
+
+  private ColumnTypeInfo getColumnTypeInfoForSqlType(StandardSQLTypeName bqType) {
+    if (bqType == null) {
+      // Calling bqType.name() here would throw a NullPointerException; fall back to VARCHAR.
+      LOG.warning("Null BigQuery type encountered. Mapping to VARCHAR.");
+      return new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null);
+    }
+
+    switch (bqType) {
+      case INT64:
+        return new ColumnTypeInfo(Types.BIGINT, "BIGINT", 19, 0, 10);
+      case BOOL:
+        return new ColumnTypeInfo(Types.BOOLEAN, "BOOLEAN", 1, null, null);
+      case FLOAT64:
+        return new ColumnTypeInfo(Types.DOUBLE, "DOUBLE", 15, null, 10);
+      case NUMERIC:
+        return new ColumnTypeInfo(Types.NUMERIC, "NUMERIC", 38, 9, 10);
+      case BIGNUMERIC:
+        return new ColumnTypeInfo(Types.NUMERIC, "NUMERIC", 77, 38, 10);
+      case STRING:
+        return new ColumnTypeInfo(Types.NVARCHAR, "NVARCHAR", null, null, null);
+      case TIMESTAMP:
+      case DATETIME:
+        return new ColumnTypeInfo(Types.TIMESTAMP, "TIMESTAMP", 29, null, null);
+      case DATE:
+        return new ColumnTypeInfo(Types.DATE, "DATE", 10, null, null);
+      case TIME:
+        return new ColumnTypeInfo(Types.TIME, "TIME", 15, null, null);
+      case GEOGRAPHY:
+      case JSON:
+      case INTERVAL:
+        return new ColumnTypeInfo(Types.VARCHAR, "VARCHAR", null, null, null);
+      case BYTES:
+        return new ColumnTypeInfo(Types.VARBINARY, "VARBINARY", null, null, null);
+      case STRUCT:
+        return new ColumnTypeInfo(Types.STRUCT, "STRUCT", null, null, null);
+      default:
+        LOG.warning(
+            "Unknown BigQuery type encountered: " + bqType.name() + ". Mapping to VARCHAR.");
+        return new ColumnTypeInfo(Types.VARCHAR, bqType.name(), null, null, null);
+    }
+  }
+
+  <T> List<T> findMatchingBigQueryObjects(
+      String objectTypeName,
+      Supplier<Page<T>> listAllOperation,
+      Function<String, T> getSpecificOperation,
+      Function<T, String> nameExtractor,
+      String pattern,
+      Pattern regex,
+      BigQueryJdbcCustomLogger logger) {
+
+    boolean needsList = needsListing(pattern);
+    List<T> resultList = new ArrayList<>();
+
+    try {
+      Iterable<T> objects;
+      if (needsList) {
+        logger.info(
+            "Listing all %ss (pattern: %s)...",
+            objectTypeName, pattern == null ? "" : pattern);
+        Page<T> firstPage = listAllOperation.get();
+        objects = firstPage.iterateAll();
+        logger.fine(
+            "Retrieved initial %s list, iterating & filtering if needed...", objectTypeName);
+
+      } else {
+        logger.info("Getting specific %s: '%s'", objectTypeName, pattern);
+        T specificObject = getSpecificOperation.apply(pattern);
+        objects =
+            (specificObject == null)
+                ?
Collections.emptyList() + : Collections.singletonList(specificObject); + if (specificObject == null) { + logger.info("Specific %s not found: '%s'", objectTypeName, pattern); + } + } + + boolean wasListing = needsList; + for (T obj : objects) { + if (Thread.currentThread().isInterrupted()) { + logger.warning("Thread interrupted during " + objectTypeName + " processing loop."); + throw new InterruptedException( + "Interrupted during " + objectTypeName + " processing loop"); + } + if (obj != null) { + if (wasListing && regex != null) { + String name = nameExtractor.apply(obj); + if (name != null && regex.matcher(name).matches()) { + resultList.add(obj); + } + } else { + resultList.add(obj); + } + } + } + + } catch (BigQueryException e) { + if (!needsList && e.getCode() == 404) { + logger.info("%s '%s' not found (API error 404).", objectTypeName, pattern); + } else { + logger.warning( + "BigQueryException finding %ss for pattern '%s': %s (Code: %d)", + objectTypeName, pattern, e.getMessage(), e.getCode()); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + logger.warning("Interrupted while finding " + objectTypeName + "s."); + } catch (Exception e) { + logger.severe( + "Unexpected exception finding %ss for pattern '%s': %s", + objectTypeName, pattern, e.getMessage()); + } + return resultList; + } + + private static class TypeInfoRowData { + String typeName; + int jdbcType; + Long precision; + String literalPrefix; + String literalSuffix; + String createParams; + int nullable; + boolean caseSensitive; + int searchable; + boolean unsignedAttribute; + boolean fixedPrecScale; + boolean autoIncrement; + String localTypeName; + Long minimumScale; + Long maximumScale; + Long numPrecRadix; + + TypeInfoRowData( + String typeName, + int jdbcType, + Long precision, + String literalPrefix, + String literalSuffix, + String createParams, + int nullable, + boolean caseSensitive, + int searchable, + boolean unsignedAttribute, + boolean fixedPrecScale, + boolean autoIncrement, + String localTypeName, + Long minimumScale, + Long maximumScale, + Long numPrecRadix) { + this.typeName = typeName; + this.jdbcType = jdbcType; + this.precision = precision; + this.literalPrefix = literalPrefix; + this.literalSuffix = literalSuffix; + this.createParams = createParams; + this.nullable = nullable; + this.caseSensitive = caseSensitive; + this.searchable = searchable; + this.unsignedAttribute = unsignedAttribute; + this.fixedPrecScale = fixedPrecScale; + this.autoIncrement = autoIncrement; + this.localTypeName = localTypeName; + this.minimumScale = minimumScale; + this.maximumScale = maximumScale; + this.numPrecRadix = numPrecRadix; + } + } + + void sortResults( + List collectedResults, + Comparator comparator, + String operationName, + BigQueryJdbcCustomLogger logger) { + + if (collectedResults == null || collectedResults.isEmpty()) { + logger.info("No results collected for %s, skipping sort.", operationName); + return; + } + if (comparator == null) { + logger.info("No comparator provided for %s, skipping sort.", operationName); + return; + } + + logger.info("Sorting %d collected %s results...", collectedResults.size(), operationName); + try { + collectedResults.sort(comparator); + logger.info("%s result sorting completed.", operationName); + } catch (Exception e) { + logger.severe("Error during sorting %s results: %s", operationName, e.getMessage()); + } + } + + private List defineBasePrivilegeFields() { + List fields = new ArrayList<>(7); + fields.add( + Field.newBuilder("TABLE_CAT", 
StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_SCHEM", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("TABLE_NAME", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("GRANTOR", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + fields.add( + Field.newBuilder("GRANTEE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("PRIVILEGE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REQUIRED) + .build()); + fields.add( + Field.newBuilder("IS_GRANTABLE", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + return fields; + } + + Pattern compileSqlLikePattern(String sqlLikePattern) { + if (sqlLikePattern == null) { + return null; + } + if (sqlLikePattern.isEmpty()) { + return Pattern.compile("(?!)"); + } + StringBuilder regex = new StringBuilder(sqlLikePattern.length() * 2); + regex.append('^'); + for (int i = 0; i < sqlLikePattern.length(); i++) { + char c = sqlLikePattern.charAt(i); + switch (c) { + case '%': + regex.append(".*"); + break; + case '_': + regex.append('.'); + break; + case '\\': + case '.': + case '[': + case ']': + case '(': + case ')': + case '{': + case '}': + case '*': + case '+': + case '?': + case '^': + case '$': + case '|': + regex.append('\\').append(c); + break; + default: + regex.append(c); + break; + } + } + regex.append('$'); + return Pattern.compile(regex.toString(), Pattern.CASE_INSENSITIVE); + } + + boolean needsListing(String pattern) { + return pattern == null || pattern.contains("%") || pattern.contains("_"); + } + + FieldValue createStringFieldValue(String value) { + return FieldValue.of(FieldValue.Attribute.PRIMITIVE, value); + } + + FieldValue createLongFieldValue(Long value) { + return (value == null) + ? FieldValue.of(FieldValue.Attribute.PRIMITIVE, null) + : FieldValue.of(FieldValue.Attribute.PRIMITIVE, String.valueOf(value)); + } + + FieldValue createNullFieldValue() { + return FieldValue.of(FieldValue.Attribute.PRIMITIVE, null); + } + + FieldValue createBooleanFieldValue(Boolean value) { + return (value == null) + ? FieldValue.of(FieldValue.Attribute.PRIMITIVE, null) + : FieldValue.of(FieldValue.Attribute.PRIMITIVE, value ? "1" : "0"); + } + + private String getStringValueOrNull(FieldValueList fvl, int index) { + if (fvl == null || index < 0 || index >= fvl.size()) return null; + FieldValue fv = fvl.get(index); + return (fv == null || fv.isNull()) ? null : fv.getStringValue(); + } + + private Long getLongValueOrNull(FieldValueList fvl, int index) { + if (fvl == null || index < 0 || index >= fvl.size()) return null; + FieldValue fv = fvl.get(index); + try { + return (fv == null || fv.isNull()) ? null : fv.getLongValue(); + } catch (NumberFormatException e) { + LOG.warning("Could not parse Long value for index " + index); + return null; + } + } + + private void waitForTasksCompletion(List> taskFutures) { + LOG.info("Waiting for %d submitted tasks to complete...", taskFutures.size()); + for (Future future : taskFutures) { + try { + if (!future.isCancelled()) { + future.get(); + } + } catch (CancellationException e) { + LOG.warning("A table processing task was cancelled."); + } catch (ExecutionException e) { + LOG.severe( + "Error executing table processing task: %s", + (e.getCause() != null ? 
e.getCause().getMessage() : e.getMessage())); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning( + "Fetcher thread interrupted while waiting for tasks. Attempting to cancel remaining" + + " tasks."); + taskFutures.forEach(f -> f.cancel(true)); + break; + } + } + LOG.info("Finished waiting for tasks."); + } + + private void populateQueue( + List collectedResults, + BlockingQueue queue, + FieldList resultSchemaFields) { + LOG.info("Populating queue with %d results...", collectedResults.size()); + try { + for (FieldValueList sortedRow : collectedResults) { + if (Thread.currentThread().isInterrupted()) { + LOG.warning("Interrupted during queue population."); + break; + } + queue.put(BigQueryFieldValueListWrapper.of(resultSchemaFields, sortedRow)); + } + LOG.info("Finished populating queue."); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning("Interrupted while putting row onto queue."); + } catch (Exception e) { + LOG.severe("Unexpected error populating queue: " + e.getMessage()); + } + } + + private void signalEndOfData( + BlockingQueue queue, FieldList resultSchemaFields) { + try { + LOG.info("Adding end signal to queue."); + queue.put(BigQueryFieldValueListWrapper.of(resultSchemaFields, null, true)); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + LOG.warning("Interrupted while sending end signal to queue."); + } catch (Exception e) { + LOG.severe("Exception while sending end signal to queue: " + e.getMessage()); + } + } + + private void shutdownExecutor(ExecutorService executor) { + if (executor == null || executor.isShutdown()) { + return; + } + LOG.info("Shutting down column executor service..."); + executor.shutdown(); + try { + if (!executor.awaitTermination(10, TimeUnit.SECONDS)) { + LOG.warning("Executor did not terminate gracefully after 10s, forcing shutdownNow()."); + List droppedTasks = executor.shutdownNow(); + LOG.warning( + "Executor shutdownNow() initiated. Dropped tasks count: " + droppedTasks.size()); + if (!executor.awaitTermination(10, TimeUnit.SECONDS)) { + LOG.severe("Executor did not terminate even after shutdownNow()."); + } + } + LOG.info("Executor shutdown complete."); + } catch (InterruptedException ie) { + LOG.warning( + "Interrupted while waiting for executor termination. 
Forcing shutdownNow() again."); + executor.shutdownNow(); + Thread.currentThread().interrupt(); + } + } + + private String getCurrentCatalogName() { + return this.connection.getCatalog(); + } + + private List getAccessibleCatalogNames() { + Set accessibleCatalogs = new HashSet<>(); + String primaryCatalog = getCurrentCatalogName(); + if (primaryCatalog != null && !primaryCatalog.isEmpty()) { + accessibleCatalogs.add(primaryCatalog); + } + + String additionalProjectsStr = this.connection.getAdditionalProjects(); + if (additionalProjectsStr != null && !additionalProjectsStr.trim().isEmpty()) { + List additionalProjects = + com.google.common.base.Splitter.on(',') + .trimResults() + .omitEmptyStrings() + .splitToList(additionalProjectsStr); + for (String project : additionalProjects) { + if (project != null && !project.isEmpty()) { + accessibleCatalogs.add(project); + } + } + } + + List sortedCatalogs = new ArrayList<>(accessibleCatalogs); + Collections.sort(sortedCatalogs); + return sortedCatalogs; + } + + static String readSqlFromFile(String filename) { + InputStream in; + in = BigQueryDatabaseMetaData.class.getResourceAsStream(filename); + BufferedReader reader = new BufferedReader(new InputStreamReader(in)); + StringBuilder builder = new StringBuilder(); + try (Scanner scanner = new Scanner(reader)) { + while (scanner.hasNextLine()) { + String line = scanner.nextLine(); + builder.append(line).append("\n"); + } + } + return builder.toString(); + } + + String replaceSqlParameters(String sql, String... params) throws SQLException { + return String.format(sql, (Object[]) params); + } + + private void loadDriverVersionProperties() { + if (parsedDriverVersion.get() != null) { + return; + } + Properties props = new Properties(); + try (InputStream input = + getClass().getResourceAsStream("/com/google/cloud/bigquery/jdbc/dependencies.properties")) { + if (input == null) { + String errorMessage = + "Could not find dependencies.properties. Driver version information is unavailable."; + LOG.severe(errorMessage); + throw new IllegalStateException(errorMessage); + } + props.load(input); + String versionString = props.getProperty("version.jdbc"); + if (versionString == null || versionString.trim().isEmpty()) { + String errorMessage = + "The property version.jdbc not found or empty in dependencies.properties."; + LOG.severe(errorMessage); + throw new IllegalStateException(errorMessage); + } + parsedDriverVersion.compareAndSet(null, versionString.trim()); + String[] parts = versionString.split("\\."); + if (parts.length < 2) { + return; + } + parsedDriverMajorVersion.compareAndSet(null, Integer.parseInt(parts[0])); + String minorPart = parts[1]; + String numericMinor = minorPart.replaceAll("[^0-9].*", ""); + if (!numericMinor.isEmpty()) { + parsedDriverMinorVersion.compareAndSet(null, Integer.parseInt(numericMinor)); + } + } catch (IOException | NumberFormatException e) { + String errorMessage = + "Error reading dependencies.properties. Driver version information is" + + " unavailable. 
Error: " + + e.getMessage(); + LOG.severe(errorMessage); + throw new IllegalStateException(errorMessage, e); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercions.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercions.java new file mode 100644 index 0000000000..324888982a --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercions.java @@ -0,0 +1,102 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.math.RoundingMode; + +/** A registry of all the inbuilt {@link BigQueryCoercion}s that framework offers by default. */ +@InternalApi +class BigQueryDefaultCoercions { + + /** + * Creates a {@link BigQueryTypeCoercerBuilder} with all the inbuilt {@link BigQueryCoercion}s. + *

<ul>
+   *   <li>{@link BigQueryTypeCoercer#INSTANCE} uses this builder to populate itself with all the
+   *       default {@link BigQueryCoercion}s.
+   *   <li>A {@link BigQueryTypeCoercerBuilder} created through the {@link
+   *       BigQueryTypeCoercer#builder()} method also contains all these default {@link
+   *       BigQueryCoercion}s.
+   * </ul>
+   */
+  static BigQueryTypeCoercerBuilder builder() {
+    BigQueryTypeCoercerBuilder builder = new BigQueryTypeCoercerBuilder();
+
+    // TODO: can we figure out the class parameters from coercion itself?
+    builder.registerTypeCoercion(
+        (String s) -> s != null && ("1".equals(s.trim()) || Boolean.parseBoolean(s)),
+        String.class,
+        Boolean.class);
+    builder.registerTypeCoercion(Integer::parseInt, String.class, Integer.class);
+    builder.registerTypeCoercion(BigInteger::new, String.class, BigInteger.class);
+    builder.registerTypeCoercion(Long::valueOf, String.class, Long.class);
+    builder.registerTypeCoercion(Double::valueOf, String.class, Double.class);
+    builder.registerTypeCoercion(BigDecimal::new, String.class, BigDecimal.class);
+
+    builder.registerTypeCoercion((b) -> b ? 1 : 0, Boolean.class, Integer.class);
+
+    builder.registerTypeCoercion(Integer::intValue, Integer.class, Integer.class);
+    builder.registerTypeCoercion(Integer::shortValue, Integer.class, Short.class);
+    builder.registerTypeCoercion(Integer::byteValue, Integer.class, Byte.class);
+    builder.registerTypeCoercion(Integer::doubleValue, Integer.class, Double.class);
+    builder.registerTypeCoercion(Integer::floatValue, Integer.class, Float.class);
+
+    builder.registerTypeCoercion(Long::intValue, Long.class, Integer.class);
+    builder.registerTypeCoercion(Long::shortValue, Long.class, Short.class);
+    builder.registerTypeCoercion(Long::byteValue, Long.class, Byte.class);
+    builder.registerTypeCoercion(Long::doubleValue, Long.class, Double.class);
+    builder.registerTypeCoercion(Long::floatValue, Long.class, Float.class);
+
+    builder.registerTypeCoercion(Double::floatValue, Double.class, Float.class);
+    builder.registerTypeCoercion(Double::longValue, Double.class, Long.class);
+    builder.registerTypeCoercion(Double::intValue, Double.class, Integer.class);
+    builder.registerTypeCoercion(Double::shortValue, Double.class, Short.class);
+    builder.registerTypeCoercion(Double::byteValue, Double.class, Byte.class);
+    builder.registerTypeCoercion(BigDecimal::valueOf, Double.class, BigDecimal.class);
+
+    builder.registerTypeCoercion(Float::intValue, Float.class, Integer.class);
+    builder.registerTypeCoercion(Float::byteValue, Float.class, Byte.class);
+    builder.registerTypeCoercion(Float::shortValue, Float.class, Short.class);
+    builder.registerTypeCoercion(Float::doubleValue, Float.class, Double.class);
+
+    builder.registerTypeCoercion(BigInteger::longValue, BigInteger.class, Long.class);
+    builder.registerTypeCoercion(BigDecimal::new, BigInteger.class, BigDecimal.class);
+
+    builder.registerTypeCoercion(BigDecimal::doubleValue, BigDecimal.class, Double.class);
+    builder.registerTypeCoercion(BigDecimal::toBigInteger, BigDecimal.class, BigInteger.class);
+    builder.registerTypeCoercion(
+        bigDecimal -> bigDecimal.setScale(0, RoundingMode.DOWN).intValueExact(),
+        BigDecimal.class,
+        Integer.class);
+    builder.registerTypeCoercion(
+        bigDecimal -> bigDecimal.setScale(0, RoundingMode.DOWN).longValueExact(),
+        BigDecimal.class,
+        Long.class);
+    builder.registerTypeCoercion(
+        bigDecimal -> bigDecimal.setScale(0, RoundingMode.DOWN).shortValueExact(),
+        BigDecimal.class,
+        Short.class);
+    builder.registerTypeCoercion(
+        bigDecimal -> bigDecimal.setScale(0, RoundingMode.DOWN).byteValueExact(),
+        BigDecimal.class,
+        Byte.class);
+    builder.registerTypeCoercion(BigDecimal::floatValue,
BigDecimal.class, Float.class); + + builder.registerTypeCoercion(unused -> false, Void.class, Boolean.class); + + return builder; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDriver.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDriver.java new file mode 100644 index 0000000000..930fc42af2 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryDriver.java @@ -0,0 +1,254 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import io.grpc.LoadBalancerRegistry; +import io.grpc.internal.PickFirstLoadBalancerProvider; +import java.io.IOException; +import java.sql.Connection; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.DriverPropertyInfo; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.logging.Level; +import java.util.logging.Logger; + +/** + * JDBC {@link Driver} implementation for BigQuery. + * + *

<p>Usage:
+ *
+ * <pre>
+ *  String CONNECTION_URL = "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+ *            + "ProjectId=test;OAuthType=3";
+ *  Connection connection = DriverManager.getConnection(CONNECTION_URL);
+ * </pre>
+ *
+ * <p>Valid URLs take the form:
+ *
+ * <pre>
+ *  jdbc:bigquery://{host}:{port};ProjectId={projectId};OAuthType={oAuthType};
+ *  {property1}={value1};{property2}={value2};...
+ * </pre>
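+ *
+ * <p>Beyond {@code ProjectId} and {@code OAuthType} shown above, property names referenced
+ * elsewhere in this driver include, for example, {@code DefaultDataset},
+ * {@code FilterTablesOnDefaultDataset}, {@code LogLevel} and {@code LogPath}; the full set is
+ * enumerated by {@code BigQueryJdbcUrlUtility#VALID_PROPERTIES}.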
    + */ +public class BigQueryDriver implements Driver { + + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryDriver.class.getName()); + // TODO: update this when JDBC goes GA + private static final int JDBC_MAJOR_VERSION = 0; + private static final int JDBC_MINOR_VERSION = 1; + static BigQueryDriver registeredBigqueryJdbcDriver; + + static { + try { + register(); + } catch (SQLException e) { + throw new ExceptionInInitializerError("Registering driver failed: " + e.getMessage()); + } + LoadBalancerRegistry.getDefaultRegistry().register(new PickFirstLoadBalancerProvider()); + } + + public BigQueryDriver() {} + + static void register() throws SQLException { + LOG.finest("++enter++"); + if (isRegistered()) { + LOG.warning("Driver is already registered. It can only be registered once."); + } + + DriverManager.registerDriver(LazyHolder.INSTANCE); + BigQueryDriver.registeredBigqueryJdbcDriver = LazyHolder.INSTANCE; + } + + static boolean isRegistered() { + LOG.finest("++enter++"); + return registeredBigqueryJdbcDriver != null; + } + + /** + * @return the registered JDBC driver for BigQuery. + * @throws IllegalStateException if the driver has not been registered. + */ + public static BigQueryDriver getRegisteredDriver() throws IllegalStateException { + LOG.finest("++enter++"); + if (isRegistered()) { + return registeredBigqueryJdbcDriver; + } + throw new IllegalStateException( + "Driver is not registered (or it has not been registered using Driver.register() method)"); + } + + /** + * Attempts to establish a BigQuery connection to the given URL, using the provided connection + * Properties. + * + *
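<p>A minimal sketch (the host and property values here are illustrative only):
+   *
+   * <pre>
+   *  Properties info = new Properties();
+   *  info.setProperty("ProjectId", "my-project");
+   *  Connection connection =
+   *      new BigQueryDriver()
+   *          .connect("jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;", info);
+   * </pre>
+   *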

<p>Valid URLs take the form:
+   *
+   * <pre>
+   *  jdbc:bigquery://{host}:{port};ProjectId={projectId};OAuthType={oAuthType};
+   *  {property1}={value1};{property2}={value2};...
+   * </pre>
    + * + * @param url the BigQuery URL to connect to + * @param info a list of arbitrary string tag/value pairs as connection arguments. + * @return A connection to the URL if it was established successfully, otherwise {@code null} + * @throws SQLException if driver fails to connect to clients. + * @see Driver#connect(String, Properties) + */ + @Override + public Connection connect(String url, Properties info) throws SQLException { + LOG.finest("++enter++"); + try { + if (acceptsURL(url)) { + // strip 'jdbc:' from the URL, add any extra properties + String connectionUri = + BigQueryJdbcUrlUtility.appendPropertiesToURL(url.substring(5), this.toString(), info); + try { + BigQueryJdbcUrlUtility.parseUrl(connectionUri); + } catch (BigQueryJdbcRuntimeException e) { + throw new BigQueryJdbcException(e.getMessage(), e); + } + + DataSource ds = DataSource.fromUrl(connectionUri); + + // LogLevel + String logLevelStr = ds.getLogLevel(); + if (logLevelStr == null) { + logLevelStr = System.getenv(BigQueryJdbcUrlUtility.LOG_LEVEL_ENV_VAR); + } + Level logLevel = BigQueryJdbcUrlUtility.parseLogLevel(logLevelStr); + + // LogPath + String logPath = ds.getLogPath(); + if (logPath == null) { + logPath = System.getenv(BigQueryJdbcUrlUtility.LOG_PATH_ENV_VAR); + } + if (logPath == null) { + logPath = BigQueryJdbcUrlUtility.DEFAULT_LOG_PATH; + } + + BigQueryJdbcRootLogger.setLevel(logLevel, logPath); + + // Logging starts from here. + BigQueryConnection connection = new BigQueryConnection(connectionUri, ds); + LOG.info( + "Driver info : { {Database Product Name : %s}, " + + "{Database Product Version : %s}, " + + "{Driver Name : %s}, " + + "{Driver Version : %s}, " + + "{LogLevel : %s}, " + + "{LogPath : %s}, " + + "{Driver Instance : %s} }", + connection.getMetaData().getDatabaseProductName(), + connection.getMetaData().getDatabaseProductVersion(), + connection.getMetaData().getDriverName(), + connection.getMetaData().getDriverVersion(), + logLevel, + logPath, + this.toString()); + return connection; + } else { + return null; + } + } catch (IOException e) { + LOG.warning("Getting a warning: " + e.getMessage()); + } + return null; + } + + /** + * @param url a JDBC connection URL + * @return True if the URL is non-empty and starts with "jdbc:bigquery" + * @see Driver#acceptsURL(String) + */ + @Override + public boolean acceptsURL(String url) throws SQLException { + LOG.finest("++enter++"); + if (url == null || url.isEmpty()) { + throw new BigQueryJdbcException("Connection URL is null."); + } + return url.startsWith("jdbc:bigquery:"); + } + + /** + * Gets information about the possible BigQuery JDBC Connection Properties. + * + * @param url the BigQuery connection URL + * @param info a proposed list BigQuery connection properties + * @return an array of {@code DriverPropertyInfo} objects describing possible properties. 
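+   *     Each entry pairs a property name from {@code BigQueryJdbcUrlUtility.VALID_PROPERTIES}
+   *     (plus any OAuth-specific properties) with the value currently parsed from the connection
+   *     URL.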
+ * @see Driver#getPropertyInfo(String, Properties) + */ + @Override + public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) { + LOG.finest("++enter++"); + String connectionUri = + BigQueryJdbcUrlUtility.appendPropertiesToURL(url.substring(5), this.toString(), info); + List propertyInfoList = new ArrayList<>(); + + for (BigQueryConnectionProperty prop : BigQueryJdbcUrlUtility.VALID_PROPERTIES) { + DriverPropertyInfo driverProperty = + new DriverPropertyInfo( + prop.getName(), + BigQueryJdbcUrlUtility.parseUriProperty(connectionUri, prop.getName())); + driverProperty.description = prop.getDescription(); + propertyInfoList.add(driverProperty); + } + + DataSource ds = DataSource.fromUrl(connectionUri); + Map oAuthProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(ds, this.toString()); + for (Map.Entry authProperty : oAuthProperties.entrySet()) { + propertyInfoList.add(new DriverPropertyInfo(authProperty.getKey(), authProperty.getValue())); + } + return propertyInfoList.toArray(new DriverPropertyInfo[0]); + } + + @Override + public int getMajorVersion() { + LOG.finest("++enter++"); + return JDBC_MAJOR_VERSION; + } + + @Override + public int getMinorVersion() { + LOG.finest("++enter++"); + return JDBC_MINOR_VERSION; + } + + @Override + public boolean jdbcCompliant() { + LOG.finest("++enter++"); + return false; + } + + @Override + public Logger getParentLogger() { + LOG.finest("++enter++"); + return BigQueryJdbcRootLogger.getRootLogger(); + } + + private static class LazyHolder { + static final BigQueryDriver INSTANCE = new BigQueryDriver(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryErrorMessage.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryErrorMessage.java new file mode 100644 index 0000000000..9afa074522 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryErrorMessage.java @@ -0,0 +1,29 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.api.core.InternalApi;
+
+@InternalApi
+class BigQueryErrorMessage {
+
+  static final String CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED =
+      "Customized Types are not supported yet.";
+  static final String INVALID_ARRAY = "Invalid java.sql.Array instance.";
+  static final String METHOD_NOT_IMPLEMENTED = "This method is not implemented.";
+  static final String OAUTH_TYPE_ERROR_MESSAGE = "Invalid Auth type specified";
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryFieldValueListWrapper.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryFieldValueListWrapper.java
new file mode 100644
index 0000000000..39740e0217
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryFieldValueListWrapper.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.cloud.bigquery.FieldList;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.FieldValueList;
+import java.util.List;
+
+/**
+ * Package-private facade that wraps the {@link FieldList} (the schema) and the {@link
+ * FieldValueList} (the row data).
+ */
+class BigQueryFieldValueListWrapper {
+
+  // Reference to the schema
+  private final FieldList fieldList;
+
+  // POJO representation of the JSON response
+  private final FieldValueList fieldValueList;
+
+  // Very similar to the FieldValueList above, except the reference arrives
+  // as a List<FieldValue> in the case of an Array
+  private final List<FieldValue> arrayFieldValueList;
+
+  // This flag marks the end of the stream for the ResultSet
+  private boolean isLast = false;
+  private final Exception exception;
+
+  static BigQueryFieldValueListWrapper of(
+      FieldList fieldList, FieldValueList fieldValueList, boolean... isLast) {
+    boolean isLastFlag = isLast != null && isLast.length == 1 && isLast[0];
+    return new BigQueryFieldValueListWrapper(fieldList, fieldValueList, null, isLastFlag, null);
+  }
+
+  static BigQueryFieldValueListWrapper getNestedFieldValueListWrapper(
+      FieldList fieldList, List<FieldValue> arrayFieldValueList, boolean... isLast) {
+    boolean isLastFlag = isLast != null && isLast.length == 1 && isLast[0];
+    return new BigQueryFieldValueListWrapper(
+        fieldList, null, arrayFieldValueList, isLastFlag, null);
+  }
+
+  static BigQueryFieldValueListWrapper ofError(Exception exception) {
+    return new BigQueryFieldValueListWrapper(null, null, null, true, exception);
+  }
+
+  private BigQueryFieldValueListWrapper(
+      FieldList fieldList,
+      FieldValueList fieldValueList,
+      List<FieldValue> arrayFieldValueList,
+      boolean isLast,
+      Exception exception) {
+    this.fieldList = fieldList;
+    this.fieldValueList = fieldValueList;
+    this.arrayFieldValueList = arrayFieldValueList;
+    this.isLast = isLast;
+    this.exception = exception;
+  }
+
+  public FieldList getFieldList() {
+    return this.fieldList;
+  }
+
+  public FieldValueList getFieldValueList() {
+    return this.fieldValueList;
+  }
+
+  public List<FieldValue> getArrayFieldValueList() {
+    return this.arrayFieldValueList;
+  }
+
+  public boolean isLast() {
+    return this.isLast;
+  }
+
+  public Exception getException() {
+    return this.exception;
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBulkInsertWriter.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBulkInsertWriter.java
new file mode 100644
index 0000000000..d4e9702621
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBulkInsertWriter.java
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.api.core.ApiFuture;
+import com.google.api.core.ApiFutureCallback;
+import com.google.api.core.ApiFutures;
+import com.google.api.gax.retrying.RetrySettings;
+import com.google.cloud.bigquery.storage.v1.AppendRowsResponse;
+import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient;
+import com.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest;
+import com.google.cloud.bigquery.storage.v1.Exceptions;
+import com.google.cloud.bigquery.storage.v1.Exceptions.StorageException;
+import com.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse;
+import com.google.cloud.bigquery.storage.v1.JsonStreamWriter;
+import com.google.cloud.bigquery.storage.v1.TableName;
+import com.google.cloud.bigquery.storage.v1.WriteStream;
+import com.google.common.util.concurrent.MoreExecutors;
+import com.google.gson.JsonArray;
+import com.google.protobuf.Descriptors.DescriptorValidationException;
+import java.io.IOException;
+import java.util.concurrent.Phaser;
+import javax.annotation.concurrent.GuardedBy;
+
+class BigQueryJdbcBulkInsertWriter {
+  private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString());
+  private JsonStreamWriter jsonStreamWriter;
+  // Tracks in-flight append requests; the initial party represents this writer itself.
+  private final Phaser openRequestCount = new Phaser(1);
+  private final Object streamLock = new Object();
+
+  @GuardedBy("streamLock")
+  private RuntimeException error = null;
+
+  void initialize(TableName parentTable, BigQueryWriteClient client, RetrySettings retrySettings)
+      throws IOException, DescriptorValidationException, InterruptedException {
+    WriteStream stream = WriteStream.newBuilder().setType(WriteStream.Type.PENDING).build();
+
+    CreateWriteStreamRequest createWriteStreamRequest =
+        CreateWriteStreamRequest.newBuilder()
+            .setParent(parentTable.toString())
+            .setWriteStream(stream)
+            .build();
+    WriteStream writeStream = client.createWriteStream(createWriteStreamRequest);
+
+    JsonStreamWriter.Builder jsonStreamWriterBuilder =
+        JsonStreamWriter.newBuilder(writeStream.getName(), writeStream.getTableSchema());
+
+    if (retrySettings != null) {
+      jsonStreamWriterBuilder.setRetrySettings(retrySettings);
+    }
+
+    this.jsonStreamWriter = jsonStreamWriterBuilder.build();
+  }
+
+  void append(JsonArray data, long offset) throws DescriptorValidationException, IOException {
+    synchronized (this.streamLock) {
+      if (this.error != null) {
+        throw this.error;
+      }
+    }
+
+    // Register before issuing the append so the callback (which may run
+    // synchronously on the direct executor) cannot deregister the phaser first.
+    openRequestCount.register();
+    ApiFuture<AppendRowsResponse> future = jsonStreamWriter.append(data, offset);
+    ApiFutures.addCallback(
+        future, new AppendCompleteCallback(this), MoreExecutors.directExecutor());
+  }
+
+  long cleanup(BigQueryWriteClient client) {
+    openRequestCount.arriveAndAwaitAdvance();
+    jsonStreamWriter.close();
+
+    synchronized (this.streamLock) {
+      if (this.error != null) {
+        throw this.error;
+      }
+    }
+
+    // Finalize the stream.
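+    // Usage note: this writer follows the Storage Write API pending-stream
+    // lifecycle. A minimal, illustrative caller (table name and batches are
+    // hypothetical) would look like:
+    //
+    //   writer.initialize(TableName.of("my-project", "my_dataset", "my_table"), client, null);
+    //   long offset = 0;
+    //   for (JsonArray batch : batches) {
+    //     writer.append(batch, offset);
+    //     offset += batch.size();
+    //   }
+    //   long rowCount = writer.cleanup(client);
+    //   // The finalized stream is then committed, e.g. via
+    //   // client.batchCommitWriteStreams(...), before the rows become visible.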
+ FinalizeWriteStreamResponse finalizeResponse = + client.finalizeWriteStream(jsonStreamWriter.getStreamName()); + LOG.finest("Rows written: " + finalizeResponse.getRowCount()); + return finalizeResponse.getRowCount(); + } + + String getStreamName() { + return jsonStreamWriter.getStreamName(); + } + + static class AppendCompleteCallback implements ApiFutureCallback { + + private final BigQueryJdbcBulkInsertWriter parent; + + AppendCompleteCallback(BigQueryJdbcBulkInsertWriter parent) { + this.parent = parent; + } + + public void onSuccess(AppendRowsResponse response) { + done(); + } + + public void onFailure(Throwable throwable) { + synchronized (this.parent.streamLock) { + if (this.parent.error == null) { + StorageException storageException = Exceptions.toStorageException(throwable); + this.parent.error = + (storageException != null) ? storageException : new RuntimeException(throwable); + } + } + done(); + } + + private void done() { + this.parent.openRequestCount.arriveAndDeregister(); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcCustomLogger.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcCustomLogger.java new file mode 100644 index 0000000000..9412b2fd79 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcCustomLogger.java @@ -0,0 +1,60 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import java.util.logging.Logger; + +class BigQueryJdbcCustomLogger extends Logger { + + protected BigQueryJdbcCustomLogger(String name, String resourceBundleName) { + super(name, resourceBundleName); + this.setParent(BigQueryJdbcRootLogger.getRootLogger()); + } + + BigQueryJdbcCustomLogger(String name) { + this(name, null); + this.setParent(BigQueryJdbcRootLogger.getRootLogger()); + } + + void finest(String format, Object... args) { + this.finest(() -> String.format(format, args)); + } + + void finer(String format, Object... args) { + this.finer(() -> String.format(format, args)); + } + + void fine(String format, Object... args) { + this.fine(() -> String.format(format, args)); + } + + void config(String format, Object... args) { + this.config(() -> String.format(format, args)); + } + + void info(String format, Object... args) { + this.info(() -> String.format(format, args)); + } + + void warning(String format, Object... args) { + this.warning(() -> String.format(format, args)); + } + + void severe(String format, Object... 
args) { + this.severe(() -> String.format(format, args)); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtility.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtility.java new file mode 100644 index 0000000000..f7be358dde --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtility.java @@ -0,0 +1,733 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.api.client.util.PemReader.readFirstSectionAndClose; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.OAUTH_TYPE_ERROR_MESSAGE; + +import com.google.api.client.util.PemReader; +import com.google.api.client.util.SecurityUtils; +import com.google.auth.oauth2.AccessToken; +import com.google.auth.oauth2.ClientId; +import com.google.auth.oauth2.ExternalAccountCredentials; +import com.google.auth.oauth2.GoogleCredentials; +import com.google.auth.oauth2.ImpersonatedCredentials; +import com.google.auth.oauth2.ServiceAccountCredentials; +import com.google.auth.oauth2.UserAuthorizer; +import com.google.auth.oauth2.UserCredentials; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import com.google.gson.Strictness; +import com.google.gson.stream.JsonReader; +import java.awt.Desktop; +import java.io.BufferedReader; +import java.io.ByteArrayInputStream; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.OutputStream; +import java.io.PrintWriter; +import java.io.Reader; +import java.io.StringReader; +import java.net.ServerSocket; +import java.net.Socket; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.security.GeneralSecurityException; +import java.security.KeyFactory; +import java.security.NoSuchAlgorithmException; +import java.security.PrivateKey; +import java.security.spec.InvalidKeySpecException; +import java.security.spec.PKCS8EncodedKeySpec; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +final class BigQueryJdbcOAuthUtility { + + private static final String USER_AUTH_SUCCESS_HTTP_RESPONSE = + "HTTP/1.1 200 OK\n" + + "Content-Length: 300\n" + + "Connection: close\n" + + "Content-Type: text/html; charset=utf-8\n" + + "\n" + + "\n" + + "Thank you for using JDBC Driver for Google BigQuery!\n" + + "You may now close the window."; + + private static final int USER_AUTH_TIMEOUT_MS = 120000; + private static final BigQueryJdbcCustomLogger LOG = + new 
BigQueryJdbcCustomLogger(BigQueryJdbcOAuthUtility.class.getName()); + + private static final Map BYOID_NAME_MAP = + new HashMap() { + { + put(BigQueryJdbcUrlUtility.BYOID_AUDIENCE_URI_PROPERTY_NAME, "audience"); + put(BigQueryJdbcUrlUtility.BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME, "credential_source"); + put(BigQueryJdbcUrlUtility.BYOID_SUBJECT_TOKEN_TYPE_PROPERTY_NAME, "subject_token_type"); + put(BigQueryJdbcUrlUtility.BYOID_TOKEN_URI_PROPERTY_NAME, "token_url"); + put( + BigQueryJdbcUrlUtility.BYOID_POOL_USER_PROJECT_PROPERTY_NAME, + "workforce_pool_user_project"); + put( + BigQueryJdbcUrlUtility.BYOID_SA_IMPERSONATION_URI_PROPERTY_NAME, + "service_account_impersonation_url"); + } + }; + + /** + * Parses the OAuth properties from the given URL. + * + * @param url The URL to parse. + * @return A map of OAuth properties. + */ + static Map parseOAuthProperties(DataSource ds, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + Map oauthProperties = new HashMap<>(); + + AuthType authType; + try { + authType = AuthType.fromValue(ds.getOAuthType()); + } catch (NumberFormatException exception) { + throw new IllegalArgumentException(OAUTH_TYPE_ERROR_MESSAGE); + } + oauthProperties.put(BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME, String.valueOf(authType)); + switch (authType) { + case GOOGLE_SERVICE_ACCOUNT: + // For using a Google Service Account (OAuth Type 0) + // need: project id, OAuthServiceAcctEmail and OAuthPvtKey or OAuthPvtKeyPath that can be + // .p12 or json. + // TODO: validation if .p12 or json file can be in getPropertyInfo can be handy for user + String serviceAccountEmail = ds.getOAuthServiceAcctEmail(); + String serviceAccountPK = ds.getOAuthPvtKey(); + String serviceAccountPrivateKeyPath = ds.getOAuthPvtKeyPath(); + String p12Password = ds.getOAuthP12Password(); + + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_SA_EMAIL_PROPERTY_NAME, serviceAccountEmail); + oauthProperties.put(BigQueryJdbcUrlUtility.OAUTH_P12_PASSWORD_PROPERTY_NAME, p12Password); + if (serviceAccountEmail != null && serviceAccountPK != null) { + oauthProperties.put(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME, serviceAccountPK); + } else { + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME, + serviceAccountPrivateKeyPath); + } + break; + case GOOGLE_USER_ACCOUNT: + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, ds.getOAuthClientId()); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, ds.getOAuthClientSecret()); + int reqGoogleDriveScope = ds.getRequestGoogleDriveScope(); + oauthProperties.put( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, + String.valueOf(reqGoogleDriveScope)); + LOG.fine("RequestGoogleDriveScope parsed."); + break; + case PRE_GENERATED_TOKEN: + String refreshToken = ds.getOAuthRefreshToken(); + if (refreshToken != null) { + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_REFRESH_TOKEN_PROPERTY_NAME, refreshToken); + LOG.fine("OAuthRefreshToken provided."); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, ds.getOAuthClientId()); + LOG.fine("OAuthClientId provided."); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, ds.getOAuthClientSecret()); + LOG.fine("OAuthClientSecret provided."); + break; + } + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_ACCESS_TOKEN_PROPERTY_NAME, ds.getOAuthAccessToken()); + LOG.fine("OAuthAccessToken provided."); + break; + case 
APPLICATION_DEFAULT_CREDENTIALS: + // For Application Default Credentials (OAuth Type 3) + // need: project id + break; + case EXTERNAL_ACCOUNT_AUTH: + // For External account authentication (OAuth Type 4) + // need: project id, OAuthPvtKey or OAuthPvtKeyPath or BYOID_PROPERTIES + String pvtKey = ds.getOAuthPvtKey(); + String pvtKeyPath = ds.getOAuthPvtKeyPath(); + if (pvtKey != null) { + oauthProperties.put(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME, pvtKey); + LOG.fine("OAuthPvtKey provided."); + } else if (pvtKeyPath != null) { + oauthProperties.put(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME, pvtKeyPath); + LOG.fine("OAuthPvtKeyPath provided."); + } else { + if (ds.getByoidAudienceUri() != null) { + oauthProperties.put( + BigQueryJdbcUrlUtility.BYOID_AUDIENCE_URI_PROPERTY_NAME, ds.getByoidAudienceUri()); + } + if (ds.getByoidCredentialSource() != null) { + oauthProperties.put( + BigQueryJdbcUrlUtility.BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME, + ds.getByoidCredentialSource()); + } + if (ds.getByoidPoolUserProject() != null) { + oauthProperties.put( + BigQueryJdbcUrlUtility.BYOID_POOL_USER_PROJECT_PROPERTY_NAME, + ds.getByoidPoolUserProject()); + } + if (ds.getByoidSAImpersonationUri() != null) { + oauthProperties.put( + BigQueryJdbcUrlUtility.BYOID_SA_IMPERSONATION_URI_PROPERTY_NAME, + ds.getByoidSAImpersonationUri()); + } + if (ds.getByoidSubjectTokenType() != null) { + oauthProperties.put( + BigQueryJdbcUrlUtility.BYOID_SUBJECT_TOKEN_TYPE_PROPERTY_NAME, + ds.getByoidSubjectTokenType()); + } + if (ds.getByoidTokenUri() != null) { + oauthProperties.put( + BigQueryJdbcUrlUtility.BYOID_TOKEN_URI_PROPERTY_NAME, ds.getByoidTokenUri()); + } + + String universeDomain = ds.getUniverseDomain(); + if (universeDomain != null) { + oauthProperties.put( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME, universeDomain); + LOG.fine( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME + + " provided. Caller : " + + callerClassName); + } + } + break; + } + + if (authType == AuthType.GOOGLE_SERVICE_ACCOUNT + || authType == AuthType.GOOGLE_USER_ACCOUNT + || authType == AuthType.PRE_GENERATED_TOKEN) { + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME, + ds.getOAuthSAImpersonationEmail()); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_CHAIN_PROPERTY_NAME, + ds.getOAuthSAImpersonationChain()); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME, + ds.getOAuthSAImpersonationScopes() != null + ? ds.getOAuthSAImpersonationScopes() + : BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_SCOPES_VALUE); + oauthProperties.put( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME, + ds.getOAuthSAImpersonationTokenLifetime() != null + ? ds.getOAuthSAImpersonationTokenLifetime() + : BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_VALUE); + } + return oauthProperties; + } + + /** + * Gets the credentials for the given Auth properties. + * + * @param authProperties A map of Auth properties. + * @return A GoogleCredentials object. 
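+   * <p>A minimal illustrative call for the Application Default Credentials path (the map
+   * contents here are hypothetical; in the driver these maps are produced from the
+   * connection URL):
+   *
+   * <pre>{@code
+   * Map<String, String> authProps = new HashMap<>();
+   * authProps.put(
+   *     BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME,
+   *     AuthType.APPLICATION_DEFAULT_CREDENTIALS.name());
+   * GoogleCredentials credentials =
+   *     BigQueryJdbcOAuthUtility.getCredentials(authProps, new HashMap<>(), "caller");
+   * }</pre>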
+ */ + static GoogleCredentials getCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + + AuthType authType = + AuthType.valueOf(authProperties.get(BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME)); + + GoogleCredentials credentials; + switch (authType) { + case GOOGLE_SERVICE_ACCOUNT: + credentials = + getGoogleServiceAccountCredentials(authProperties, overrideProperties, callerClassName); + break; + case GOOGLE_USER_ACCOUNT: + credentials = + getGoogleUserAccountCredentials(authProperties, overrideProperties, callerClassName); + break; + case PRE_GENERATED_TOKEN: + credentials = + getPreGeneratedTokensCredentials(authProperties, overrideProperties, callerClassName); + break; + case APPLICATION_DEFAULT_CREDENTIALS: + // This auth method doesn't support service account impersonation + return getApplicationDefaultCredentials(callerClassName); + case EXTERNAL_ACCOUNT_AUTH: + // This auth method doesn't support service account impersonation + return getExternalAccountAuthCredentials(authProperties, callerClassName); + default: + throw new IllegalStateException(OAUTH_TYPE_ERROR_MESSAGE); + } + + return getServiceAccountImpersonatedCredentials(credentials, authProperties); + } + + private static boolean isFileExists(String filename) { + try { + return filename != null && !filename.isEmpty() && Files.exists(Paths.get(filename)); + } catch (Exception e) { + // Filename is invalid + return false; + } + } + + private static boolean isJson(byte[] value) { + try { + // This is done this way to ensure strict Json parsing + // https://github.com/google/gson/issues/1208#issuecomment-2120764686 + InputStream stream = new ByteArrayInputStream(value); + InputStreamReader reader = new InputStreamReader(stream); + JsonReader jsonReader = new JsonReader(reader); + jsonReader.setStrictness(Strictness.STRICT); + JsonElement json = JsonParser.parseReader(jsonReader); + return json != null; + } catch (Exception e) { + // Unable to parse json string + return false; + } + } + + private static GoogleCredentials getGoogleServiceAccountCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + + ServiceAccountCredentials.Builder builder; + try { + final String pvtKeyPath = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME); + final String pvtKey = authProperties.get(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME); + final String pvtEmail = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_SA_EMAIL_PROPERTY_NAME); + final String p12Password = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_P12_PASSWORD_PROPERTY_NAME); + + final String keyPath = pvtKeyPath != null ? pvtKeyPath : pvtKey; + PrivateKey key = null; + byte[] keyBytes = pvtKey != null ? 
pvtKey.getBytes() : null; + + if (isFileExists(keyPath)) { + try (InputStream stream = new FileInputStream(keyPath)) { + int bufferSize = 1024 * 1024; + byte[] buffer = new byte[bufferSize]; + stream.read(buffer, 0, bufferSize); + keyBytes = buffer; + } + } + + InputStream stream = null; + if (isJson(keyBytes)) { + stream = new ByteArrayInputStream(keyBytes); + } else if (pvtKey != null) { + key = privateKeyFromPkcs8(pvtKey); + } else if (keyBytes != null) { + key = privateKeyFromP12Bytes(keyBytes, p12Password); + } + + if (stream != null) { + builder = ServiceAccountCredentials.fromStream(stream).toBuilder(); + } else if (pvtEmail != null && key != null) { + builder = + ServiceAccountCredentials.newBuilder().setClientEmail(pvtEmail).setPrivateKey(key); + } else { + LOG.severe("No valid Service Account credentials provided."); + throw new BigQueryJdbcRuntimeException("No valid credentials provided."); + } + + if (overrideProperties.containsKey(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME)) { + builder.setTokenServerUri( + new URI(overrideProperties.get(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME))); + } + if (overrideProperties.containsKey( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)) { + builder.setUniverseDomain( + overrideProperties.get(BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)); + } + } catch (URISyntaxException | IOException e) { + LOG.severe("Validation failure for Service Account credentials."); + throw new BigQueryJdbcRuntimeException(e); + } + LOG.info("GoogleCredentials instantiated. Auth Method: Service Account."); + return builder.build(); + } + + static UserAuthorizer getUserAuthorizer( + Map authProperties, + Map overrideProperties, + int port, + String callerClassName) + throws URISyntaxException { + LOG.finest("++enter++\t" + callerClassName); + List scopes = new ArrayList<>(); + scopes.add("https://www.googleapis.com/auth/bigquery"); + + // Add Google Drive scope conditionally + if (authProperties.containsKey( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME)) { + try { + int driveScopeValue = + Integer.parseInt( + authProperties.get( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME)); + if (driveScopeValue == 1) { + scopes.add("https://www.googleapis.com/auth/drive.readonly"); + LOG.fine("Added Google Drive read-only scope. Caller: " + callerClassName); + } + } catch (NumberFormatException e) { + LOG.severe( + "Invalid value for RequestGoogleDriveScope, defaulting to not request Drive scope." 
+ + " Caller: " + + callerClassName); + } + } + + List responseTypes = new ArrayList<>(); + responseTypes.add("code"); + + ClientId clientId = + ClientId.of( + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME), + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME)); + UserAuthorizer.Builder userAuthorizerBuilder = + UserAuthorizer.newBuilder() + .setClientId(clientId) + .setScopes(scopes) + .setCallbackUri(URI.create("http://localhost:" + port)); + + if (overrideProperties.containsKey(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME)) { + userAuthorizerBuilder.setTokenServerUri( + new URI(overrideProperties.get(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME))); + } + + return userAuthorizerBuilder.build(); + } + + static UserCredentials getCredentialsFromCode( + UserAuthorizer userAuthorizer, String code, String callerClassName) throws IOException { + LOG.finest("++enter++\t" + callerClassName); + return userAuthorizer.getCredentialsFromCode(code, URI.create("")); + } + + private static GoogleCredentials getGoogleUserAccountCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + try { + ServerSocket serverSocket = new ServerSocket(0); + serverSocket.setSoTimeout(USER_AUTH_TIMEOUT_MS); + int port = serverSocket.getLocalPort(); + UserAuthorizer userAuthorizer = + getUserAuthorizer(authProperties, overrideProperties, port, callerClassName); + + URL authURL = userAuthorizer.getAuthorizationUrl("user", "", URI.create("")); + String code; + + if (Desktop.isDesktopSupported()) { + Desktop.getDesktop().browse(authURL.toURI()); + + Socket socket = serverSocket.accept(); + + OutputStream outputStream = socket.getOutputStream(); + PrintWriter printWriter = new PrintWriter(outputStream); + + BufferedReader bufferedReader = + new BufferedReader(new InputStreamReader(socket.getInputStream())); + String response = bufferedReader.readLine(); + + Pattern p = Pattern.compile("(?<=code=).*?(?=&|$)"); + Matcher m = p.matcher(response); + + if (!m.find()) { + throw new BigQueryJdbcRuntimeException("Could not retrieve the code for user auth"); + } + code = m.group(); + + printWriter.println(USER_AUTH_SUCCESS_HTTP_RESPONSE); + printWriter.flush(); + socket.close(); + serverSocket.close(); + } else { + throw new BigQueryJdbcRuntimeException("User auth only supported in desktop environments"); + } + + return getCredentialsFromCode(userAuthorizer, code, callerClassName); + } catch (IOException | URISyntaxException ex) { + LOG.severe( + "Failed to establish connection using User Account authentication: %s", ex.getMessage()); + throw new BigQueryJdbcRuntimeException(ex); + } + } + + private static GoogleCredentials getPreGeneratedAccessTokenCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + GoogleCredentials.Builder builder = GoogleCredentials.newBuilder(); + if (overrideProperties.containsKey( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)) { + builder.setUniverseDomain( + overrideProperties.get(BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)); + } + LOG.info("Connection established. 
Auth Method: Pre-generated Access Token."); + return builder + .setAccessToken( + AccessToken.newBuilder() + .setTokenValue( + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_ACCESS_TOKEN_PROPERTY_NAME)) + .build()) + .build(); + } + + static GoogleCredentials getPreGeneratedTokensCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + if (authProperties.containsKey(BigQueryJdbcUrlUtility.OAUTH_REFRESH_TOKEN_PROPERTY_NAME)) { + try { + return getPreGeneratedRefreshTokenCredentials( + authProperties, overrideProperties, callerClassName); + } catch (URISyntaxException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + } else { + return getPreGeneratedAccessTokenCredentials( + authProperties, overrideProperties, callerClassName); + } + } + + static UserCredentials getPreGeneratedRefreshTokenCredentials( + Map authProperties, + Map overrideProperties, + String callerClassName) + throws URISyntaxException { + LOG.finest("++enter++\t" + callerClassName); + + UserCredentials.Builder userCredentialsBuilder = + UserCredentials.newBuilder() + .setRefreshToken( + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_REFRESH_TOKEN_PROPERTY_NAME)) + .setClientId(authProperties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME)) + .setClientSecret( + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME)); + + if (overrideProperties.containsKey(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME)) { + userCredentialsBuilder.setTokenServerUri( + new URI(overrideProperties.get(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME))); + } + if (overrideProperties.containsKey( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)) { + userCredentialsBuilder.setUniverseDomain( + overrideProperties.get(BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)); + } + LOG.info("Connection established. Auth Method: Pre-generated Refresh Token."); + return userCredentialsBuilder.build(); + } + + private static GoogleCredentials getApplicationDefaultCredentials(String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + try { + GoogleCredentials credentials = GoogleCredentials.getApplicationDefault(); + String principal = "unknown"; + if (credentials instanceof ServiceAccountCredentials) { + principal = ((ServiceAccountCredentials) credentials).getClientEmail(); + } else if (credentials instanceof UserCredentials) { + principal = "user credentials"; + } else if (credentials instanceof ExternalAccountCredentials) { + principal = "external account"; + } + LOG.info( + "Connection established. 
Auth Method: Application Default Credentials, Principal: %s.",
+          principal);
+      return credentials;
+    } catch (IOException exception) {
+      throw new BigQueryJdbcRuntimeException("Application default credentials not found.");
+    }
+  }
+
+  private static GoogleCredentials getExternalAccountAuthCredentials(
+      Map<String, String> authProperties, String callerClassName) {
+    LOG.finest("++enter++\t" + callerClassName);
+    try {
+      JsonObject jsonObject = null;
+      String credentialsPath = null;
+      if (authProperties.containsKey(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME)) {
+        String pvtKey =
+            authProperties.get(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME).trim();
+        // The value may be inline JSON or a path to a credentials file.
+        if (pvtKey.startsWith("{")) {
+          jsonObject = JsonParser.parseString(pvtKey).getAsJsonObject();
+        } else {
+          credentialsPath = pvtKey;
+        }
+      } else if (authProperties.containsKey(
+          BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME)) {
+        credentialsPath =
+            authProperties.get(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME);
+      } else {
+        jsonObject = new JsonObject();
+        for (String property : BigQueryJdbcUrlUtility.BYOID_PROPERTIES) {
+          if (!authProperties.containsKey(property)) {
+            continue;
+          }
+          if (Objects.equals(
+              property, BigQueryJdbcUrlUtility.BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME)) {
+            jsonObject.add(
+                BYOID_NAME_MAP.get(property),
+                JsonParser.parseString(authProperties.get(property)).getAsJsonObject());
+          } else {
+            jsonObject.addProperty(BYOID_NAME_MAP.get(property), authProperties.get(property));
+          }
+        }
+        if (authProperties.containsKey(
+            BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME)) {
+          jsonObject.addProperty(
+              "universe_domain",
+              authProperties.get(BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME));
+        }
+      }
+
+      if (credentialsPath != null) {
+        return ExternalAccountCredentials.fromStream(
+            Files.newInputStream(Paths.get(credentialsPath)));
+      } else if (jsonObject != null) {
+        return ExternalAccountCredentials.fromStream(
+            new ByteArrayInputStream(jsonObject.toString().getBytes()));
+      } else {
+        throw new IllegalArgumentException(
+            "Insufficient info provided for external authentication");
+      }
+    } catch (IOException e) {
+      throw new BigQueryJdbcRuntimeException(e);
+    }
+  }
+
+  // This method checks whether the connection string contains configuration for
+  // credentials impersonation. If not, it returns the regular credentials object.
+  // If an impersonated service account is provided, it returns a Credentials
+  // object accommodating that information.
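+  //
+  // Illustrative (hypothetical) connection properties that would exercise this path:
+  // a base service account plus an impersonation target, for example
+  //   OAuthType=0;OAuthServiceAcctEmail=base-sa@my-project.iam.gserviceaccount.com;
+  //   <impersonation-email property>=target-sa@my-project.iam.gserviceaccount.com
+  // The base credentials are then used to mint short-lived tokens for the target
+  // account via ImpersonatedCredentials.create(...) below.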
+ private static GoogleCredentials getServiceAccountImpersonatedCredentials( + GoogleCredentials credentials, Map authProperties) { + + String impersonationEmail = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME); + if (impersonationEmail == null || impersonationEmail.isEmpty()) { + return credentials; + } + + String impersonationChainString = + authProperties.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_CHAIN_PROPERTY_NAME); + List impersonationChain = null; + if (impersonationChainString != null && !impersonationChainString.isEmpty()) { + impersonationChain = Arrays.asList(impersonationChainString.split(",")); + } + + // Scopes has a default value, so it should never be null + List impersonationScopes = + Arrays.asList( + authProperties + .get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME) + .split(",")); + + // Token lifetime has a default value, so it should never be null + String impersonationLifetime = + authProperties.get( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME); + int impersonationLifetimeInt = 0; + try { + impersonationLifetimeInt = Integer.parseInt(impersonationLifetime); + } catch (NumberFormatException e) { + LOG.severe("Invalid value for ServiceAccountImpersonationTokenLifetime."); + throw new IllegalArgumentException( + "Invalid value for ServiceAccountImpersonationTokenLifetime: must be a positive integer.", + e); + } + + return ImpersonatedCredentials.create( + credentials, + impersonationEmail, + impersonationChain, + impersonationScopes, + impersonationLifetimeInt); + } + + static PrivateKey privateKeyFromP12Bytes(byte[] privateKey, String password) { + try { + InputStream stream = new ByteArrayInputStream(privateKey); + return SecurityUtils.loadPrivateKeyFromKeyStore( + SecurityUtils.getPkcs12KeyStore(), stream, "notasecret", "privatekey", password); + } catch (IOException | GeneralSecurityException e) { + LOG.warning("Unable to parse p12 file: " + e.getMessage()); + return null; + } + } + + static PrivateKey privateKeyFromPkcs8(String privateKeyPkcs8) { + try { + Reader reader = new StringReader(privateKeyPkcs8); + PemReader.Section section = readFirstSectionAndClose(reader, "PRIVATE KEY"); + if (section == null) { + throw new IOException("Invalid PKCS#8 data."); + } + byte[] bytes = section.getBase64DecodedBytes(); + PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes); + KeyFactory keyFactory = SecurityUtils.getRsaKeyFactory(); + return keyFactory.generatePrivate(keySpec); + } catch (NoSuchAlgorithmException | InvalidKeySpecException | IOException e) { + LOG.warning("Unable to parse pkcs8 secret: " + e.getMessage()); + return null; + } + } + + enum AuthType { + GOOGLE_SERVICE_ACCOUNT(0), + GOOGLE_USER_ACCOUNT(1), + PRE_GENERATED_TOKEN(2), + APPLICATION_DEFAULT_CREDENTIALS(3), + EXTERNAL_ACCOUNT_AUTH(4); + + private final int value; + + AuthType(int value) { + this.value = value; + } + + static AuthType fromValue(int value) { + for (AuthType authType : values()) { + if (authType.value == value) { + return authType; + } + } + throw new IllegalStateException(OAUTH_TYPE_ERROR_MESSAGE + ": " + value); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameter.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameter.java new file mode 100644 index 0000000000..cb11d14e46 --- /dev/null +++ 
b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameter.java @@ -0,0 +1,117 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.BigQueryParameterHandler.BigQueryStatementParameterType; + +class BigQueryJdbcParameter { + private int index; + private Object value; + private Class type; + private StandardSQLTypeName sqlType; + // Additional parameters needed for CallableStatement. + private String paramName; + private BigQueryStatementParameterType paramType; + private int scale; + + BigQueryJdbcParameter() {} + + BigQueryJdbcParameter(BigQueryJdbcParameter parameter) { + this.index = parameter.index; + this.value = parameter.value; + this.type = parameter.type; + this.sqlType = parameter.sqlType; + } + + int getIndex() { + return index; + } + + void setIndex(int index) { + this.index = index; + } + + Object getValue() { + return value; + } + + void setValue(Object value) { + this.value = value; + } + + Class getType() { + return type; + } + + public void setType(Class type) { + this.type = type; + } + + StandardSQLTypeName getSqlType() { + return sqlType; + } + + void setSqlType(StandardSQLTypeName sqlType) { + this.sqlType = sqlType; + } + + String getParamName() { + return paramName; + } + + void setParamName(String paramName) { + this.paramName = paramName; + } + + BigQueryStatementParameterType getParamType() { + return paramType; + } + + void setParamType(BigQueryStatementParameterType paramType) { + this.paramType = paramType; + } + + int getScale() { + return scale; + } + + void setScale(int scale) { + this.scale = scale; + } + + @Override + public String toString() { + return "BigQueryJdbcParameter{" + + "index=" + + index + + ", value=" + + value + + ", type=" + + type + + ", sqlType=" + + sqlType + + ", paramName='" + + paramName + + '\'' + + ", paramType=" + + paramType.name() + + ", scale=" + + scale + + '}'; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtility.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtility.java new file mode 100644 index 0000000000..52eef2739d --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtility.java @@ -0,0 +1,306 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.storage.v1.stub.BigQueryReadStubSettings.defaultGrpcTransportProviderBuilder; + +import com.google.api.client.http.HttpTransport; +import com.google.api.client.http.apache.v5.Apache5HttpTransport; +import com.google.api.gax.rpc.TransportChannelProvider; +import com.google.auth.http.HttpTransportFactory; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.cloud.http.HttpTransportOptions; +import io.grpc.HttpConnectProxiedSocketAddress; +import io.grpc.ProxiedSocketAddress; +import io.grpc.ProxyDetector; +import io.grpc.netty.shaded.io.grpc.netty.GrpcSslContexts; +import io.grpc.netty.shaded.io.netty.handler.ssl.SslContext; +import java.io.FileInputStream; +import java.io.IOException; +import java.net.InetSocketAddress; +import java.net.SocketAddress; +import java.security.GeneralSecurityException; +import java.security.KeyStore; +import java.util.HashMap; +import java.util.Map; +import java.util.regex.Pattern; +import javax.net.ssl.SSLContext; +import javax.net.ssl.TrustManagerFactory; +import org.apache.hc.client5.http.auth.AuthScope; +import org.apache.hc.client5.http.auth.UsernamePasswordCredentials; +import org.apache.hc.client5.http.impl.DefaultAuthenticationStrategy; +import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider; +import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; +import org.apache.hc.client5.http.impl.classic.HttpClientBuilder; +import org.apache.hc.client5.http.impl.classic.HttpClients; +import org.apache.hc.client5.http.impl.io.PoolingHttpClientConnectionManagerBuilder; +import org.apache.hc.client5.http.impl.routing.DefaultProxyRoutePlanner; +import org.apache.hc.client5.http.routing.HttpRoutePlanner; +import org.apache.hc.client5.http.ssl.SSLConnectionSocketFactory; +import org.apache.hc.core5.http.HttpHost; + +final class BigQueryJdbcProxyUtility { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryJdbcProxyUtility.class.getName()); + static final String validPortRegex = + "^([1-9][0-9]{0,3}|[1-5][0-9]{4}|6[0-4][0-9]{3}|65[0-4][0-9]{2}|655[0-2][0-9]|6553[0-5])$"; + + private BigQueryJdbcProxyUtility() {} + + static Map parseProxyProperties(DataSource ds, String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + Map proxyProperties = new HashMap<>(); + String proxyHost = ds.getProxyHost(); + if (proxyHost != null) { + proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME, proxyHost); + } + String proxyPort = ds.getProxyPort(); + if (proxyPort != null) { + if (!Pattern.compile(validPortRegex).matcher(proxyPort).find()) { + throw new IllegalArgumentException( + String.format( + "Illegal port number provided %s. 
Please provide a valid port number.", proxyPort)); + } + proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME, proxyPort); + } + String proxyUid = ds.getProxyUid(); + if (proxyUid != null) { + proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME, proxyUid); + } + String proxyPwd = ds.getProxyPwd(); + if (proxyPwd != null) { + proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME, proxyPwd); + } + + boolean isMissingProxyHostOrPortWhenProxySet = + (proxyHost == null && proxyPort != null) || (proxyHost != null && proxyPort == null); + if (isMissingProxyHostOrPortWhenProxySet) { + throw new IllegalArgumentException( + "Both ProxyHost and ProxyPort parameters need to be specified. No defaulting behavior" + + " occurs."); + } + boolean isMissingProxyUidOrPwdWhenAuthSet = + (proxyUid == null && proxyPwd != null) || (proxyUid != null && proxyPwd == null); + if (isMissingProxyUidOrPwdWhenAuthSet) { + throw new IllegalArgumentException( + "Both ProxyUid and ProxyPwd parameters need to be specified for authentication."); + } + boolean isProxyAuthSetWithoutProxySettings = proxyUid != null && proxyHost == null; + if (isProxyAuthSetWithoutProxySettings) { + throw new IllegalArgumentException( + "Proxy authentication provided via connection string with no proxy host or port set."); + } + return proxyProperties; + } + + static Map parseProxyProperties(String URL, String callerClassName) { + return parseProxyProperties(DataSource.fromUrl(URL), callerClassName); + } + + static HttpTransportOptions getHttpTransportOptions( + Map proxyProperties, + String sslTrustStorePath, + String sslTrustStorePassword, + Integer connectTimeout, + Integer readTimeout, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + + boolean hasProxyOrSsl = + proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME) + || sslTrustStorePath != null; + boolean hasTimeoutConfig = connectTimeout != null || readTimeout != null; + + if (!hasProxyOrSsl && !hasTimeoutConfig) { + return null; + } + + HttpTransportOptions.Builder httpTransportOptionsBuilder = HttpTransportOptions.newBuilder(); + if (hasProxyOrSsl) { + httpTransportOptionsBuilder.setHttpTransportFactory( + getHttpTransportFactory( + proxyProperties, sslTrustStorePath, sslTrustStorePassword, callerClassName)); + } + + if (connectTimeout != null) { + httpTransportOptionsBuilder.setConnectTimeout(connectTimeout); + } + if (readTimeout != null) { + httpTransportOptionsBuilder.setReadTimeout(readTimeout); + } + + return httpTransportOptionsBuilder.build(); + } + + private static HttpTransportFactory getHttpTransportFactory( + Map proxyProperties, + String sslTrustStorePath, + String sslTrustStorePassword, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + HttpClientBuilder httpClientBuilder = HttpClients.custom(); + boolean explicitProxySet = + proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME); + + if (explicitProxySet) { + HttpHost proxyHostDetails = + new HttpHost( + proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME), + Integer.parseInt( + proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME))); + HttpRoutePlanner httpRoutePlanner = new DefaultProxyRoutePlanner(proxyHostDetails); + httpClientBuilder.setRoutePlanner(httpRoutePlanner); + addAuthToProxyIfPresent(proxyProperties, httpClientBuilder, callerClassName); + } else { + httpClientBuilder.useSystemProperties(); + } + + if (sslTrustStorePath != null) { 
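+      // The block below loads a caller-supplied truststore and installs it as the
+      // TLS trust source for the Apache HTTP client, which lets the driver reach
+      // endpoints whose certificates are signed by a private CA. Such a truststore
+      // can be produced with keytool, for example (hypothetical paths):
+      //   keytool -importcert -alias my-ca -file my-ca.pem -keystore truststore.jks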
+ try (FileInputStream trustStoreStream = new FileInputStream(sslTrustStorePath)) { + KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType()); + char[] trustStorePasswordChars = + sslTrustStorePassword != null ? sslTrustStorePassword.toCharArray() : null; + trustStore.load(trustStoreStream, trustStorePasswordChars); + + TrustManagerFactory trustManagerFactory = + TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + trustManagerFactory.init(trustStore); + + SSLContext sslContext = SSLContext.getInstance("TLS"); + sslContext.init(null, trustManagerFactory.getTrustManagers(), null); + + SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext); + httpClientBuilder.setConnectionManager( + PoolingHttpClientConnectionManagerBuilder.create() + .setSSLSocketFactory(sslSocketFactory) + .build()); + } catch (IOException | GeneralSecurityException e) { + throw new BigQueryJdbcRuntimeException(e); + } + } + addAuthToProxyIfPresent(proxyProperties, httpClientBuilder, callerClassName); + + CloseableHttpClient httpClient = httpClientBuilder.build(); + final HttpTransport httpTransport = new Apache5HttpTransport(httpClient); + return () -> httpTransport; + } + + private static void addAuthToProxyIfPresent( + Map proxyProperties, + HttpClientBuilder closeableHttpClientBuilder, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + if (proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME) + && proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME)) { + + AuthScope authScope = + new AuthScope( + proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME), + Integer.parseInt( + proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME))); + UsernamePasswordCredentials usernamePasswordCredentials = + new UsernamePasswordCredentials( + proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME), + proxyProperties + .get(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME) + .toCharArray()); + + BasicCredentialsProvider proxyCredentialsProvider = new BasicCredentialsProvider(); + proxyCredentialsProvider.setCredentials(authScope, usernamePasswordCredentials); + closeableHttpClientBuilder.setDefaultCredentialsProvider(proxyCredentialsProvider); + closeableHttpClientBuilder.setProxyAuthenticationStrategy( + DefaultAuthenticationStrategy.INSTANCE); // order of challenge? 
so it will show up + } + } + + static TransportChannelProvider getTransportChannelProvider( + Map proxyProperties, + String sslTrustStorePath, + String sslTrustStorePassword, + String callerClassName) { + LOG.finest("++enter++\t" + callerClassName); + boolean hasProxy = proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME); + boolean hasSsl = sslTrustStorePath != null; + + if (!hasProxy && !hasSsl) { + return null; + } + + TransportChannelProvider transportChannelProvider = + defaultGrpcTransportProviderBuilder() + .setChannelConfigurator( + managedChannelBuilder -> { + if (hasProxy) { + managedChannelBuilder.proxyDetector( + new ProxyDetector() { + @Override + public ProxiedSocketAddress proxyFor(SocketAddress socketAddress) { + return getHttpConnectProxiedSocketAddress( + (InetSocketAddress) socketAddress, proxyProperties); + } + }); + } + if (hasSsl + && managedChannelBuilder + instanceof io.grpc.netty.shaded.io.grpc.netty.NettyChannelBuilder) { + try (FileInputStream trustStoreStream = + new FileInputStream(sslTrustStorePath)) { + KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType()); + char[] trustStorePasswordChars = + sslTrustStorePassword != null + ? sslTrustStorePassword.toCharArray() + : null; + trustStore.load(trustStoreStream, trustStorePasswordChars); + + TrustManagerFactory trustManagerFactory = + TrustManagerFactory.getInstance( + TrustManagerFactory.getDefaultAlgorithm()); + trustManagerFactory.init(trustStore); + + SslContext grpcSslContext = + GrpcSslContexts.forClient().trustManager(trustManagerFactory).build(); + ((io.grpc.netty.shaded.io.grpc.netty.NettyChannelBuilder) + managedChannelBuilder) + .sslContext(grpcSslContext); + + } catch (IOException | GeneralSecurityException e) { + throw new BigQueryJdbcRuntimeException(e); + } + } + return managedChannelBuilder; + }) + .build(); + return transportChannelProvider; + } + + private static HttpConnectProxiedSocketAddress getHttpConnectProxiedSocketAddress( + InetSocketAddress socketAddress, Map proxyProperties) { + String proxyHost = proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME); + int proxyPort = + Integer.parseInt(proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME)); + HttpConnectProxiedSocketAddress.Builder builder = + HttpConnectProxiedSocketAddress.newBuilder() + .setProxyAddress(new InetSocketAddress(proxyHost, proxyPort)) + .setTargetAddress(socketAddress); + if (proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME) + && proxyProperties.containsKey(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME)) { + builder.setUsername(proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME)); + builder.setPassword(proxyProperties.get(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME)); + } + return builder.build(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcRootLogger.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcRootLogger.java new file mode 100644 index 0000000000..a672344155 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcRootLogger.java @@ -0,0 +1,199 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import java.io.IOException; +import java.lang.management.ManagementFactory; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardCopyOption; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.Optional; +import java.util.logging.ConsoleHandler; +import java.util.logging.FileHandler; +import java.util.logging.Formatter; +import java.util.logging.Handler; +import java.util.logging.Level; +import java.util.logging.LogRecord; +import java.util.logging.Logger; + +/** This class is used to log messages from the BigQuery JDBC Driver. */ +class BigQueryJdbcRootLogger { + + /** + * Note: Each connection will have its own file handler with the level and logPath specified in + * the connection properties. But the logs will be driver logs and not connection specific. + */ + private static final Logger logger = Logger.getLogger("com.google.cloud.bigquery"); + + private static final Logger storageLogger = Logger.getLogger("com.google.cloud.bigquery.storage"); + private static final boolean isTest = Boolean.getBoolean("JDBC_TESTS"); + + private static Handler fileHandler = null; + private static Path currentLogPath = null; + private static int fileCounter = 0; + + static { + logger.setUseParentHandlers(false); + storageLogger.setUseParentHandlers(true); + if (isTest) { + ConsoleHandler consoleHandler = new ConsoleHandler(); + consoleHandler.setLevel(Level.SEVERE); + consoleHandler.setFormatter(getFormatter()); + logger.addHandler(consoleHandler); + } + } + + public static Formatter getFormatter() { + return new Formatter() { + private static final String PATTERN = "yyyy-MM-dd HH:mm:ss.SSS"; + private static final String FORMAT = + "%1$s %2$5s %3$d --- [%4$-7.15s] %5$-50s %6$-20s: %7$s%8$s"; + private static final int MAX_THREAD_NAME_LENGTH = 15; + + /** + * Returns the thread for the given thread id. + * + * @param threadId ID for the thread being logged. + * @return returns the thread + */ + Optional getThread(long threadId) { + return Thread.getAllStackTraces().keySet().stream() + .filter(thread -> thread.getId() == threadId) + .findFirst(); + } + + @Override + public String format(LogRecord record) { + String date = new SimpleDateFormat(PATTERN).format(new Date(record.getMillis())); + String threadName = + getThread(record.getThreadID()) + .map(Thread::getName) + .map( + name -> + name.length() > MAX_THREAD_NAME_LENGTH + ? 
name.substring(name.length() - MAX_THREAD_NAME_LENGTH) + : name) + .orElse(""); + long processId = + Long.parseLong(ManagementFactory.getRuntimeMXBean().getName().split("@")[0]); + String sourceClassName = record.getLoggerName(); + String sourceMethodName = record.getSourceMethodName(); + return String.format( + FORMAT, + date, + record.getLevel().getName(), + processId, + threadName, + sourceClassName, + sourceMethodName, + record.getMessage(), + System.lineSeparator()); + } + }; + } + + public static Logger getRootLogger() { + return logger; + } + + private static void setHandler() throws IOException { + // If Console handler exists, remove it. + // If File handler exists, use it. Else create new one. + for (Handler h : logger.getHandlers()) { + if (h instanceof ConsoleHandler) { + if (!isTest) { + h.close(); + logger.removeHandler(h); + } + } else if (h instanceof FileHandler) { + fileHandler = h; + } + } + + if (fileHandler == null) { + String fileName = String.format("BigQueryJdbc%d", fileCounter); + fileCounter++; + + currentLogPath = Files.createTempFile(fileName, ".log"); + currentLogPath.toFile().deleteOnExit(); + + fileHandler = new FileHandler(currentLogPath.toString(), 0, 1, true); + logger.addHandler(fileHandler); + } + } + + public static void setLevel(Level level, String logPath) throws IOException { + if (level != Level.OFF) { + setPath(logPath); + if (logger.getHandlers().length == 0) { + setHandler(); + fileHandler.setFormatter(getFormatter()); + logger.setUseParentHandlers(false); + } + fileHandler.setLevel(level); + logger.setLevel(level); + } else { + for (Handler h : logger.getHandlers()) { + h.close(); + logger.removeHandler(h); + } + fileHandler = null; + currentLogPath = null; + } + } + + static void setPath(String logPath) { + try { + if (!logPath.isEmpty() && !logPath.endsWith("/")) { + logPath = logPath + "/"; + } + Path dir = Paths.get(logPath); + if (!Files.exists(dir)) { + Files.createDirectory(dir); + } + + String fileName = String.format("BigQueryJdbc%d.log", fileCounter); + fileCounter++; + Path destination = Paths.get(logPath + fileName).toAbsolutePath(); + + if (currentLogPath != null && !currentLogPath.equals(destination)) { + Path source = Paths.get(currentLogPath.toUri()); + Files.move(source, destination, StandardCopyOption.REPLACE_EXISTING); + } + + currentLogPath = destination; + fileHandler = new FileHandler(currentLogPath.toString(), 0, 1, true); + fileHandler.setFormatter(getFormatter()); + + for (Handler h : logger.getHandlers()) { + if (h instanceof FileHandler) { + h.close(); + logger.removeHandler(h); + break; + } + } + + logger.addHandler(fileHandler); + + } catch (IOException ex) { + logger.warning("Log File warning : " + ex); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcTypeMappings.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcTypeMappings.java new file mode 100644 index 0000000000..b95ac02302 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcTypeMappings.java @@ -0,0 +1,159 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.api.core.InternalApi;
+import com.google.cloud.bigquery.StandardSQLTypeName;
+import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException;
+import com.google.common.collect.ImmutableMap;
+import com.google.gson.JsonObject;
+import java.math.BigDecimal;
+import java.sql.Array;
+import java.sql.Date;
+import java.sql.Struct;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.util.AbstractMap.SimpleEntry;
+import java.util.Map;
+
+@InternalApi
+class BigQueryJdbcTypeMappings {
+
+  static final Map<StandardSQLTypeName, Class<?>> standardSQLToJavaTypeMapping =
+      ImmutableMap.ofEntries(
+          entry(StandardSQLTypeName.INT64, Long.class),
+          entry(StandardSQLTypeName.BOOL, Boolean.class),
+          entry(StandardSQLTypeName.FLOAT64, Double.class),
+          entry(StandardSQLTypeName.NUMERIC, BigDecimal.class),
+          entry(StandardSQLTypeName.BIGNUMERIC, BigDecimal.class),
+          entry(StandardSQLTypeName.STRING, String.class),
+          entry(StandardSQLTypeName.TIMESTAMP, Timestamp.class),
+          entry(StandardSQLTypeName.DATE, Date.class),
+          entry(StandardSQLTypeName.TIME, Time.class),
+          entry(StandardSQLTypeName.DATETIME, Timestamp.class),
+          entry(StandardSQLTypeName.GEOGRAPHY, String.class),
+          entry(StandardSQLTypeName.JSON, String.class),
+          entry(StandardSQLTypeName.INTERVAL, String.class),
+          entry(StandardSQLTypeName.RANGE, String.class),
+          entry(StandardSQLTypeName.BYTES, byte[].class),
+          entry(StandardSQLTypeName.STRUCT, Struct.class),
+          entry(StandardSQLTypeName.ARRAY, Array.class));
+
+  static final Map<StandardSQLTypeName, Integer> standardSQLToJavaSqlTypesMapping =
+      ImmutableMap.ofEntries(
+          entry(StandardSQLTypeName.INT64, Types.BIGINT),
+          entry(StandardSQLTypeName.BOOL, Types.BOOLEAN),
+          entry(StandardSQLTypeName.FLOAT64, Types.DOUBLE),
+          entry(StandardSQLTypeName.NUMERIC, Types.NUMERIC),
+          entry(StandardSQLTypeName.BIGNUMERIC, Types.NUMERIC),
+          entry(StandardSQLTypeName.STRING, Types.NVARCHAR),
+          entry(StandardSQLTypeName.TIMESTAMP, Types.TIMESTAMP),
+          entry(StandardSQLTypeName.DATE, Types.DATE),
+          entry(StandardSQLTypeName.TIME, Types.TIME),
+          entry(StandardSQLTypeName.DATETIME, Types.TIMESTAMP),
+          entry(StandardSQLTypeName.GEOGRAPHY, Types.OTHER),
+          entry(StandardSQLTypeName.JSON, Types.OTHER),
+          entry(StandardSQLTypeName.INTERVAL, Types.OTHER),
+          entry(StandardSQLTypeName.RANGE, Types.OTHER),
+          entry(StandardSQLTypeName.BYTES, Types.VARBINARY),
+          entry(StandardSQLTypeName.STRUCT, Types.STRUCT),
+          entry(StandardSQLTypeName.ARRAY, Types.ARRAY));
+
+  static final Map<Integer, Class<?>> javaSQLToJavaTypeMapping =
+      ImmutableMap.ofEntries(
+          entry(Types.BIGINT, Long.class),
+          entry(Types.INTEGER, Integer.class),
+          entry(Types.BOOLEAN, Boolean.class),
+          entry(Types.DOUBLE, Double.class),
+          entry(Types.FLOAT, Float.class),
+          entry(Types.NUMERIC, BigDecimal.class),
+          entry(Types.VARCHAR, String.class),
+          entry(Types.NVARCHAR, String.class),
+          entry(Types.TIMESTAMP, Timestamp.class),
+          entry(Types.DATE, Date.class),
+          entry(Types.TIME, Time.class),
+          entry(Types.OTHER, String.class),
+          entry(Types.BINARY, byte[].class),
+          entry(Types.VARBINARY, byte[].class),
+          entry(Types.STRUCT, Struct.class),
+          entry(Types.BIT, Boolean.class),
+          entry(Types.ARRAY, Array.class));
+
+  static StandardSQLTypeName classToType(Class<?> type)
+      throws BigQueryJdbcSqlFeatureNotSupportedException {
+    if (Boolean.class.isAssignableFrom(type)) {
+      return StandardSQLTypeName.BOOL;
+    } else if (String.class.isAssignableFrom(type)) {
+      // GEOGRAPHY, DATETIME and JSON parameters also arrive as String, so STRING is the only
+      // mapping a String value can reach here.
+      return StandardSQLTypeName.STRING;
+    } else if (Integer.class.isAssignableFrom(type)) {
+      return StandardSQLTypeName.INT64;
+    } else if (Long.class.isAssignableFrom(type)) {
+      return StandardSQLTypeName.INT64;
+    } else if (Double.class.isAssignableFrom(type)) {
+      return StandardSQLTypeName.FLOAT64;
+    } else if (Float.class.isAssignableFrom(type)) {
+      return StandardSQLTypeName.FLOAT64;
+    } else if (BigDecimal.class.isAssignableFrom(type)) {
+      // BIGNUMERIC values are also carried as BigDecimal; NUMERIC is the only mapping a
+      // BigDecimal value can reach here.
+      return StandardSQLTypeName.NUMERIC;
+    } else if (Date.class.isAssignableFrom(type)) {
+      return StandardSQLTypeName.DATE;
+    } else if (Timestamp.class.isAssignableFrom(type)) {
+      return StandardSQLTypeName.TIMESTAMP;
+    } else if (Time.class.isAssignableFrom(type)) {
+      return StandardSQLTypeName.TIME;
+    } else if (JsonObject.class.isAssignableFrom(type)) {
+      return StandardSQLTypeName.JSON;
+    } else if (Byte.class.isAssignableFrom(type)) {
+      return StandardSQLTypeName.BYTES;
+    } else if (Array.class.isAssignableFrom(type)) {
+      return StandardSQLTypeName.ARRAY;
+    } else if (Struct.class.isAssignableFrom(type)) {
+      return StandardSQLTypeName.STRUCT;
+    } else if (byte[].class.isAssignableFrom(type)) {
+      return StandardSQLTypeName.BYTES;
+    }
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(
+        "Unsupported object type for QueryParameter: " + type);
+  }
+
+  static Class<?> getJavaType(int javaSQLType)
+      throws BigQueryJdbcSqlFeatureNotSupportedException {
+    if (!javaSQLToJavaTypeMapping.containsKey(javaSQLType)) {
+      throw new BigQueryJdbcSqlFeatureNotSupportedException(
+          "Unsupported Java type for SQL type: " + javaSQLType);
+    }
+    Class<?> javaType = javaSQLToJavaTypeMapping.get(javaSQLType);
+    if (javaType == null) {
+      // This should never happen unless the map was initialized with null values.
+      throw new BigQueryJdbcSqlFeatureNotSupportedException(
+          "Unsupported Java type for SQL type: " + javaSQLType);
+    }
+    return javaType;
+  }
+
+  private static <K, V> SimpleEntry<K, V> entry(K key, V value) {
+    return new SimpleEntry<>(key, value);
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtility.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtility.java
new file mode 100644
index 0000000000..5b89cf27ee
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtility.java
@@ -0,0 +1,819 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.api.client.util.escape.CharEscapers;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException;
+import com.google.common.base.Splitter;
+import com.google.common.collect.ImmutableList;
+import com.google.common.net.UrlEscapers;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Properties;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * This class implements all the methods that parse Connection property values from the Connection
+ * String.
+ */
+final class BigQueryJdbcUrlUtility {
+
+  private static final Map<String, Map<String, String>> PARSE_CACHE =
+      Collections.synchronizedMap(
+          new LinkedHashMap<String, Map<String, String>>(50, 0.75f, true) {
+            @Override
+            protected boolean removeEldestEntry(Map.Entry<String, Map<String, String>> eldest) {
+              return size() > 50; // bound the cache size (evicts the least recently used entry)
+            }
+          });
+
+  // TODO: Add all Connection options
+  static final String ALLOW_LARGE_RESULTS_PROPERTY_NAME = "AllowLargeResults";
+  static final String LARGE_RESULTS_TABLE_PROPERTY_NAME = "LargeResultTable";
+  static final String LARGE_RESULTS_DATASET_PROPERTY_NAME = "LargeResultDataset";
+  static final String UNSUPPORTED_HTAPI_FALLBACK_PROPERTY_NAME = "UnsupportedHTAPIFallback";
+  static final boolean DEFAULT_UNSUPPORTED_HTAPI_FALLBACK_VALUE = true;
+  static final String DESTINATION_DATASET_EXPIRATION_TIME_PROPERTY_NAME =
+      "LargeResultsDatasetExpirationTime";
+  static final long DEFAULT_DESTINATION_DATASET_EXPIRATION_TIME_VALUE = 3600000L;
+  static final boolean DEFAULT_ALLOW_LARGE_RESULTS = true;
+  static final String QUERY_DIALECT_PROPERTY_NAME = "QueryDialect";
+  static final String DEFAULT_QUERY_DIALECT_VALUE = "SQL";
+  static final String UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME = "universeDomain";
+  static final String DEFAULT_UNIVERSE_DOMAIN_VALUE = "googleapis.com";
+  static final String PROJECT_ID_PROPERTY_NAME = "ProjectId";
+  static final String DEFAULT_DATASET_PROPERTY_NAME = "DefaultDataset";
+  static final String OAUTH_TYPE_PROPERTY_NAME = "OAuthType";
+  static final String HTAPI_ACTIVATION_RATIO_PROPERTY_NAME = "HighThroughputActivationRatio";
+  static final String KMS_KEY_NAME_PROPERTY_NAME = "KMSKeyName";
+  static final String QUERY_PROPERTIES_NAME = "QueryProperties";
+  // TODO: adjust this value before private preview based on performance testing.
+  static final int DEFAULT_HTAPI_ACTIVATION_RATIO_VALUE = 2;
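+  // Illustrative sketch only (the URL prefix and exact option values below are assumptions,
+  // not a normative contract): connection options such as the ones declared in this class are
+  // supplied as semicolon-separated key=value pairs after the first ';' of the JDBC URL and are
+  // parsed by parseUrl(String) further down in this file, e.g.
+  //
+  //   jdbc:bigquery://host;ProjectId=my-project;EnableHighThroughputAPI=1;
+  //       HighThroughputActivationRatio=2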
+ static final String HTAPI_MIN_TABLE_SIZE_PROPERTY_NAME = "HighThroughputMinTableSize"; + static final int DEFAULT_HTAPI_MIN_TABLE_SIZE_VALUE = 100; + static final int DEFAULT_OAUTH_TYPE_VALUE = -1; + static final String LOCATION_PROPERTY_NAME = "Location"; + static final String ENDPOINT_OVERRIDES_PROPERTY_NAME = "EndpointOverrides"; + static final String PRIVATE_SERVICE_CONNECT_PROPERTY_NAME = "PrivateServiceConnectUris"; + static final String OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME = + "ServiceAccountImpersonationEmail"; + static final String DEFAULT_OAUTH_SA_IMPERSONATION_EMAIL_VALUE = null; + static final String OAUTH_SA_IMPERSONATION_CHAIN_PROPERTY_NAME = + "ServiceAccountImpersonationChain"; + static final String DEFAULT_OAUTH_SA_IMPERSONATION_CHAIN_VALUE = null; + static final String OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME = + "ServiceAccountImpersonationScopes"; + static final String DEFAULT_OAUTH_SA_IMPERSONATION_SCOPES_VALUE = + "https://www.googleapis.com/auth/bigquery"; + static final String OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME = + "ServiceAccountImpersonationTokenLifetime"; + static final String DEFAULT_OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_VALUE = "3600"; + static final String OAUTH_SA_EMAIL_PROPERTY_NAME = "OAuthServiceAcctEmail"; + static final String OAUTH_PVT_KEY_PATH_PROPERTY_NAME = "OAuthPvtKeyPath"; + static final String OAUTH_P12_PASSWORD_PROPERTY_NAME = "OAuthP12Password"; + static final String DEFAULT_OAUTH_P12_PASSWORD_VALUE = "notasecret"; + static final String OAUTH_PVT_KEY_PROPERTY_NAME = "OAuthPvtKey"; + static final String OAUTH2_TOKEN_URI_PROPERTY_NAME = "OAUTH2"; + static final String HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME = "READ_API"; + static final String BIGQUERY_ENDPOINT_OVERRIDE_PROPERTY_NAME = "BIGQUERY"; + static final String STS_ENDPOINT_OVERRIDE_PROPERTY_NAME = "STS"; + static final String OAUTH_ACCESS_TOKEN_PROPERTY_NAME = "OAuthAccessToken"; + static final String OAUTH_REFRESH_TOKEN_PROPERTY_NAME = "OAuthRefreshToken"; + static final String OAUTH_CLIENT_ID_PROPERTY_NAME = "OAuthClientId"; + static final String OAUTH_CLIENT_SECRET_PROPERTY_NAME = "OAuthClientSecret"; + static final String ENABLE_HTAPI_PROPERTY_NAME = "EnableHighThroughputAPI"; + static final String PROXY_HOST_PROPERTY_NAME = "ProxyHost"; + static final String PROXY_PORT_PROPERTY_NAME = "ProxyPort"; + static final String PROXY_USER_ID_PROPERTY_NAME = "ProxyUid"; + static final String PROXY_PASSWORD_PROPERTY_NAME = "ProxyPwd"; + static final String HTTP_CONNECT_TIMEOUT_PROPERTY_NAME = "HttpConnectTimeout"; + static final String HTTP_READ_TIMEOUT_PROPERTY_NAME = "HttpReadTimeout"; + static final boolean DEFAULT_ENABLE_HTAPI_VALUE = false; + static final boolean DEFAULT_ENABLE_SESSION_VALUE = false; + static final int DEFAULT_LOG_LEVEL = 0; + static final String LOG_LEVEL_PROPERTY_NAME = "LogLevel"; + static final String LOG_PATH_PROPERTY_NAME = "LogPath"; + static final String LOG_LEVEL_ENV_VAR = "BIGQUERY_JDBC_LOG_LEVEL"; + static final String LOG_PATH_ENV_VAR = "BIGQUERY_JDBC_LOG_PATH"; + static final String ENABLE_SESSION_PROPERTY_NAME = "EnableSession"; + static final String DEFAULT_LOG_PATH = ""; + static final String USE_QUERY_CACHE_PROPERTY_NAME = "UseQueryCache"; + static final boolean DEFAULT_USE_QUERY_CACHE = true; + static final String JOB_CREATION_MODE_PROPERTY_NAME = "JobCreationMode"; + static final int DEFAULT_JOB_CREATION_MODE = 2; + static final String MAX_RESULTS_PROPERTY_NAME = "MaxResults"; + static final long DEFAULT_MAX_RESULTS_VALUE = 
10000; + static final String BYOID_AUDIENCE_URI_PROPERTY_NAME = "BYOID_AudienceUri"; + static final String BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME = "BYOID_CredentialSource"; + static final String BYOID_POOL_USER_PROJECT_PROPERTY_NAME = "BYOID_PoolUserProject"; + static final String BYOID_SA_IMPERSONATION_URI_PROPERTY_NAME = "BYOID_SA_Impersonation_Uri"; + static final String BYOID_SUBJECT_TOKEN_TYPE_PROPERTY_NAME = "BYOID_SubjectTokenType"; + static final String DEFAULT_BYOID_SUBJECT_TOKEN_TYPE_VALUE = + "urn:ietf:params:oauth:tokentype:id_token"; + static final String BYOID_TOKEN_URI_PROPERTY_NAME = "BYOID_TokenUri"; + static final String DEFAULT_BYOID_TOKEN_URI_VALUE = "https://sts.googleapis.com/v1/token"; + static final String PARTNER_TOKEN_PROPERTY_NAME = "PartnerToken"; + private static final Pattern PARTNER_TOKEN_PATTERN = + Pattern.compile( + "(?:^|(?<=;))" + PARTNER_TOKEN_PROPERTY_NAME + "=\\s*((?:\\([^)]*\\)|[^;])*?)(?=(?:;|$))", + Pattern.CASE_INSENSITIVE); + static final String METADATA_FETCH_THREAD_COUNT_PROPERTY_NAME = "MetaDataFetchThreadCount"; + static final int DEFAULT_METADATA_FETCH_THREAD_COUNT_VALUE = 32; + static final String RETRY_TIMEOUT_IN_SECS_PROPERTY_NAME = "Timeout"; + static final long DEFAULT_RETRY_TIMEOUT_IN_SECS_VALUE = 0L; + static final String JOB_TIMEOUT_PROPERTY_NAME = "JobTimeout"; + static final long DEFAULT_JOB_TIMEOUT_VALUE = 0L; + static final String RETRY_INITIAL_DELAY_PROPERTY_NAME = "RetryInitialDelay"; + static final long DEFAULT_RETRY_INITIAL_DELAY_VALUE = 0L; + static final String RETRY_MAX_DELAY_PROPERTY_NAME = "RetryMaxDelay"; + static final long DEFAULT_RETRY_MAX_DELAY_VALUE = 0L; + static final String ADDITIONAL_PROJECTS_PROPERTY_NAME = "AdditionalProjects"; + // Applicable only for connection pooling. 
+ static final String CONNECTION_POOL_SIZE_PROPERTY_NAME = "ConnectionPoolSize"; + static final long DEFAULT_CONNECTION_POOL_SIZE_VALUE = 10L; + static final String LISTENER_POOL_SIZE_PROPERTY_NAME = "ListenerPoolSize"; + static final long DEFAULT_LISTENER_POOL_SIZE_VALUE = 10L; + static final String ENABLE_WRITE_API_PROPERTY_NAME = "EnableWriteAPI"; + static final boolean DEFAULT_ENABLE_WRITE_API_VALUE = false; + static final String SWA_APPEND_ROW_COUNT_PROPERTY_NAME = "SWA_AppendRowCount"; + static final int DEFAULT_SWA_APPEND_ROW_COUNT_VALUE = 1000; + static final String SWA_ACTIVATION_ROW_COUNT_PROPERTY_NAME = "SWA_ActivationRowCount"; + static final int DEFAULT_SWA_ACTIVATION_ROW_COUNT_VALUE = 3; + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryJdbcUrlUtility.class.getName()); + static final String FILTER_TABLES_ON_DEFAULT_DATASET_PROPERTY_NAME = + "FilterTablesOnDefaultDataset"; + static final boolean DEFAULT_FILTER_TABLES_ON_DEFAULT_DATASET_VALUE = false; + static final String REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME = "RequestGoogleDriveScope"; + static final String SSL_TRUST_STORE_PROPERTY_NAME = "SSLTrustStore"; + static final String SSL_TRUST_STORE_PWD_PROPERTY_NAME = "SSLTrustStorePwd"; + static final int DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE = 0; + static final String MAX_BYTES_BILLED_PROPERTY_NAME = "MaximumBytesBilled"; + static final Long DEFAULT_MAX_BYTES_BILLED_VALUE = 0L; + static final String LABELS_PROPERTY_NAME = "Labels"; + static final List OVERRIDE_PROPERTIES = + Arrays.asList( + BIGQUERY_ENDPOINT_OVERRIDE_PROPERTY_NAME, + OAUTH2_TOKEN_URI_PROPERTY_NAME, + HTAPI_ENDPOINT_OVERRIDE_PROPERTY_NAME, + STS_ENDPOINT_OVERRIDE_PROPERTY_NAME); + static final String REQUEST_REASON_PROPERTY_NAME = "RequestReason"; + static final List BYOID_PROPERTIES = + Arrays.asList( + BYOID_AUDIENCE_URI_PROPERTY_NAME, + BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME, + BYOID_POOL_USER_PROJECT_PROPERTY_NAME, + BYOID_SA_IMPERSONATION_URI_PROPERTY_NAME, + BYOID_SUBJECT_TOKEN_TYPE_PROPERTY_NAME, + BYOID_TOKEN_URI_PROPERTY_NAME); + + static Set PROXY_PROPERTIES = + Collections.unmodifiableSet( + new HashSet<>( + Arrays.asList( + BigQueryConnectionProperty.newBuilder() + .setName(PROXY_HOST_PROPERTY_NAME) + .setDescription("The host name of the proxy server.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(PROXY_PORT_PROPERTY_NAME) + .setDescription( + "The port number of the proxy server to connect to. 
No defaulting" + + " behavior happens.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(PROXY_USER_ID_PROPERTY_NAME) + .setDescription("The user name for an authenticated proxy server.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(PROXY_PASSWORD_PROPERTY_NAME) + .setDescription("The password for an authenticated proxy server.") + .build()))); + + static Set AUTH_PROPERTIES = + Collections.unmodifiableSet( + new HashSet<>( + Arrays.asList( + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_TYPE_PROPERTY_NAME) + .setDescription( + "This option specifies how the connector obtains or provides the" + + " credentials for OAuth\n" + + "2.0 authentication") + .setDefaultValue(String.valueOf(DEFAULT_OAUTH_TYPE_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_SA_EMAIL_PROPERTY_NAME) + .setDescription( + "The Service Account email use for Service Account Authentication.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_PVT_KEY_PATH_PROPERTY_NAME) + .setDescription( + "The location of the credentials file used for this connection.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_PVT_KEY_PROPERTY_NAME) + .setDescription("The OAuth private key used for this connection.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_REFRESH_TOKEN_PROPERTY_NAME) + .setDescription( + "The pre-generated refresh token to be used with BigQuery for" + + " authentication.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_ACCESS_TOKEN_PROPERTY_NAME) + .setDescription( + "The pre-generated access token to be used with BigQuery for" + + " authentication.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_CLIENT_ID_PROPERTY_NAME) + .setDescription( + "The client ID to be used for user authentication or to refresh" + + " pre-generated tokens.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_CLIENT_SECRET_PROPERTY_NAME) + .setDescription( + "The client secret to be used for user authentication or to refresh" + + " pre-generated tokens.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME) + .setDescription("The service account email to be impersonated.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_SA_IMPERSONATION_CHAIN_PROPERTY_NAME) + .setDescription( + "Comma separated list of service account emails in the impersonation" + + " chain.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME) + .setDescription( + "Comma separated list of OAuth2 scopes to use with impersonated account.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME) + .setDescription("Impersonated account token lifetime.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(OAUTH_P12_PASSWORD_PROPERTY_NAME) + .setDescription("Password for p12 secret file.") + .build()))); + + static Set VALID_PROPERTIES = + Collections.unmodifiableSet( + new HashSet<>( + Arrays.asList( + BigQueryConnectionProperty.newBuilder() + .setName(MAX_BYTES_BILLED_PROPERTY_NAME) + .setDescription( + " Limits the bytes billed for this query. Queries with bytes billed above" + + " this limit will fail (without incurring a charge). 
If" + + " unspecified, the project default is used.") + .setDefaultValue(String.valueOf(DEFAULT_MAX_BYTES_BILLED_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(CONNECTION_POOL_SIZE_PROPERTY_NAME) + .setDescription("Connection pool size if connection pooling is enabled.") + .setDefaultValue(String.valueOf(DEFAULT_CONNECTION_POOL_SIZE_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LISTENER_POOL_SIZE_PROPERTY_NAME) + .setDescription("Listener pool size if connection pooling is enabled.") + .setDefaultValue(String.valueOf(DEFAULT_LISTENER_POOL_SIZE_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(RETRY_INITIAL_DELAY_PROPERTY_NAME) + .setDescription("Initial delay, in seconds, before the first retry.") + .setDefaultValue(String.valueOf(DEFAULT_RETRY_INITIAL_DELAY_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(RETRY_MAX_DELAY_PROPERTY_NAME) + .setDescription("Max limit for the retry delay, in seconds.") + .setDefaultValue(String.valueOf(DEFAULT_RETRY_MAX_DELAY_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(RETRY_TIMEOUT_IN_SECS_PROPERTY_NAME) + .setDescription( + "The length of time, in seconds, for which the connector retries a failed" + + " API call before timing out.") + .setDefaultValue(String.valueOf(DEFAULT_RETRY_TIMEOUT_IN_SECS_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(JOB_TIMEOUT_PROPERTY_NAME) + .setDescription( + "Job timeout (in seconds) after which the job is cancelled on the server") + .setDefaultValue(String.valueOf(DEFAULT_JOB_TIMEOUT_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(UNSUPPORTED_HTAPI_FALLBACK_PROPERTY_NAME) + .setDescription( + "This option determines whether the connector uses the REST API or" + + " returns an error when encountering fetch workflows unsupported by" + + " the High-Throughput API.") + .setDefaultValue(String.valueOf(DEFAULT_UNSUPPORTED_HTAPI_FALLBACK_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(DESTINATION_DATASET_EXPIRATION_TIME_PROPERTY_NAME) + .setDescription( + "The expiration time (in milliseconds) for tables in a user-specified" + + " large result dataset.") + .setDefaultValue( + String.valueOf(DEFAULT_DESTINATION_DATASET_EXPIRATION_TIME_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME) + .setDescription( + "The name of the partner-operated cloud which is a new instance of Google" + + " production, known as a Trusted Partner Cloud universe.") + .setDefaultValue(DEFAULT_UNIVERSE_DOMAIN_VALUE) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(PROJECT_ID_PROPERTY_NAME) + .setDescription("A globally unique identifier for your project.") + .setLazyDefaultValue(() -> BigQueryOptions.getDefaultProjectId()) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LOG_PATH_PROPERTY_NAME) + .setDescription( + "The directory where the connector saves log files (when logging is" + + " enabled).") + .setDefaultValue(DEFAULT_LOG_PATH) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(DEFAULT_DATASET_PROPERTY_NAME) + .setDescription( + "This default dataset for query execution. If this option is set, queries" + + " with unqualified \n" + + "table names will run against this dataset.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LOCATION_PROPERTY_NAME) + .setDescription( + "The location where datasets are created/queried. 
The location will be" + + " determined\n" + + " automatically by BigQuery if not specified.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(ENABLE_HTAPI_PROPERTY_NAME) + .setDescription( + "Enables or disables Read API usage in the Driver. Disabled by default.") + .setDefaultValue(String.valueOf(DEFAULT_ENABLE_HTAPI_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(HTAPI_ACTIVATION_RATIO_PROPERTY_NAME) + .setDescription( + "Connector switches to BigQuery Storage API when the number of pages" + + " exceed this value.") + .setDefaultValue(String.valueOf(DEFAULT_HTAPI_ACTIVATION_RATIO_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(KMS_KEY_NAME_PROPERTY_NAME) + .setDescription( + "The KMS key name tells BigQuery which key to use when encrypting or" + + " decrypting your data.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(QUERY_PROPERTIES_NAME) + .setDescription( + "Connection-level properties to customize query behavior.") // TODO: + // Figure out + // a clean way + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LABELS_PROPERTY_NAME) + .setDescription( + "Labels associated with the query to organize and group query jobs.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(HTAPI_MIN_TABLE_SIZE_PROPERTY_NAME) + .setDescription( + "If the number of total rows exceeds this value, the connector switches" + + " to the BigQuery Storage API for faster processing.") + .setDefaultValue(String.valueOf(DEFAULT_HTAPI_MIN_TABLE_SIZE_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(ENABLE_SESSION_PROPERTY_NAME) + .setDescription( + "Enable to capture your SQL activities or enable multi statement" + + " transactions. Disabled by default.") + .setDefaultValue(String.valueOf(DEFAULT_ENABLE_SESSION_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LOG_LEVEL_PROPERTY_NAME) + .setDescription( + "Sets the Log Level for the Driver. Set to Level.OFF by default.") + .setDefaultValue(String.valueOf(DEFAULT_LOG_LEVEL)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(USE_QUERY_CACHE_PROPERTY_NAME) + .setDescription("Enables or disables Query caching. Set to true by default.") + .setDefaultValue(String.valueOf(DEFAULT_USE_QUERY_CACHE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(QUERY_DIALECT_PROPERTY_NAME) + .setDescription( + "Parameter for selecting if the queries should use standard or legacy SQL" + + " syntax.") + .setDefaultValue(DEFAULT_QUERY_DIALECT_VALUE) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(ALLOW_LARGE_RESULTS_PROPERTY_NAME) + .setDescription( + "Enabled by default, must be used with legacy SQL. Used for setting" + + " destination table & dataset.") + .setDefaultValue(String.valueOf(DEFAULT_ALLOW_LARGE_RESULTS)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LARGE_RESULTS_TABLE_PROPERTY_NAME) + .setDescription("The destination table where queries are saved.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(LARGE_RESULTS_DATASET_PROPERTY_NAME) + .setDescription("The destination dataset where queries are saved.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(JOB_CREATION_MODE_PROPERTY_NAME) + .setDescription( + "Enables or disables Stateless Query mode. 
Set to false by default.") + .setDefaultValue(String.valueOf(DEFAULT_JOB_CREATION_MODE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(MAX_RESULTS_PROPERTY_NAME) + .setDescription("Maximum number of results per page") + .setDefaultValue(String.valueOf(DEFAULT_MAX_RESULTS_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_AUDIENCE_URI_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. Corresponds to the audience" + + " property\n" + + " in the external account configuration file.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. The file location or the URI" + + " of\n" + + " the subject token. Corresponds to the credential_source property" + + " in\n" + + " the external account configuration file.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_POOL_USER_PROJECT_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. The project number associated" + + " with\n" + + " the workforce pool. Corresponds to the" + + " workforce_pool_user_project\n" + + " property in the external account configuration file.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_SA_IMPERSONATION_URI_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. The service account email." + + " Only\n" + + " present when service account impersonation is used. Corresponds" + + " to\n" + + " the service_account_impersonation_url property in the external" + + " account\n" + + " configuration file.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_SUBJECT_TOKEN_TYPE_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. The subject token type." + + " Corresponds\n" + + " to the subject_token_type property in the external account" + + " configuration file.") + .setDefaultValue(DEFAULT_BYOID_SUBJECT_TOKEN_TYPE_VALUE) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(BYOID_TOKEN_URI_PROPERTY_NAME) + .setDescription( + "Used for External Account Authentication. The URI used to generate" + + " authentication\n" + + " tokens. Corresponds to the token_url property in the external" + + " account\n" + + " configuration file.") + .setDefaultValue(DEFAULT_BYOID_TOKEN_URI_VALUE) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(PARTNER_TOKEN_PROPERTY_NAME) + .setDescription("The partner name and environment.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(METADATA_FETCH_THREAD_COUNT_PROPERTY_NAME) + .setDescription( + "The number of threads used to call a DatabaseMetaData method.") + .setDefaultValue(String.valueOf(DEFAULT_METADATA_FETCH_THREAD_COUNT_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(ENABLE_WRITE_API_PROPERTY_NAME) + .setDescription( + "Enables or disables Write API usage for bulk inserts in the Driver." + + " Disabled by default.") + .setDefaultValue(String.valueOf(DEFAULT_ENABLE_WRITE_API_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(SWA_ACTIVATION_ROW_COUNT_PROPERTY_NAME) + .setDescription( + "Connector switches to BigQuery Storage Write API when the number of rows" + + " for executeBatch insert exceed this value. 
Do not change unless" + + " necessary.") + .setDefaultValue(String.valueOf(DEFAULT_SWA_ACTIVATION_ROW_COUNT_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(SWA_APPEND_ROW_COUNT_PROPERTY_NAME) + .setDescription("Size of the write stream. Do not change unless necessary.") + .setDefaultValue(String.valueOf(DEFAULT_SWA_APPEND_ROW_COUNT_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(ADDITIONAL_PROJECTS_PROPERTY_NAME) + .setDescription( + "A comma-separated list of Google Cloud project IDs that can be accessed" + + " for querying, in addition to the primary project specified in the" + + " connection.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(FILTER_TABLES_ON_DEFAULT_DATASET_PROPERTY_NAME) + .setDescription( + "If true and DefaultDataset is set, DatabaseMetaData.getTables() and" + + " .getColumns() will filter results based on the DefaultDataset" + + " when catalog/schema patterns are null or wildcards.") + .setDefaultValue( + String.valueOf(DEFAULT_FILTER_TABLES_ON_DEFAULT_DATASET_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME) + .setDescription( + "Enables or disables whether the connector requests access to Google" + + " Drive. Set to false (0) by default.") + .setDefaultValue(String.valueOf(DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE)) + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(SSL_TRUST_STORE_PROPERTY_NAME) + .setDescription( + "The full path of the Java TrustStore containing the server certificate" + + " for one-way SSL authentication.\n" + + "If the trust store requires a password, provide it using the" + + " property SSLTrustStorePwd.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(SSL_TRUST_STORE_PWD_PROPERTY_NAME) + .setDescription( + "The password for accessing the Java TrustStore that is specified using" + + " the property SSLTrustStore.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(HTTP_CONNECT_TIMEOUT_PROPERTY_NAME) + .setDescription( + "The timeout (in milliseconds) for establishing a connection to the" + + " server.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(HTTP_READ_TIMEOUT_PROPERTY_NAME) + .setDescription("The timeout (in milliseconds) when reading from the server.") + .build(), + BigQueryConnectionProperty.newBuilder() + .setName(REQUEST_REASON_PROPERTY_NAME) + .setDescription( + "Reason for the request, which is passed as the x-goog-request-reason" + + " header.") + .build()))); + + private static final List NETWORK_PROPERTIES = + ImmutableList.of( + PARTNER_TOKEN_PROPERTY_NAME, + ENDPOINT_OVERRIDES_PROPERTY_NAME, + PRIVATE_SERVICE_CONNECT_PROPERTY_NAME); + + private static final Map PROPERTY_NAME_MAP; + + static { + Map map = new HashMap<>(); + for (BigQueryConnectionProperty p : VALID_PROPERTIES) { + map.put(p.getName().toUpperCase(), p.getName()); + } + for (BigQueryConnectionProperty p : AUTH_PROPERTIES) { + map.put(p.getName().toUpperCase(), p.getName()); + } + for (BigQueryConnectionProperty p : PROXY_PROPERTIES) { + map.put(p.getName().toUpperCase(), p.getName()); + } + for (String p : OVERRIDE_PROPERTIES) { + map.put(p.toUpperCase(), p); + } + for (String p : BYOID_PROPERTIES) { + map.put(p.toUpperCase(), p); + } + for (String p : NETWORK_PROPERTIES) { + map.put(p.toUpperCase(), p); + } + PROPERTY_NAME_MAP = Collections.unmodifiableMap(map); + } + + private BigQueryJdbcUrlUtility() {} + + /** + * Parses a URI property from the given URI. 
+   *
+   * @param uri The URI to parse.
+   * @param property The name of the property to parse.
+   * @return The String value of the property, or {@code null} if the property is not found.
+   */
+  static String parseUriProperty(String uri, String property) {
+    Map<String, String> map = parseUrl(uri);
+    if (PROPERTY_NAME_MAP.containsKey(property.toUpperCase())) {
+      return map.get(PROPERTY_NAME_MAP.get(property.toUpperCase()));
+    }
+    return map.get(property);
+  }
+
+  /**
+   * Parses the URL into a map of key-value pairs, validating that all keys are known properties.
+   *
+   * @param url The URL to parse.
+   * @return A map of property names to values.
+   * @throws BigQueryJdbcRuntimeException if an unknown property is found or the URL is malformed.
+   */
+  static Map<String, String> parseUrl(String url) {
+    return PARSE_CACHE.computeIfAbsent(url, BigQueryJdbcUrlUtility::parseUrlInternal);
+  }
+
+  private static Map<String, String> parseUrlInternal(String url) {
+    Map<String, String> map = new HashMap<>();
+    if (url == null) {
+      return map;
+    }
+
+    String[] urlParts = url.split(";", 2);
+    if (urlParts.length < 2) {
+      return map;
+    }
+
+    String urlToParse = urlParts[1];
+
+    // Parse PartnerToken separately as it may contain ';'
+    Matcher matcher = PARTNER_TOKEN_PATTERN.matcher(urlToParse);
+    if (matcher.find()) {
+      String rawToken = matcher.group(1).trim();
+      String token =
+          (rawToken.startsWith("(") && rawToken.endsWith(")"))
+              ? rawToken.substring(1, rawToken.length() - 1).trim()
+              : rawToken;
+
+      if (token.toUpperCase().startsWith("GPN:")) {
+        map.put(PARTNER_TOKEN_PROPERTY_NAME, " (" + token + ")");
+      }
+      urlToParse = matcher.replaceFirst("");
+    }
+
+    String[] parts = urlToParse.split(";");
+    for (String part : parts) {
+      if (part.trim().isEmpty()) {
+        continue;
+      }
+      String[] kv = part.split("=", 2);
+      String key = kv[0].trim().toUpperCase();
+      if (kv.length != 2 || !PROPERTY_NAME_MAP.containsKey(key)) {
+        String ref = (kv.length == 2) ? key : part;
+        String safeRef = ref.length() > 32 ? ref.substring(0, 32) + "..." : ref;
+        throw new BigQueryJdbcRuntimeException(
+            String.format("Wrong value or unknown setting: %s", safeRef));
+      }
+
+      map.put(PROPERTY_NAME_MAP.get(key), CharEscapers.decodeUriPath(kv[1].replace("+", "%2B")));
+    }
+    return Collections.unmodifiableMap(map);
+  }
+
+  /**
+   * Appends the given properties to the given URL.
+   *
+   * @param url The URL to append the properties to.
+   * @param callerClassName The name of the calling class, used only for logging.
+   * @param properties The properties to append.
+   * @return The string value of the updated URL.
+   */
+  static String appendPropertiesToURL(String url, String callerClassName, Properties properties) {
+    LOG.finest("++enter++ " + callerClassName);
+    StringBuilder urlBuilder = new StringBuilder(url);
+    for (Entry<Object, Object> entry : properties.entrySet()) {
+      if (entry.getValue() != null && !"".equals(entry.getValue())) {
+        LOG.finest("Appending %s with value %s to URL", entry.getKey(), entry.getValue());
+        String encodedValue =
+            UrlEscapers.urlFormParameterEscaper()
+                .escape((String) entry.getValue())
+                .replace("+", "%20");
+        urlBuilder.append(";").append(entry.getKey()).append("=").append(encodedValue);
+      }
+    }
+    return urlBuilder.toString();
+  }
+
+  static boolean convertIntToBoolean(String value, String propertyName) {
+    int integerValue;
+
+    try {
+      if (value.equalsIgnoreCase("true")) {
+        integerValue = 1;
+      } else if (value.equalsIgnoreCase("false")) {
+        integerValue = 0;
+      } else {
+        integerValue = Integer.parseInt(value);
+      }
+
+    } catch (NumberFormatException ex) {
+      throw new IllegalArgumentException(
+          String.format(
+              "Invalid value for %s. For Boolean connection properties, use 0 for false and 1 for"
+                  + " true.",
+              propertyName),
+          ex);
+    }
+    if (integerValue == 1) {
+      return true;
+    } else if (integerValue == 0) {
+      return false;
+    } else {
+      throw new IllegalArgumentException(
+          String.format(
+              "Invalid value for %s. For Boolean connection properties, use 0 for false and 1 for"
+                  + " true.",
+              propertyName));
+    }
+  }
+
+  public static Level parseLogLevel(String logLevelString) {
+    int logLevel = logLevelString != null ? Integer.parseInt(logLevelString) : DEFAULT_LOG_LEVEL;
+    switch (logLevel) {
+      case 8:
+        return Level.ALL;
+      case 7:
+        return Level.FINEST;
+      case 6:
+        return Level.FINER;
+      case 5:
+        return Level.FINE;
+      case 4:
+        return Level.CONFIG;
+      case 3:
+        return Level.INFO;
+      case 2:
+        return Level.WARNING;
+      case 1:
+        return Level.SEVERE;
+      case 0:
+      default:
+        LOG.info("%s value not provided, defaulting to %s.", LOG_LEVEL_PROPERTY_NAME, Level.OFF);
+        return Level.OFF;
+    }
+  }
+
+  static Map<String, String> parsePropertiesMapFromValue(
+      String propertiesString, String propertyName, String context) {
+    if (propertiesString == null || propertiesString.isEmpty()) {
+      LOG.fine("Unable to parse property name: %s from context: %s", propertyName, context);
+      return null;
+    }
+    Map<String, String> propertiesMap = new HashMap<>();
+    for (String keyValuePair : Splitter.on(",").split(propertiesString)) {
+      List<String> parts = Splitter.on("=").limit(2).splitToList(keyValuePair);
+      if (parts.size() == 2) {
+        propertiesMap.put(parts.get(0), parts.get(1));
+      } else {
+        LOG.warning(
+            "Invalid KeyValue pair: %s found in context: %s for property name: %s",
+            keyValuePair, context, propertyName);
+      }
+    }
+    return propertiesMap;
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArray.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArray.java
new file mode 100644
index 0000000000..3b557a15a7
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArray.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.cloud.bigquery.jdbc.BigQueryFieldValueListWrapper.getNestedFieldValueListWrapper;
+
+import com.google.api.core.InternalApi;
+import com.google.cloud.Tuple;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.FieldList;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.Schema;
+import java.sql.ResultSet;
+import java.util.List;
+
+/**
+ * An implementation of {@link BigQueryBaseArray} used to represent Array values from Json data.
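+ *
+ * <p>A sketch of typical element access through the standard JDBC {@link java.sql.Array} API
+ * (variable names are illustrative; the two-column layout follows java.sql.Array's documented
+ * convention):
+ *
+ * <pre>{@code
+ * Object[] elements = (Object[]) sqlArray.getArray();
+ * try (ResultSet rs = sqlArray.getResultSet()) {
+ *   while (rs.next()) {
+ *     int index = rs.getInt(1); // 1-based element index
+ *     Object element = rs.getObject(2); // element value
+ *   }
+ * }
+ * }</pre>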
+ */
+@InternalApi
+class BigQueryJsonArray extends BigQueryBaseArray {
+  private static final BigQueryJdbcCustomLogger LOG =
+      new BigQueryJdbcCustomLogger(BigQueryJsonArray.class.getName());
+  private static final BigQueryTypeCoercer BIGQUERY_TYPE_COERCER =
+      BigQueryTypeCoercionUtility.INSTANCE;
+  private List<FieldValue> values;
+
+  BigQueryJsonArray(Field schema, FieldValue values) {
+    super(schema);
+    this.values = (values == null || values.isNull()) ? null : values.getRepeatedValue();
+  }
+
+  @Override
+  public Object getArray() {
+    ensureValid();
+    LOG.finest("++enter++");
+    if (this.values == null) {
+      return null;
+    }
+    return getArrayInternal(0, this.values.size());
+  }
+
+  @Override
+  public Object getArray(long index, int count) {
+    ensureValid();
+    LOG.finest("++enter++");
+    if (this.values == null) {
+      return null;
+    }
+    Tuple<Integer, Integer> range = createRange(index, count, this.values.size());
+    return getArrayInternal(range.x(), range.y());
+  }
+
+  @Override
+  public ResultSet getResultSet() {
+    ensureValid();
+    LOG.finest("++enter++");
+    if (this.values == null) {
+      return new BigQueryJsonResultSet();
+    }
+    BigQueryFieldValueListWrapper bigQueryFieldValueListWrapper =
+        getNestedFieldValueListWrapper(FieldList.of(singleElementSchema()), this.values);
+    return BigQueryJsonResultSet.getNestedResultSet(
+        Schema.of(this.schema), bigQueryFieldValueListWrapper, 0, this.values.size());
+  }
+
+  @Override
+  public ResultSet getResultSet(long index, int count) {
+    ensureValid();
+    LOG.finest("++enter++");
+    if (this.values == null) {
+      return new BigQueryJsonResultSet();
+    }
+    Tuple<Integer, Integer> range = createRange(index, count, this.values.size());
+    BigQueryFieldValueListWrapper bigQueryFieldValueListWrapper =
+        getNestedFieldValueListWrapper(FieldList.of(singleElementSchema()), this.values);
+    return BigQueryJsonResultSet.getNestedResultSet(
+        Schema.of(this.schema), bigQueryFieldValueListWrapper, range.x(), range.y());
+  }
+
+  @Override
+  public void free() {
+    this.values = null;
+    markInvalid();
+  }
+
+  @Override
+  Object getCoercedValue(int index) {
+    FieldValue fieldValue = this.values.get(index);
+    return this.arrayOfStruct
+        ? new BigQueryJsonStruct(this.schema.getSubFields(), fieldValue)
+        : BIGQUERY_TYPE_COERCER.coerceTo(getTargetClass(), fieldValue);
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSet.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSet.java
new file mode 100644
index 0000000000..da2ade028e
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSet.java
@@ -0,0 +1,320 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.cloud.bigquery.jdbc.BigQueryBaseArray.isArray;
+import static com.google.cloud.bigquery.jdbc.BigQueryBaseStruct.isStruct;
+
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.FieldValue.Attribute;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.concurrent.BlockingQueue;
+
+/** {@link ResultSet} implementation for the JSON data source (using the REST API). */
+class BigQueryJsonResultSet extends BigQueryBaseResultSet {
+  private final long totalRows;
+  private final BlockingQueue<BigQueryFieldValueListWrapper> buffer;
+  private boolean hasReachedEnd = false;
+  // Points to the current record
+  private BigQueryFieldValueListWrapper cursor;
+  // Tracks the index of the nested element under process
+  private int nestedRowIndex;
+  private long rowCnt = 0;
+  private boolean afterLast = false;
+  private final int fromIndex;
+  private final int toIndexExclusive;
+  private final Thread[] ownedThreads;
+
+  private BigQueryJsonResultSet(
+      Schema schema,
+      long totalRows,
+      BlockingQueue<BigQueryFieldValueListWrapper> buffer,
+      BigQueryStatement statement,
+      boolean isNested,
+      BigQueryFieldValueListWrapper cursor,
+      int fromIndex,
+      int toIndexExclusive,
+      Thread[] ownedThreads,
+      BigQuery bigQuery) {
+    super(bigQuery, statement, schema, isNested);
+    this.totalRows = totalRows;
+    this.buffer = buffer;
+    this.cursor = cursor;
+    this.fromIndex = fromIndex;
+    this.toIndexExclusive = toIndexExclusive;
+    this.nestedRowIndex = fromIndex - 1;
+    this.ownedThreads = ownedThreads;
+  }
+
+  /**
+   * Returns a new top-level (non-nested) instance of BigQueryJsonResultSet that reads rows from
+   * the given buffer.
+   *
+   * @return BigQueryJsonResultSet
+   */
+  static BigQueryJsonResultSet of(
+      Schema schema,
+      long totalRows,
+      BlockingQueue<BigQueryFieldValueListWrapper> buffer,
+      BigQueryStatement statement,
+      Thread[] ownedThreads,
+      BigQuery bigQuery) {
+
+    return new BigQueryJsonResultSet(
+        schema, totalRows, buffer, statement, false, null, -1, -1, ownedThreads, bigQuery);
+  }
+
+  static BigQueryJsonResultSet of(
+      Schema schema,
+      long totalRows,
+      BlockingQueue<BigQueryFieldValueListWrapper> buffer,
+      BigQueryStatement statement,
+      Thread[] ownedThreads) {
+
+    return new BigQueryJsonResultSet(
+        schema, totalRows, buffer, statement, false, null, -1, -1, ownedThreads, null);
+  }
+
+  BigQueryJsonResultSet() {
+    super(null, null, null, false);
+    totalRows = 0;
+    buffer = null;
+    fromIndex = 0;
+    ownedThreads = new Thread[0];
+    toIndexExclusive = 0;
+  }
+
+  /**
+   * Wrapper method which can be used for initializing an instance of BigQueryJsonResultSet for
+   * nested records.
+   *
+   * @param schema Table schema
+   * @param cursor Points to the current record
+   * @param fromIndex starting index under consideration
+   * @param toIndexExclusive last index (exclusive) under consideration
+   * @return The BigQueryJsonResultSet
+   */
+  static BigQueryJsonResultSet getNestedResultSet(
+      Schema schema, BigQueryFieldValueListWrapper cursor, int fromIndex, int toIndexExclusive) {
+    return new BigQueryJsonResultSet(
+        schema,
+        -1,
+        null,
+        null, /* statement will be null in case of a nested java.sql.ResultSet. */
+        true,
+        cursor,
+        fromIndex,
+        toIndexExclusive,
+        null,
+        null);
+  }
+
+  /*
+   * Advances the result set to the next row, returning false if no such row exists. Potentially a
+   * blocking operation.
+   */
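+  // A sketch of the standard consumption idiom this class supports (the query, statement
+  // variable and column index are illustrative assumptions, not taken from this file):
+  //
+  //   try (ResultSet rs = statement.executeQuery("SELECT word FROM dataset.table")) {
+  //     while (rs.next()) {
+  //       Object value = rs.getObject(1);
+  //     }
+  //   }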
+  public boolean next() throws SQLException {
+    checkClosed();
+    if (this.isNested) {
+      // We are working with a nested record, so the cursor will already have been
+      // populated.
+      if (this.cursor == null || this.cursor.getArrayFieldValueList() == null) {
+        throw new IllegalStateException(
+            "Cursor/ArrayFieldValueList can not be null when working with a nested record");
+      }
+      // Check if there's a next record in the array which can be read
+      if (this.nestedRowIndex < (this.toIndexExclusive - 1)) {
+        this.nestedRowIndex++;
+        return true;
+      }
+      this.afterLast = true;
+      return false;
+
+    } else {
+      // If the end of the stream is reached, or we are past the last row, i.e.
+      // rowCnt == totalRows (rowCnt starts at 0), we can simply return false.
+      if (this.hasReachedEnd || this.isLast()) {
+        this.afterLast = true;
+        return false;
+      }
+      try {
+        // Advance the cursor; potentially a blocking operation
+        this.cursor = this.buffer.take();
+        if (this.cursor.getException() != null) {
+          throw new BigQueryJdbcRuntimeException(this.cursor.getException());
+        }
+        this.rowCnt++;
+        // Check for end of stream
+        if (this.cursor.isLast()) {
+          this.cursor = null;
+          this.hasReachedEnd = true;
+          return false;
+        }
+        // Cursor has been advanced
+        return true;
+
+      } catch (InterruptedException ex) {
+        throw new BigQueryJdbcRuntimeException(
+            "Error occurred while advancing the cursor. This can happen when the connection is"
+                + " closed while next() is being called.",
+            ex);
+      }
+    }
+  }
+
+  @Override
+  public Object getObject(int columnIndex) throws SQLException {
+    // columnIndex is a SQL index, starting at 1
+    checkClosed();
+    LOG.finest("++enter++");
+    FieldValue value = getObjectInternal(columnIndex);
+    if (value == null || value.isNull()) {
+      return null;
+    }
+
+    if (this.isNested && columnIndex == 1) {
+      return this.bigQueryTypeCoercer.coerceTo(Integer.class, value);
+    }
+
+    if (this.isNested && columnIndex == 2) {
+      Field arrayField = this.schema.getFields().get(0);
+      if (isStruct(arrayField)) {
+        return new BigQueryJsonStruct(arrayField.getSubFields(), value);
+      }
+      Class<?> targetClass =
+          BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get(
+              arrayField.getType().getStandardType());
+      return this.bigQueryTypeCoercer.coerceTo(targetClass, value);
+    }
+
+    int extraIndex = this.isNested ? 2 : 1;
+    Field fieldSchema = this.schemaFieldList.get(columnIndex - extraIndex);
+    if (isArray(fieldSchema)) {
+      return new BigQueryJsonArray(fieldSchema, value);
+    } else if (isStruct(fieldSchema)) {
+      return new BigQueryJsonStruct(fieldSchema.getSubFields(), value);
+    } else {
+      Class<?> targetClass =
+          BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get(
+              fieldSchema.getType().getStandardType());
+      return this.bigQueryTypeCoercer.coerceTo(targetClass, value);
+    }
+  }
+
+  /**
+   * This method will be called by every other getter of this {@link java.sql.ResultSet},
+   * including {@link #getObject(int)}, to get the value in its rawest form, i.e. {@link
+   * FieldValue}, so that it can be coerced further as required.
+   *
+   * @param columnIndex the first column is 1, the second is 2, ...
+   * @return the {@link FieldValue} at the given column.
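+   *     <p>For nested (array) result sets, the two-column convention of {@link
+   *     java.sql.Array#getResultSet()} applies; a sketch of reading one element (variable names
+   *     are illustrative):
+   *     <pre>{@code
+   *     int index = rs.getInt(1); // 1-based position of the element in the array
+   *     Object element = rs.getObject(2); // the element value itself
+   *     }</pre>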
+ */ + private FieldValue getObjectInternal(int columnIndex) throws SQLException { + checkClosed(); + LOG.finest("++enter++"); + FieldValue value; + if (this.isNested) { + boolean validIndexForNestedResultSet = columnIndex == 1 || columnIndex == 2; + // BigQuery doesn't support multidimensional arrays, so just the default row + // num column (1) and the actual column (2) is supposed to be read + if (!validIndexForNestedResultSet) { + throw new IllegalArgumentException( + "Column index is required to be 1 or 2 for the nested arrays"); + } + if (this.cursor.getArrayFieldValueList() == null + || this.cursor.getArrayFieldValueList().get(this.nestedRowIndex) == null) { + throw new IllegalStateException("ArrayFieldValueList cannot be null"); + } + + // For Arrays the first column is Index, ref: + // https://docs.oracle.com/javase/7/docs/api/java/sql/Array.html#getResultSet() + if (columnIndex == 1) { + return FieldValue.of(Attribute.PRIMITIVE, Integer.toString(this.nestedRowIndex + 1)); + } else { + // columnIndex = 2 + // This ignores the columnIndex, as there's just one column, and we have already incremented + // the nestedRowIndex + value = this.cursor.getArrayFieldValueList().get(this.nestedRowIndex); + } + } + // non nested, return the value + else { + // SQL Index to 0 based index + value = this.cursor.getFieldValueList().get(columnIndex - 1); + } + setWasNull(value.getValue()); + return value; + } + + @Override + public void close() { + LOG.fine("Closing BigqueryJsonResultSet %s.", this); + this.isClosed = true; + if (ownedThreads != null) { + for (Thread ownedThread : ownedThreads) { + if (!ownedThread.isInterrupted()) { + ownedThread.interrupt(); + } + } + } + super.close(); + } + + @Override + public boolean isBeforeFirst() throws SQLException { + checkClosed(); + LOG.finest("++enter++"); + if (this.isNested) { + return this.nestedRowIndex < this.fromIndex; + } else { + return this.cursor == null && this.rowCnt == 0; + } + } + + @Override + public boolean isAfterLast() throws SQLException { + checkClosed(); + LOG.finest("++enter++"); + return this.afterLast; + } + + @Override + public boolean isFirst() throws SQLException { + checkClosed(); + LOG.finest("++enter++"); + if (this.isNested) { + return this.nestedRowIndex == this.fromIndex; + } else { + return this.rowCnt == 1; + } + } + + @Override + public boolean isLast() throws SQLException { + checkClosed(); + LOG.finest("++enter++"); + if (this.isNested) { + return this.nestedRowIndex == this.toIndexExclusive - 1; + } else { + return this.rowCnt == this.totalRows; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStruct.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStruct.java new file mode 100644 index 0000000000..35217f8e71 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStruct.java @@ -0,0 +1,80 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryBaseArray.isArray; + +import com.google.api.core.InternalApi; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import java.lang.reflect.Array; +import java.util.List; + +/** + * An implementation of {@link BigQueryBaseStruct} used to represent Struct values from Json data. + */ +@InternalApi +class BigQueryJsonStruct extends BigQueryBaseStruct { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryJsonStruct.class.getName()); + + private static final BigQueryTypeCoercer BIGQUERY_TYPE_COERCER = + BigQueryTypeCoercionUtility.INSTANCE; + + private final FieldList schema; + private final List values; + + public BigQueryJsonStruct(FieldList schema, FieldValue values) { + this.schema = schema; + this.values = (values == null || values.isNull()) ? null : values.getRecordValue(); + } + + @Override + FieldList getSchema() { + return this.schema; + } + + @Override + public Object[] getAttributes() { + LOG.finest("++enter++"); + int size = schema.size(); + Object[] attributes = (Object[]) Array.newInstance(Object.class, size); + + for (int index = 0; index < size; index++) { + Field currentSchema = schema.get(index); + FieldValue currentValue = values == null ? null : values.get(index); + Object coercedValue = getValue(currentSchema, currentValue); + Array.set(attributes, index, coercedValue); + } + return attributes; + } + + private Object getValue(Field currentSchema, FieldValue currentValue) { + LOG.finest("++enter++"); + if (isArray(currentSchema)) { + return new BigQueryJsonArray(currentSchema, currentValue); + } else if (isStruct(currentSchema)) { + return new BigQueryJsonStruct(currentSchema.getSubFields(), currentValue); + } else { + Class targetClass = + BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping.get( + currentSchema.getType().getStandardType()); + return BIGQUERY_TYPE_COERCER.coerceTo(targetClass, currentValue); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsConnection.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsConnection.java new file mode 100644 index 0000000000..1804cc14c4 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsConnection.java @@ -0,0 +1,191 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsConnection.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsConnection.java
new file mode 100644
index 0000000000..1804cc14c4
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsConnection.java
@@ -0,0 +1,191 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.METHOD_NOT_IMPLEMENTED;
+
+import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException;
+import java.sql.Array;
+import java.sql.Blob;
+import java.sql.CallableStatement;
+import java.sql.Clob;
+import java.sql.Connection;
+import java.sql.NClob;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.sql.SQLXML;
+import java.sql.Savepoint;
+import java.sql.Struct;
+import java.util.Map;
+import java.util.Properties;
+import java.util.concurrent.Executor;
+
+/** NoOps abstract base class for the BigQuery JDBC Connection. */
+abstract class BigQueryNoOpsConnection implements Connection {
+
+  @Override
+  public CallableStatement prepareCall(String sql) throws SQLException {
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED);
+  }
+
+  @Override
+  public String nativeSQL(String sql) throws SQLException {
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED);
+  }
+
+  @Override
+  public <T> T unwrap(Class<T> iface) throws SQLException {
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED);
+  }
+
+  @Override
+  public boolean isWrapperFor(Class<?> iface) {
+    return false;
+  }
+
+  @Override
+  public boolean isReadOnly() {
+    return false;
+  }
+
+  @Override
+  public void setReadOnly(boolean readOnly) {}
+
+  @Override
+  public void setCatalog(String catalog) {}
+
+  // TODO: post MVP feature
+
+  @Override
+  public Map<String, Class<?>> getTypeMap() throws SQLException {
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED);
+  }
+
+  @Override
+  public void setTypeMap(Map<String, Class<?>> map) {}
+
+  @Override
+  public Savepoint setSavepoint() throws SQLException {
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED);
+  }
+
+  @Override
+  public Savepoint setSavepoint(String name) throws SQLException {
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED);
+  }
+
+  @Override
+  public void rollback(Savepoint savepoint) {}
+
+  @Override
+  public void releaseSavepoint(Savepoint savepoint) {}
+
+  @Override
+  public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency)
+      throws SQLException {
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED);
+  }
+
+  @Override
+  public CallableStatement prepareCall(
+      String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability)
+      throws SQLException {
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED);
+  }
+
+  @Override
+  public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException {
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED);
+  }
+
+  @Override
+  public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException {
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED);
+  }
+
+  @Override
+  public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException {
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED);
+  }
+
+  @Override
+  public Clob createClob() throws SQLException {
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED);
+  }
+
+  @Override
+  public Blob createBlob() throws SQLException {
+    throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED);
+  }
+
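Because this base class answers unimplemented entry points with `BigQueryJdbcSqlFeatureNotSupportedException`, a portable caller can probe for optional features instead of assuming them. A sketch, assuming that exception subclasses `java.sql.SQLFeatureNotSupportedException` as its name suggests:

```java
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Savepoint;

class FeatureProbeSketch {
  static boolean supportsSavepoints(Connection connection) {
    try {
      Savepoint savepoint = connection.setSavepoint();
      connection.releaseSavepoint(savepoint);
      return true;
    } catch (SQLFeatureNotSupportedException e) {
      return false; // the NoOps default path
    } catch (SQLException e) {
      return false; // treat any other failure as unsupported for this probe
    }
  }
}
```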
@Override + public NClob createNClob() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public SQLXML createSQLXML() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean isValid(int timeout) throws SQLException { + return false; + } + + @Override + public void setClientInfo(String name, String value) {} + + @Override + public String getClientInfo(String name) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Properties getClientInfo() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void setClientInfo(Properties properties) {} + + @Override + public Array createArrayOf(String typeName, Object[] elements) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Struct createStruct(String typeName, Object[] attributes) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public String getSchema() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void setSchema(String schema) {} + + @Override + public void setNetworkTimeout(Executor executor, int milliseconds) {} + + @Override + public int getNetworkTimeout() { + return 0; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsResultSet.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsResultSet.java new file mode 100644 index 0000000000..e4b29f7cd5 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsResultSet.java @@ -0,0 +1,693 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.METHOD_NOT_IMPLEMENTED; + +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.Ref; +import java.sql.ResultSet; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.sql.SQLXML; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Map; + +/** NoOps Abstract base class for BigQuery JDBC ResultSet(s). 
*/ +abstract class BigQueryNoOpsResultSet implements ResultSet { + + @Override + public int getFetchDirection() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void setFetchSize(int rows) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public int getFetchSize() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public String getCursorName() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean absolute(int row) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void beforeFirst() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void afterLast() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean first() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean last() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public int getRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean relative(int rows) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean previous() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void setFetchDirection(int direction) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean rowUpdated() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean rowInserted() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean rowDeleted() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNull(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBoolean(int columnIndex, boolean x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateByte(int columnIndex, byte x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateShort(int columnIndex, short x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateInt(int columnIndex, int x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateLong(int columnIndex, long x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void 
updateFloat(int columnIndex, float x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateDouble(int columnIndex, double x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateString(int columnIndex, String x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBytes(int columnIndex, byte[] x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateDate(int columnIndex, Date x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateTime(int columnIndex, Time x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateObject(int columnIndex, Object x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNull(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBoolean(String columnLabel, boolean x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateByte(String columnLabel, byte x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateShort(String columnLabel, short x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateInt(String columnLabel, int x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateLong(String columnLabel, long x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateFloat(String columnLabel, float x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + 
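Every updater in this class rejects the call, so generic JDBC consumers should treat these result sets as read-only rather than catching the exception per call. A small guard, assuming the concrete result set reports its concurrency through the standard `getConcurrency()` accessor:

```java
import java.sql.ResultSet;
import java.sql.SQLException;

class ConcurrencyCheckSketch {
  static boolean isUpdatable(ResultSet rs) throws SQLException {
    return rs.getConcurrency() == ResultSet.CONCUR_UPDATABLE;
  }
}
```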
+ @Override + public void updateDouble(String columnLabel, double x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateString(String columnLabel, String x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBytes(String columnLabel, byte[] x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateDate(String columnLabel, Date x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateTime(String columnLabel, Time x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, int length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, int length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateObject(String columnLabel, Object x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void insertRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void deleteRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void refreshRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void cancelRowUpdates() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void moveToInsertRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void moveToCurrentRow() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Object getObject(int columnIndex, Map> map) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Ref getRef(int columnIndex) throws SQLException { + throw new 
BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Object getObject(String columnLabel, Map> map) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Ref getRef(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public URL getURL(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public URL getURL(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateRef(int columnIndex, Ref x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateRef(String columnLabel, Ref x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(int columnIndex, Blob x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(String columnLabel, Blob x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(int columnIndex, Clob x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(String columnLabel, Clob x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateArray(int columnIndex, Array x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateArray(String columnLabel, Array x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public RowId getRowId(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public RowId getRowId(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateRowId(int columnIndex, RowId x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateRowId(String columnLabel, RowId x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNString(int columnIndex, String nString) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNString(String columnLabel, String nString) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(int columnIndex, NClob nClob) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(String columnLabel, NClob nClob) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public NClob getNClob(int 
columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public NClob getNClob(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public SQLXML getSQLXML(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public SQLXML getSQLXML(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public String getNString(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public String getNString(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Reader getNCharacterStream(int columnIndex) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public Reader getNCharacterStream(String columnLabel) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(int columnIndex, InputStream inputStream, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(String columnLabel, 
InputStream inputStream, long length) + throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(int columnIndex, Reader reader, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(String columnLabel, Reader reader, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(int columnIndex, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateClob(String columnLabel, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(int columnIndex, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void updateNClob(String columnLabel, Reader reader) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public T 
getObject(int columnIndex, Class type) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public T getObject(String columnLabel, Class type) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public T unwrap(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public SQLWarning getWarnings() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public void clearWarnings() throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + void checkClosed() throws SQLException { + if (isClosed()) { + throw new BigQueryJdbcException("This " + getClass().getName() + " has been closed"); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsStatement.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsStatement.java new file mode 100644 index 0000000000..2e71bfaf75 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryNoOpsStatement.java @@ -0,0 +1,90 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.METHOD_NOT_IMPLEMENTED; + +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; + +abstract class BigQueryNoOpsStatement implements Statement { + + @Override + public void setCursorName(String name) throws SQLException { + // TODO: ResultSet Concurrency is read only(Not updatable) + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public T unwrap(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public ResultSet getGeneratedKeys() throws SQLException { + // TODO: Returns an empty resultset. 
+ // return empty ResultSet + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public int executeUpdate(String sql, int autoGeneratedKeys) throws SQLException { + // Implementation detailed in BigQuery JDBC Design - Wiring of executeQuery, executeUpdate and + // execute methods + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public int executeUpdate(String sql, int[] columnIndexes) throws SQLException { + // Implementation detailed in BigQuery JDBC Design - Wiring of executeQuery, executeUpdate and + // execute methods + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public int executeUpdate(String sql, String[] columnNames) throws SQLException { + // Implementation detailed in BigQuery JDBC Design - Wiring of executeQuery, executeUpdate and + // execute methods + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean execute(String sql, int autoGeneratedKeys) throws SQLException { + // Implementation detailed in BigQuery JDBC Design - Wiring of executeQuery, executeUpdate and + // execute methods + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean execute(String sql, int[] columnIndexes) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } + + @Override + public boolean execute(String sql, String[] columnNames) throws SQLException { + // Implementation detailed in BigQuery JDBC Design - Wiring of executeQuery, executeUpdate and + // execute methods + throw new BigQueryJdbcSqlFeatureNotSupportedException(METHOD_NOT_IMPLEMENTED); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandler.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandler.java new file mode 100644 index 0000000000..5dbf731a0f --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandler.java @@ -0,0 +1,280 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.cloud.bigquery.QueryJobConfiguration;
+import com.google.cloud.bigquery.QueryParameterValue;
+import com.google.cloud.bigquery.StandardSQLTypeName;
+import com.google.cloud.bigquery.exception.BigQueryJdbcException;
+import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException;
+import java.sql.SQLException;
+import java.util.ArrayList;
+
+class BigQueryParameterHandler {
+  private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString());
+
+  public BigQueryParameterHandler(int parameterCount) {
+    this.parametersArraySize = parameterCount;
+  }
+
+  BigQueryParameterHandler(int parameterCount, ArrayList<BigQueryJdbcParameter> parametersList) {
+    this.parametersArraySize = parameterCount;
+    this.parametersList = parametersList;
+  }
+
+  // Indicates whether the parameter is input, output, or both.
+  // Default is UNSPECIFIED.
+  // Used by CallableStatement.
+  enum BigQueryStatementParameterType {
+    UNSPECIFIED,
+    IN,
+    OUT,
+    INOUT
+  }
+
+  private int parametersArraySize;
+  ArrayList<BigQueryJdbcParameter> parametersList = new ArrayList<>(parametersArraySize);
+
+  private long highestIndex = 0;
+
+  QueryJobConfiguration.Builder configureParameters(
+      QueryJobConfiguration.Builder jobConfigurationBuilder) throws SQLException {
+    LOG.finest("++enter++");
+    try {
+      for (int i = 1; i <= this.parametersArraySize; i++) {
+
+        Object parameterValue = getParameter(i);
+        StandardSQLTypeName sqlType = getSqlType(i);
+        LOG.finest(
+            "Parameter %s of type %s at index %s added to QueryJobConfiguration",
+            parameterValue, sqlType, i);
+        jobConfigurationBuilder.addPositionalParameter(
+            QueryParameterValue.of(parameterValue, sqlType));
+      }
+    } catch (NullPointerException e) {
+      if (e.getMessage() != null && e.getMessage().contains("Null type")) {
+        throw new BigQueryJdbcException("One or more parameters missing in Prepared statement.", e);
+      }
+      throw e;
+    }
+    return jobConfigurationBuilder;
+  }
+
+  void setParameter(int parameterIndex, Object value, Class<?> type)
+      throws BigQueryJdbcSqlFeatureNotSupportedException {
+    LOG.finest("++enter++");
+    LOG.finest("setParameter called by : %s", type.getName());
+    checkValidIndex(parameterIndex);
+
+    int arrayIndex = parameterIndex - 1;
+    if (parameterIndex >= this.highestIndex || this.parametersList.get(arrayIndex) == null) {
+      parametersList.ensureCapacity(parameterIndex);
+      while (parametersList.size() < parameterIndex) {
+        parametersList.add(null);
+      }
+      parametersList.set(arrayIndex, new BigQueryJdbcParameter());
+    }
+    this.highestIndex = Math.max(parameterIndex, highestIndex);
+    BigQueryJdbcParameter parameter = parametersList.get(arrayIndex);
+
+    parameter.setIndex(parameterIndex);
+    parameter.setValue(value);
+    parameter.setType(type);
+    parameter.setSqlType(BigQueryJdbcTypeMappings.classToType(type));
+    parameter.setParamName("");
+    parameter.setParamType(BigQueryStatementParameterType.UNSPECIFIED);
+    parameter.setScale(-1);
+
+    LOG.finest("Parameter set { %s }", parameter.toString());
+  }
+
+  private void checkValidIndex(int parameterIndex) {
+    if (parameterIndex > this.parametersArraySize) {
+      throw new IndexOutOfBoundsException("All parameters already provided.");
+    }
+  }
+
+  Object getParameter(int index) {
+    // Index is 1-based; convert to a 0-based index for Java.
+    int arrayIndex = index - 1;
+    if (parametersList.size() <= arrayIndex || parametersList.get(arrayIndex) == null) {
+      return null;
+    }
+    return parametersList.get(arrayIndex).getValue();
+  }
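For reference, the wiring that `configureParameters` performs corresponds to the public google-cloud-bigquery builder API, where each `?` in the SQL is bound in order by `addPositionalParameter`. A standalone sketch against a real public dataset:

```java
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.QueryParameterValue;

class PositionalParameterSketch {
  static QueryJobConfiguration buildConfig() {
    // Two positional parameters, bound in the order the '?' markers appear.
    return QueryJobConfiguration.newBuilder(
            "SELECT word FROM `bigquery-public-data.samples.shakespeare` "
                + "WHERE word_count > ? AND corpus = ?")
        .addPositionalParameter(QueryParameterValue.int64(100L))
        .addPositionalParameter(QueryParameterValue.string("hamlet"))
        .build();
  }
}
```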
+
+  Class<?> getType(int index) {
+    // Index is 1-based; convert to a 0-based index for Java.
+    int arrayIndex = index - 1;
+    if (parametersList.size() <= arrayIndex || parametersList.get(arrayIndex) == null) {
+      return null;
+    }
+    return parametersList.get(arrayIndex).getType();
+  }
+
+  StandardSQLTypeName getSqlType(int index) {
+    // Index is 1-based; convert to a 0-based index for Java.
+    int arrayIndex = index - 1;
+    if (parametersList.size() <= arrayIndex || parametersList.get(arrayIndex) == null) {
+      return null;
+    }
+    return parametersList.get(arrayIndex).getSqlType();
+  }
+
+  void clearParameters() {
+    LOG.finest("++enter++");
+    parametersList.clear();
+    highestIndex = 0;
+  }
+
+  // Set parameter by name and type
+  void setParameter(
+      String paramName,
+      Object value,
+      Class<?> type,
+      BigQueryStatementParameterType paramType,
+      int scale)
+      throws BigQueryJdbcSqlFeatureNotSupportedException {
+    LOG.finest("++enter++");
+    LOG.finest("setParameter called by : %s", type.getName());
+    if (paramName == null || paramName.isEmpty()) {
+      throw new IllegalArgumentException("paramName cannot be null or empty");
+    }
+    BigQueryJdbcParameter parameter = null;
+    for (BigQueryJdbcParameter p : parametersList) {
+      if (paramName.equals(p.getParamName())) {
+        parameter = p;
+        break;
+      }
+    }
+    if (parameter == null) {
+      // Add a new parameter.
+      parameter = new BigQueryJdbcParameter();
+      parameter.setIndex(-1);
+    }
+    parameter.setValue(value);
+    parameter.setType(type);
+    parameter.setSqlType(BigQueryJdbcTypeMappings.classToType(type));
+    parameter.setParamName(paramName);
+    parameter.setParamType(paramType);
+    parameter.setScale(scale);
+    if (parameter.getIndex() == -1) {
+      parametersList.add(parameter);
+    }
+    LOG.finest("Parameter set { %s }", parameter.toString());
+  }
+
+  // Set parameter by index and type
+  void setParameter(
+      int parameterIndex,
+      Object value,
+      Class<?> type,
+      BigQueryStatementParameterType paramType,
+      int scale)
+      throws BigQueryJdbcSqlFeatureNotSupportedException {
+    LOG.finest("++enter++");
+    LOG.finest("setParameter called by : %s", type.getName());
+    checkValidIndex(parameterIndex);
+    int arrayIndex = parameterIndex - 1;
+    if (parameterIndex >= this.highestIndex || this.parametersList.get(arrayIndex) == null) {
+      parametersList.ensureCapacity(parameterIndex);
+      while (parametersList.size() < parameterIndex) {
+        parametersList.add(null);
+      }
+      parametersList.set(arrayIndex, new BigQueryJdbcParameter());
+    }
+    this.highestIndex = Math.max(parameterIndex, highestIndex);
+    BigQueryJdbcParameter parameter = parametersList.get(arrayIndex);
+
+    parameter.setIndex(parameterIndex);
+    parameter.setValue(value);
+    parameter.setType(type);
+    parameter.setSqlType(BigQueryJdbcTypeMappings.classToType(type));
+    parameter.setParamName("");
+    parameter.setParamType(paramType);
+    parameter.setScale(scale);
+
+    LOG.finest("Parameter set { %s }", parameter.toString());
+  }
+
+  // Get parameter by name
+  Object getParameter(String name) {
+    for (BigQueryJdbcParameter p : parametersList) {
+      if (name.equals(p.getParamName())) {
+        return p.getValue();
+      }
+    }
+    return null;
+  }
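Both `setParameter` overloads rely on the same padding trick: JDBC allows parameters to be set in any order, so the backing list is grown with `null` placeholders before a 1-based slot is written. The pattern in isolation, as a generic sketch:

```java
import java.util.ArrayList;

class SparseSlotSketch {
  // Grow the list with null placeholders so a 1-based index can be written
  // even when earlier slots have not been provided yet.
  static <T> void setOneBased(ArrayList<T> slots, int oneBasedIndex, T value) {
    while (slots.size() < oneBasedIndex) {
      slots.add(null); // unset parameter slots remain null until supplied
    }
    slots.set(oneBasedIndex - 1, value);
  }
}
```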
+
+  // Get parameter type by index
+  BigQueryStatementParameterType getParameterType(int index) {
+    // Index is 1-based; convert to a 0-based index for Java.
+    int arrayIndex = index - 1;
+    if (parametersList.size() <= arrayIndex || parametersList.get(arrayIndex) == null) {
+      return null;
+    }
+    return parametersList.get(arrayIndex).getParamType();
+  }
+
+  // Get parameter type by name
+  BigQueryStatementParameterType getParameterType(String name) {
+    for (BigQueryJdbcParameter p : parametersList) {
+      if (name.equals(p.getParamName())) {
+        return p.getParamType();
+      }
+    }
+    return null;
+  }
+
+  // Get parameter scale by index
+  int getParameterScale(int index) {
+    // Index is 1-based; convert to a 0-based index for Java.
+    int arrayIndex = index - 1;
+    if (parametersList.size() <= arrayIndex || parametersList.get(arrayIndex) == null) {
+      return -1;
+    }
+    return parametersList.get(arrayIndex).getScale();
+  }
+
+  // Get parameter scale by name
+  int getParameterScale(String name) {
+    for (BigQueryJdbcParameter p : parametersList) {
+      if (name.equals(p.getParamName())) {
+        return p.getScale();
+      }
+    }
+    return -1;
+  }
+
+  Class<?> getType(String name) {
+    for (BigQueryJdbcParameter p : parametersList) {
+      if (name.equals(p.getParamName())) {
+        return p.getType();
+      }
+    }
+    return null;
+  }
+
+  StandardSQLTypeName getSqlType(String name) {
+    for (BigQueryJdbcParameter p : parametersList) {
+      if (name.equals(p.getParamName())) {
+        return p.getSqlType();
+      }
+    }
+    return null;
+  }
+}
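The next file implements `javax.sql.PooledConnection`, whose contract is driven by `ConnectionEventListener` callbacks. A consumer-side sketch, assuming some `ConnectionPoolDataSource` implementation hands out these objects:

```java
import java.sql.Connection;
import java.sql.SQLException;
import javax.sql.ConnectionEvent;
import javax.sql.ConnectionEventListener;
import javax.sql.ConnectionPoolDataSource;
import javax.sql.PooledConnection;

class PoolConsumerSketch {
  static void checkout(ConnectionPoolDataSource cpds) throws SQLException {
    PooledConnection pooled = cpds.getPooledConnection();
    pooled.addConnectionEventListener(
        new ConnectionEventListener() {
          @Override
          public void connectionClosed(ConnectionEvent event) {
            // Logical handle closed: the PooledConnection can return to the
            // idle pool without tearing down the physical connection.
          }

          @Override
          public void connectionErrorOccurred(ConnectionEvent event) {
            // Fatal error reported: discard this PooledConnection.
          }
        });
    try (Connection handle = pooled.getConnection()) {
      // Work with the logical handle; close() returns it to the pool.
    }
  }
}
```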
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnection.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnection.java
new file mode 100644
index 0000000000..f3f5e22865
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnection.java
@@ -0,0 +1,492 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.common.annotations.VisibleForTesting;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.util.UUID;
+import java.util.concurrent.Executor;
+import java.util.concurrent.LinkedBlockingDeque;
+import javax.sql.ConnectionEvent;
+import javax.sql.ConnectionEventListener;
+import javax.sql.PooledConnection;
+import javax.sql.StatementEventListener;
+
+class BigQueryPooledConnection implements PooledConnection {
+  private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString());
+
+  private String id; // Mainly for internal use
+  private Connection bqConnection;
+  private boolean inUse = false;
+  private Long listenerPoolSize = 10L;
+  private LinkedBlockingDeque<ConnectionEventListener> listeners;
+
+  BigQueryPooledConnection(BigQueryConnection bqConnection) {
+    this.bqConnection = bqConnection;
+    this.id = UUID.randomUUID().toString();
+    this.listenerPoolSize = bqConnection.getListenerPoolSize();
+    if (getListenerPoolSize() > 0L) {
+      listeners = new LinkedBlockingDeque<>(getListenerPoolSize().intValue());
+    } else {
+      listeners = new LinkedBlockingDeque<>();
+    }
+  }
+
+  Long getListenerPoolSize() {
+    return listenerPoolSize;
+  }
+
+  @VisibleForTesting
+  boolean inUse() {
+    return inUse;
+  }
+
+  @VisibleForTesting
+  boolean isListenerPooled(ConnectionEventListener l) {
+    return listeners.contains(l);
+  }
+
+  @Override
+  public synchronized Connection getConnection() throws SQLException {
+    LOG.finest("++enter++");
+    if (inUse) {
+      throw new SQLException("PooledConnection is already in use.");
+    }
+    inUse = true;
+    // Return a wrapper around the underlying physical connection.
+    return new BigQueryPooledConnectionWrapper(bqConnection, this);
+  }
+
+  @Override
+  public synchronized void close() throws SQLException {
+    LOG.finest("++enter++");
+    // Notify listeners that the *PooledConnection* is being closed.
+    ConnectionEvent event = new ConnectionEvent(this);
+    for (ConnectionEventListener listener : listeners) {
+      listener.connectionClosed(event); // This is likely not the intended event for this action
+    }
+    // Marks the pooled connection as no longer in use.
+    inUse = false;
+  }
+
+  @Override
+  public synchronized void addConnectionEventListener(ConnectionEventListener listener) {
+    LOG.finest("++enter++");
+    if (listener == null) {
+      return;
+    }
+    if (this.listeners.contains(listener)) {
+      return;
+    }
+    this.listeners.add(listener);
+  }
+
+  @Override
+  public synchronized void removeConnectionEventListener(ConnectionEventListener listener) {
+    LOG.finest("++enter++");
+    if (listener == null) {
+      return;
+    }
+    if (!this.listeners.contains(listener)) {
+      return;
+    }
+    this.listeners.remove(listener);
+  }
+
+  // Method called by the BigQueryPooledConnectionWrapper when the logical
+  // Connection is closed.
+  public synchronized void connectionHandleClosed(BigQueryPooledConnectionWrapper handle) {
+    LOG.finest("++enter++");
+    inUse = false;
+    ConnectionEvent event = new ConnectionEvent(this);
+    for (ConnectionEventListener listener : listeners) {
+      listener.connectionClosed(event);
+    }
+    LOG.finest("Connection handle returned to the pool.");
+  }
+
+  // Method to notify listeners about a connection error. This can be called
+  // by the application if it is using PooledConnection directly, or by the
+  // BigQueryPooledConnectionWrapper when a connection is aborted.
+  public synchronized void fireConnectionError(SQLException e) {
+    LOG.finest("++enter++");
+    inUse = false;
+    ConnectionEvent event = new ConnectionEvent(this, e);
+    for (ConnectionEventListener listener : listeners) {
+      listener.connectionErrorOccurred(event);
+    }
+    LOG.finest("Connection handle removed from the pool due to error: %s", e.getMessage());
+    // Listeners no longer need to listen for this connection since it has been removed
+    // from the pool.
+    for (ConnectionEventListener listener : listeners) {
+      removeConnectionEventListener(listener);
+    }
+  }
+
+  @Override
+  public void addStatementEventListener(StatementEventListener arg0) {
+    throw new UnsupportedOperationException(
+        "Method 'addStatementEventListener' is not supported by the BQ Driver");
+  }
+
+  @Override
+  public void removeStatementEventListener(StatementEventListener arg0) {
+    throw new UnsupportedOperationException(
+        "Method 'removeStatementEventListener' is not supported by the BQ Driver");
+  }
+
+  // Inner class: Connection wrapper around the actual physical Connection.
+  // This class notifies listeners by calling the listener notification methods
+  // provided by the pooled connection.
+  static class BigQueryPooledConnectionWrapper implements Connection {
+    private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString());
+    private Connection bqConnectionDelegate;
+    private BigQueryPooledConnection pooledConnection;
+    private boolean closed = false;
+
+    public BigQueryPooledConnectionWrapper(
+        Connection bqConnectionDelegate, BigQueryPooledConnection pooledConnection) {
+      this.bqConnectionDelegate = bqConnectionDelegate;
+      this.pooledConnection = pooledConnection;
+    }
+
+    @Override
+    public void close() throws SQLException {
+      LOG.finest("++enter++");
+      if (!closed) {
+        // Instead of physically closing, we notify the PooledConnection
+        // that this handle is no longer in use.
+ pooledConnection.connectionHandleClosed(this); + closed = true; + LOG.finest("Logical connection closed (returned to pool)."); + } + } + + @Override + public boolean isClosed() throws SQLException { + return closed || bqConnectionDelegate.isClosed(); + } + + @Override + public java.sql.Statement createStatement() throws SQLException { + return bqConnectionDelegate.createStatement(); + } + + @Override + public java.sql.PreparedStatement prepareStatement(String sql) throws SQLException { + return bqConnectionDelegate.prepareStatement(sql); + } + + @Override + public java.sql.CallableStatement prepareCall(String sql) throws SQLException { + return bqConnectionDelegate.prepareCall(sql); + } + + @Override + public String nativeSQL(String sql) throws SQLException { + return bqConnectionDelegate.nativeSQL(sql); + } + + @Override + public void setAutoCommit(boolean autoCommit) throws SQLException { + bqConnectionDelegate.setAutoCommit(autoCommit); + } + + @Override + public boolean getAutoCommit() throws SQLException { + return bqConnectionDelegate.getAutoCommit(); + } + + @Override + public void commit() throws SQLException { + bqConnectionDelegate.commit(); + } + + @Override + public void rollback() throws SQLException { + bqConnectionDelegate.rollback(); + } + + @Override + public java.sql.DatabaseMetaData getMetaData() throws SQLException { + return bqConnectionDelegate.getMetaData(); + } + + @Override + public void setReadOnly(boolean readOnly) throws SQLException { + bqConnectionDelegate.setReadOnly(readOnly); + } + + @Override + public boolean isReadOnly() throws SQLException { + return bqConnectionDelegate.isReadOnly(); + } + + @Override + public void setCatalog(String catalog) throws SQLException { + bqConnectionDelegate.setCatalog(catalog); + } + + @Override + public String getCatalog() throws SQLException { + return bqConnectionDelegate.getCatalog(); + } + + @Override + public void setTransactionIsolation(int level) throws SQLException { + bqConnectionDelegate.setTransactionIsolation(level); + } + + @Override + public int getTransactionIsolation() throws SQLException { + return bqConnectionDelegate.getTransactionIsolation(); + } + + @Override + public java.sql.SQLWarning getWarnings() throws SQLException { + return bqConnectionDelegate.getWarnings(); + } + + @Override + public void clearWarnings() throws SQLException { + bqConnectionDelegate.clearWarnings(); + } + + @Override + public java.sql.Statement createStatement(int resultSetType, int resultSetConcurrency) + throws SQLException { + return bqConnectionDelegate.createStatement(resultSetType, resultSetConcurrency); + } + + @Override + public java.sql.PreparedStatement prepareStatement( + String sql, int resultSetType, int resultSetConcurrency) throws SQLException { + return bqConnectionDelegate.prepareStatement(sql, resultSetType, resultSetConcurrency); + } + + @Override + public java.sql.CallableStatement prepareCall( + String sql, int resultSetType, int resultSetConcurrency) throws SQLException { + return bqConnectionDelegate.prepareCall(sql, resultSetType, resultSetConcurrency); + } + + @Override + public java.util.Map> getTypeMap() throws SQLException { + return bqConnectionDelegate.getTypeMap(); + } + + @Override + public void setTypeMap(java.util.Map> map) throws SQLException { + bqConnectionDelegate.setTypeMap(map); + } + + @Override + public void setHoldability(int holdability) throws SQLException { + bqConnectionDelegate.setHoldability(holdability); + } + + @Override + public int getHoldability() throws SQLException { + 
return bqConnectionDelegate.getHoldability(); + } + + @Override + public java.sql.Savepoint setSavepoint() throws SQLException { + return bqConnectionDelegate.setSavepoint(); + } + + @Override + public java.sql.Savepoint setSavepoint(String name) throws SQLException { + return bqConnectionDelegate.setSavepoint(name); + } + + @Override + public void rollback(java.sql.Savepoint savepoint) throws SQLException { + bqConnectionDelegate.rollback(savepoint); + } + + @Override + public void releaseSavepoint(java.sql.Savepoint savepoint) throws SQLException { + bqConnectionDelegate.releaseSavepoint(savepoint); + } + + @Override + public java.sql.Statement createStatement( + int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException { + return bqConnectionDelegate.createStatement( + resultSetType, resultSetConcurrency, resultSetHoldability); + } + + @Override + public java.sql.PreparedStatement prepareStatement( + String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws SQLException { + return bqConnectionDelegate.prepareStatement( + sql, resultSetType, resultSetConcurrency, resultSetHoldability); + } + + @Override + public java.sql.CallableStatement prepareCall( + String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) + throws SQLException { + return bqConnectionDelegate.prepareCall( + sql, resultSetType, resultSetConcurrency, resultSetHoldability); + } + + @Override + public java.sql.PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) + throws SQLException { + return bqConnectionDelegate.prepareStatement(sql, autoGeneratedKeys); + } + + @Override + public java.sql.PreparedStatement prepareStatement(String sql, int[] columnIndices) + throws SQLException { + return bqConnectionDelegate.prepareStatement(sql, columnIndices); + } + + @Override + public java.sql.PreparedStatement prepareStatement(String sql, String[] columnNames) + throws SQLException { + return bqConnectionDelegate.prepareStatement(sql, columnNames); + } + + @Override + public java.sql.Clob createClob() throws SQLException { + return bqConnectionDelegate.createClob(); + } + + @Override + public java.sql.Blob createBlob() throws SQLException { + return bqConnectionDelegate.createBlob(); + } + + @Override + public java.sql.NClob createNClob() throws SQLException { + return bqConnectionDelegate.createNClob(); + } + + @Override + public java.sql.SQLXML createSQLXML() throws SQLException { + return bqConnectionDelegate.createSQLXML(); + } + + @Override + public boolean isValid(int timeout) throws SQLException { + return bqConnectionDelegate.isValid(timeout); + } + + @Override + public void setClientInfo(String name, String value) throws java.sql.SQLClientInfoException { + bqConnectionDelegate.setClientInfo(name, value); + } + + @Override + public void setClientInfo(java.util.Properties properties) + throws java.sql.SQLClientInfoException { + bqConnectionDelegate.setClientInfo(properties); + } + + @Override + public String getClientInfo(String name) throws SQLException { + return bqConnectionDelegate.getClientInfo(name); + } + + @Override + public java.util.Properties getClientInfo() throws SQLException { + return bqConnectionDelegate.getClientInfo(); + } + + @Override + public java.sql.Array createArrayOf(String typeName, Object[] elements) throws SQLException { + return bqConnectionDelegate.createArrayOf(typeName, elements); + } + + @Override + public java.sql.Struct createStruct(String typeName, Object[] attributes) throws 
SQLException {
+      return bqConnectionDelegate.createStruct(typeName, attributes);
+    }
+
+    @Override
+    public <T> T unwrap(Class<T> iface) throws SQLException {
+      return bqConnectionDelegate.unwrap(iface);
+    }
+
+    @Override
+    public boolean isWrapperFor(Class<?> iface) throws SQLException {
+      return bqConnectionDelegate.isWrapperFor(iface);
+    }
+
+    @Override
+    public void setSchema(String schema) throws SQLException {
+      bqConnectionDelegate.setSchema(schema);
+    }
+
+    @Override
+    public String getSchema() throws SQLException {
+      return bqConnectionDelegate.getSchema();
+    }
+
+    @Override
+    public void setNetworkTimeout(java.util.concurrent.Executor executor, int milliseconds)
+        throws SQLException {
+      bqConnectionDelegate.setNetworkTimeout(executor, milliseconds);
+    }
+
+    @Override
+    public int getNetworkTimeout() throws SQLException {
+      return bqConnectionDelegate.getNetworkTimeout();
+    }
+
+    @Override
+    public void abort(Executor arg0) throws SQLException {
+      LOG.finest("++enter++");
+      if (!closed) {
+        // We notify the pooled connection that the physical connection
+        // is being aborted. We assume here that abort() is called for
+        // error cases.
+        SQLException e = new SQLException("Connection is being terminated and aborted");
+        pooledConnection.fireConnectionError(e);
+        closed = true;
+        LOG.finest("Logical connection aborted (removed from pool).");
+      }
+      // Call the delegate abort to actually close the underlying connection.
+      bqConnectionDelegate.abort(arg0);
+    }
+  }
+
+  @Override
+  public int hashCode() {
+    final int prime = 31;
+    int result = 1;
+    result = prime * result + ((id == null) ? 0 : id.hashCode());
+    return result;
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) return true;
+    if (obj == null) return false;
+    if (getClass() != obj.getClass()) return false;
+    BigQueryPooledConnection other = (BigQueryPooledConnection) obj;
+    if (id == null) {
+      if (other.id != null) return false;
+    } else if (!id.equals(other.id)) return false;
+    return true;
+  }
+}
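The file that follows wires positional `?` parameters into `PreparedStatement`. A usage sketch with a hypothetical table; the `setInt` overload is not shown in this excerpt but is part of the standard `PreparedStatement` contract:

```java
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

class PreparedStatementSketch {
  // connection is assumed to come from the BigQuery JDBC driver.
  static void query(Connection connection) throws SQLException {
    String sql = "SELECT name FROM `my-project.my_dataset.users` WHERE age > ? AND active = ?";
    try (PreparedStatement statement = connection.prepareStatement(sql)) {
      statement.setInt(1, 21);      // parameter indices are 1-based
      statement.setBoolean(2, true);
      try (ResultSet results = statement.executeQuery()) {
        while (results.next()) {
          System.out.println(results.getString("name"));
        }
      }
    }
  }
}
```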
+ */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.gax.retrying.RetrySettings; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics.StatementType; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryParameterValue; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest; +import com.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse; +import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient; +import com.google.cloud.bigquery.storage.v1.TableName; +import com.google.gson.Gson; +import com.google.gson.JsonArray; +import com.google.gson.JsonObject; +import com.google.protobuf.Descriptors.DescriptorValidationException; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Clob; +import java.sql.Date; +import java.sql.NClob; +import java.sql.ParameterMetaData; +import java.sql.PreparedStatement; +import java.sql.Ref; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.RowId; +import java.sql.SQLException; +import java.sql.SQLXML; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Calendar; +import java.util.LinkedList; +import java.util.Queue; + +class BigQueryPreparedStatement extends BigQueryStatement implements PreparedStatement { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private static final char POSITIONAL_PARAMETER_CHAR = '?'; + // Making this protected so BigQueryCallableStatement subclass can access the parameters. 
+ protected final BigQueryParameterHandler parameterHandler; + protected int parameterCount = 0; + protected String currentQuery; + private Queue<ArrayList<BigQueryJdbcParameter>> batchParameters = new LinkedList<>(); + private Schema insertSchema = null; + private TableName insertTableName = null; + + BigQueryPreparedStatement(BigQueryConnection connection, String query) { + super(connection); + setCurrentQuery(query); + this.parameterHandler = new BigQueryParameterHandler(this.parameterCount); + } + + void setCurrentQuery(String currentQuery) { + this.parameterCount = getParameterCount(currentQuery); + this.currentQuery = currentQuery; + } + + private int getParameterCount(String query) { + LOG.finest("++enter++"); + return (int) query.chars().filter(ch -> ch == POSITIONAL_PARAMETER_CHAR).count(); + } + + @Override + public ResultSet executeQuery() throws SQLException { + LOG.finest("++enter++"); + logQueryExecutionStart(this.currentQuery); + try { + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(this.currentQuery); + jobConfiguration.setParameterMode("POSITIONAL"); + jobConfiguration = this.parameterHandler.configureParameters(jobConfiguration); + runQuery(this.currentQuery, jobConfiguration.build()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + return getCurrentResultSet(); + } + + @Override + public long executeLargeUpdate() throws SQLException { + LOG.finest("++enter++"); + logQueryExecutionStart(this.currentQuery); + try { + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(this.currentQuery); + jobConfiguration.setParameterMode("POSITIONAL"); + jobConfiguration = this.parameterHandler.configureParameters(jobConfiguration); + runQuery(this.currentQuery, jobConfiguration.build()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + return this.currentUpdateCount; + } + + @Override + public int executeUpdate() throws SQLException { + LOG.finest("++enter++"); + return checkUpdateCount(executeLargeUpdate()); + } + + @Override + public boolean execute() throws SQLException { + LOG.finest("++enter++"); + logQueryExecutionStart(this.currentQuery); + try { + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(this.currentQuery); + jobConfiguration.setParameterMode("POSITIONAL"); + jobConfiguration = this.parameterHandler.configureParameters(jobConfiguration); + runQuery(this.currentQuery, jobConfiguration.build()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + return getCurrentResultSet() != null; + } + + @Override + public void clearParameters() { + LOG.finest("++enter++"); + this.parameterHandler.clearParameters(); + this.parameterCount = 0; + } + + @Override + public void setNull(int parameterIndex, int sqlType) { + // TODO(neenu): implement null case + } + + @Override + public void setBoolean(int parameterIndex, boolean x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Boolean.class); + } + + @Override + public void setByte(int parameterIndex, byte x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Byte.class); + } + + @Override + public void setShort(int parameterIndex, short x) { + // TODO(neenu): implement Short conversion.
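For illustration, a minimal usage sketch of this positional-parameter path (the JDBC URL and table name are placeholders, not part of this patch): each '?' below is counted by getParameterCount() and bound through BigQueryParameterHandler before the job runs with parameter mode POSITIONAL.

    try (Connection conn = DriverManager.getConnection("jdbc:bigquery://...");
         PreparedStatement ps =
             conn.prepareStatement("SELECT name FROM `my-project.my_dataset.people` WHERE age > ?")) {
      ps.setInt(1, 30); // stored at position 1 by the parameter handler
      try (ResultSet rs = ps.executeQuery()) { // submits the job with POSITIONAL parameter mode
        while (rs.next()) {
          System.out.println(rs.getString(1));
        }
      }
    }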
+ } + + @Override + public void setInt(int parameterIndex, int x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Integer.class); + } + + @Override + public void setLong(int parameterIndex, long x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Long.class); + } + + @Override + public void setFloat(int parameterIndex, float x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Float.class); + } + + @Override + public void setDouble(int parameterIndex, double x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, Double.class); + } + + @Override + public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, BigDecimal.class); + } + + @Override + public void setString(int parameterIndex, String x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x, String.class); + } + + @Override + public void setBytes(int parameterIndex, byte[] x) { + // TODO(neenu): implement Bytes conversion. + } + + @Override + public void setDate(int parameterIndex, Date x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x.toString(), String.class); + } + + @Override + public void setTime(int parameterIndex, Time x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x.toString(), String.class); + } + + @Override + public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException { + checkClosed(); + this.parameterHandler.setParameter(parameterIndex, x.toString(), String.class); + } + + @Override + public void setAsciiStream(int parameterIndex, InputStream x, int length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setUnicodeStream(int parameterIndex, InputStream x, int length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBinaryStream(int parameterIndex, InputStream x, int length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setObject(int parameterIndex, Object x, int targetSqlType) {} + + @Override + public void setObject(int parameterIndex, Object x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void addBatch() { + LOG.finest("++enter++"); + ArrayList currentParameterList = + deepCopyParameterList(this.parameterHandler.parametersList); + this.batchParameters.add(currentParameterList); + } + + private ArrayList deepCopyParameterList( + ArrayList parametersList) { + ArrayList copiedParameterList = new ArrayList<>(); + for (BigQueryJdbcParameter parameter : parametersList) { + BigQueryJdbcParameter newParameter = new BigQueryJdbcParameter(parameter); + copiedParameterList.add(newParameter); + } + return copiedParameterList; + } + + @Override + public int[] executeBatch() throws SQLException { + LOG.finest("++enter++"); + int[] result = new int[this.batchParameters.size()]; + if (this.batchParameters.isEmpty()) { + return result; + } + if (useWriteAPI()) { + try (BigQueryWriteClient writeClient = this.connection.getBigQueryWriteClient()) { + LOG.info("Using Write API for bulk INSERT operation."); + ArrayList currentParameterList = this.batchParameters.peek(); + if (this.insertSchema == null && this.insertTableName == null) { + QueryStatistics insertJobQueryStatistics = + getQueryStatistics(getWriteBatchJobConfiguration(currentParameterList)); + 
setInsertMetadata(insertJobQueryStatistics); + } + + long rowCount = bulkInsertWithWriteAPI(writeClient); + int[] insertArray = new int[Math.toIntExact(rowCount)]; + Arrays.fill(insertArray, 1); + return insertArray; + + } catch (DescriptorValidationException | IOException | InterruptedException e) { + throw new BigQueryJdbcRuntimeException(e); + } + + } else { + try { + LOG.info("Using individual INSERT query runs."); + int count = this.batchParameters.size(); + StringBuilder combinedQuery = new StringBuilder(); + for (int i = 0; i < count; i++) { + + if (this.currentQuery.trim().endsWith(";")) { + combinedQuery.append(this.currentQuery); + } else { + combinedQuery.append(this.currentQuery).append(";"); + } + } + // executeBatch in PreparedStatement is used for BulkInsert/DML. + // If not correct Type, fails later. + runQuery( + combinedQuery.toString(), getStandardBatchJobConfiguration(combinedQuery.toString())); + int i = 0; + while (getUpdateCount() != -1 && i < count) { + result[i] = getUpdateCount(); + getMoreResults(); + i++; + } + return result; + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + } + + private long bulkInsertWithWriteAPI(BigQueryWriteClient bigQueryWriteClient) + throws DescriptorValidationException, + IOException, + InterruptedException, + BigQueryJdbcException { + LOG.finest("++enter++"); + RetrySettings retrySettings = this.connection.getRetrySettings(); + + BigQueryJdbcBulkInsertWriter bulkInsertWriter = new BigQueryJdbcBulkInsertWriter(); + bulkInsertWriter.initialize(this.insertTableName, bigQueryWriteClient, retrySettings); + + try { + long offset = 0; + JsonArray jsonArray = new JsonArray(); + Gson gson = new Gson(); + int count = this.batchParameters.size(); + for (int i = 0; i < count; i++) { + + ArrayList parameterList = this.batchParameters.poll(); + FieldList fieldLists = this.insertSchema.getFields(); + if (fieldLists.size() == parameterList.size()) { + + JsonObject rowObject = new JsonObject(); + for (int j = 0; j < parameterList.size(); j++) { + BigQueryJdbcParameter parameter = parameterList.get(j); + if (parameter.getSqlType() == StandardSQLTypeName.STRING) { + rowObject.addProperty(fieldLists.get(j).getName(), parameter.getValue().toString()); + } else { + rowObject.addProperty(fieldLists.get(j).getName(), gson.toJson(parameter.getValue())); + } + } + jsonArray.add(rowObject); + + if (jsonArray.size() == this.querySettings.getWriteAPIAppendRowCount() + || this.batchParameters.size() == 0) { + bulkInsertWriter.append(jsonArray, offset); + LOG.finest("Append called "); + offset += jsonArray.size(); + jsonArray = new JsonArray(); + } + } else { + throw new BigQueryJdbcException("Mismatch between field count and parameter count."); + } + } + } catch (BigQueryJdbcException e) { + throw new RuntimeException(e); + } + + long rowCount = bulkInsertWriter.cleanup(bigQueryWriteClient); + + BatchCommitWriteStreamsRequest commitRequest = + BatchCommitWriteStreamsRequest.newBuilder() + .setParent(this.insertTableName.toString()) + .addWriteStreams(bulkInsertWriter.getStreamName()) + .build(); + BatchCommitWriteStreamsResponse commitResponse = + bigQueryWriteClient.batchCommitWriteStreams(commitRequest); + if (commitResponse.hasCommitTime() == false) { + throw new BigQueryJdbcException("Error committing the streams"); + } + LOG.finest("Commit called."); + return rowCount; + } + + private void setInsertMetadata(QueryStatistics statistics) throws 
SQLException { + LOG.finest("++enter++"); + if (!statistics.getStatementType().equals(StatementType.INSERT) + || statistics.getSchema() == null + || statistics.getReferencedTables().stream().distinct().count() > 1) { + throw new BigQueryJdbcException( + "Use java.sql.Statement.executeBatch() for heterogeneous DML batches"); + } + + this.insertSchema = statistics.getSchema(); + TableId tableID = statistics.getReferencedTables().get(0); + this.insertTableName = + TableName.of(tableID.getProject(), tableID.getDataset(), tableID.getTable()); + LOG.finest( + "this.insertTableName : %s, this.insertSchema : %s", + this.insertTableName, this.insertSchema.toString()); + } + + QueryJobConfiguration getWriteBatchJobConfiguration( + ArrayList currentParameterList) throws SQLException { + LOG.finest("++enter++"); + BigQueryParameterHandler batchHandler = + new BigQueryParameterHandler(this.parameterCount, currentParameterList); + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(this.currentQuery); + jobConfiguration.setParameterMode("POSITIONAL"); + jobConfiguration = batchHandler.configureParameters(jobConfiguration); + return jobConfiguration.build(); + } + + QueryJobConfiguration getStandardBatchJobConfiguration(String query) throws SQLException { + LOG.finest("++enter++"); + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(query); + jobConfiguration.setParameterMode("POSITIONAL"); + jobConfiguration.setPriority(QueryJobConfiguration.Priority.BATCH); + int index = 0; + while (!this.batchParameters.isEmpty()) { + ArrayList parameterList = this.batchParameters.poll(); + + for (BigQueryJdbcParameter parameter : parameterList) { + Object parameterValue = parameter.getValue(); + StandardSQLTypeName sqlType = parameter.getSqlType(); + LOG.finest( + "Parameter %s of type %s at index %s added to QueryJobConfiguration", + parameterValue, sqlType, index++); + jobConfiguration.addPositionalParameter(QueryParameterValue.of(parameterValue, sqlType)); + } + } + return jobConfiguration.build(); + } + + Boolean useWriteAPI() { + LOG.finest("++enter++"); + if (this.querySettings.isUseWriteAPI()) { + if (this.batchParameters.size() >= this.querySettings.getWriteAPIActivationRowCount()) { + return true; + } + } + return false; + } + + @Override + public void setCharacterStream(int parameterIndex, Reader reader, int length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setRef(int parameterIndex, Ref x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBlob(int parameterIndex, Blob x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setClob(int parameterIndex, Clob x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setArray(int parameterIndex, Array x) { + // TODO(neenu) :IMPLEMENT ARRAY + } + + @Override + public ResultSetMetaData getMetaData() { + // TODO(neenu) :IMPLEMENT metadata + return null; + } + + @Override + public void setDate(int parameterIndex, Date x, Calendar cal) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setTime(int parameterIndex, Time x, Calendar cal) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNull(int parameterIndex, int sqlType, String typeName) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setURL(int parameterIndex, URL x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public ParameterMetaData getParameterMetaData() { + // TODO(neenu) :IMPLEMENT 
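To make the two batch paths above concrete, a sketch of driving them through the public JDBC surface (the table name is illustrative, and the row count crossing writeAPIActivationRowCount is an assumption about configuration):

    PreparedStatement ps =
        conn.prepareStatement("INSERT INTO `my-project.my_dataset.people` (name, age) VALUES (?, ?)");
    for (int i = 0; i < 1000; i++) {
      ps.setString(1, "user-" + i);
      ps.setInt(2, i);
      ps.addBatch(); // snapshots a deep copy of the current positional parameters
    }
    // executeBatch() streams the rows via the Storage Write API when useWriteAPI is
    // enabled and the batch size reaches writeAPIActivationRowCount; otherwise it
    // concatenates one INSERT statement per buffered row and runs them as a BATCH job.
    int[] counts = ps.executeBatch();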
+ return null; + } + + @Override + public void setRowId(int parameterIndex, RowId x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNString(int parameterIndex, String value) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNCharacterStream(int parameterIndex, Reader value, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNClob(int parameterIndex, NClob value) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setClob(int parameterIndex, Reader reader, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBlob(int parameterIndex, InputStream inputStream, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNClob(int parameterIndex, Reader reader, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setSQLXML(int parameterIndex, SQLXML xmlObject) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) { + // TODO(neenu) : IMPLEMENT? + } + + @Override + public void setAsciiStream(int parameterIndex, InputStream x, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBinaryStream(int parameterIndex, InputStream x, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setCharacterStream(int parameterIndex, Reader reader, long length) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setAsciiStream(int parameterIndex, InputStream x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBinaryStream(int parameterIndex, InputStream x) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setCharacterStream(int parameterIndex, Reader reader) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNCharacterStream(int parameterIndex, Reader value) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setClob(int parameterIndex, Reader reader) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setBlob(int parameterIndex, InputStream inputStream) { + // TODO :NOT IMPLEMENTED + } + + @Override + public void setNClob(int parameterIndex, Reader reader) { + // TODO :NOT IMPLEMENTED + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSet.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSet.java new file mode 100644 index 0000000000..c24e37abdd --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSet.java @@ -0,0 +1,46 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.JobId; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; + +public interface BigQueryResultSet { + /* + * This function returns Job Id for the corresponding BQ Job that generated result. 
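A sketch of how a caller could reach this accessor, assuming (as the rest of the patch suggests) that the driver's concrete result sets implement this interface:

    ResultSet rs = stmt.executeQuery("SELECT 1");
    if (rs instanceof BigQueryResultSet) {
      BigQueryResultSet bqRs = (BigQueryResultSet) rs;
      JobId jobId = bqRs.getJobId();      // null for low-latency and metadata results
      String queryId = bqRs.getQueryId(); // null for regular job-backed results
    }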
+ * Note that it is not available for certain queries (low-latency queries) and for metadata results. + * + * @return JobId object or null. + */ + public JobId getJobId(); + + /* + * This function returns Query Id for the corresponding low-latency query produced results. + * It is null for regular (non-low latency) jobs and metadata results. + * + * @return Query Id string or null. + */ + public String getQueryId(); + + /* + * Returns com.google.cloud.bigquery.JobStatistics.QueryStatistics object with statistics for the + * completed Job for non-low latency queries. + * + * @return QueryStatistics object or null. + */ + public QueryStatistics getQueryStatistics(); +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizers.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizers.java new file mode 100644 index 0000000000..15a1cca349 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizers.java @@ -0,0 +1,73 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import java.lang.ref.PhantomReference; +import java.lang.ref.ReferenceQueue; + +@InternalApi +class BigQueryResultSetFinalizers { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryResultSetFinalizers.class.getName()); + + @InternalApi + static class ArrowResultSetFinalizer extends PhantomReference { + Thread ownedThread; + + public ArrowResultSetFinalizer( + BigQueryArrowResultSet referent, + ReferenceQueue q, + Thread ownedThread) { + super(referent, q); + this.ownedThread = ownedThread; + } + + // Free resources. Remove all the hard refs + public void finalizeResources() { + LOG.finest("++enter++"); + if (ownedThread != null && !ownedThread.isInterrupted()) { + ownedThread.interrupt(); + } + } + } + + @InternalApi + static class JsonResultSetFinalizer extends PhantomReference { + Thread[] ownedThreads; + + public JsonResultSetFinalizer( + BigQueryJsonResultSet referent, + ReferenceQueue q, + Thread[] ownedThreads) { + super(referent, q); + this.ownedThreads = ownedThreads; + } + + // Free resources. 
Remove all the hard refs + public void finalizeResources() { + LOG.finest("++enter++"); + if (ownedThreads != null) { + for (Thread ownedThread : ownedThreads) { + if (!ownedThread.isInterrupted()) { + ownedThread.interrupt(); + } + } + } + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadata.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadata.java new file mode 100644 index 0000000000..d18c689333 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadata.java @@ -0,0 +1,213 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Types; + +/** This class returns ResultSetMetadata for the JSON and the Arrow ResultSets */ +class BigQueryResultSetMetadata implements ResultSetMetaData { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private final FieldList schemaFieldList; + private final Statement statement; + private final int columnCount; + + private static final int DEFAULT_DISPLAY_SIZE = 50; + + private BigQueryResultSetMetadata(FieldList schemaFieldList, Statement statement) { + LOG.finest("++enter++"); + this.schemaFieldList = schemaFieldList; + this.columnCount = schemaFieldList.size(); + this.statement = statement; + } + + static BigQueryResultSetMetadata of(FieldList schemaFieldList, Statement statement) { + return new BigQueryResultSetMetadata(schemaFieldList, statement); + } + + private Field getField(int sqlColumn) { + return this.schemaFieldList.get(sqlColumn - 1); + } + + @Override + public int getColumnCount() { + return this.columnCount; + } + + @Override + public boolean isAutoIncrement(int column) { + // BQ doesn't support auto increment + return false; + } + + @Override + public boolean isCaseSensitive(int column) { + int colType = getColumnType(column); + return colType == Types.NVARCHAR; + } + + @Override + public boolean isSearchable(int column) { + int colType = getColumnType(column); + return colType != Types.OTHER; + } + + @Override + public boolean isCurrency(int column) { + return false; + } + + @Override + public int isNullable(int column) { + Mode colMode = getField(column).getMode(); + if (colMode == null) { + return ResultSetMetaData.columnNullableUnknown; + } + return colMode == Mode.NULLABLE + ? 
ResultSetMetaData.columnNullable + : ResultSetMetaData.columnNoNulls; + } + + @Override + public boolean isSigned(int column) { + int colType = getColumnType(column); + return colType == Types.FLOAT + || colType == Types.DOUBLE + || colType == Types.BIGINT + || colType == Types.NUMERIC; + } + + @Override + public int getColumnDisplaySize(int column) { + int colType = getColumnType(column); + switch (colType) { + case Types.BOOLEAN: + return 5; + case Types.DATE: + case Types.BIGINT: + return 10; + case Types.DOUBLE: + case Types.DECIMAL: + case Types.NUMERIC: + return 14; + case Types.TIMESTAMP: + return 16; + default: + return DEFAULT_DISPLAY_SIZE; + } + } + + @Override + public String getColumnLabel(int column) { + return getField(column).getName(); + } + + @Override + public String getColumnName(int column) { + return getField(column).getName(); + } + + @Override + public int getPrecision(int column) { + return (int) (getField(column).getPrecision() != null ? getField(column).getPrecision() : 0); + } + + @Override + public int getScale(int column) { + return (int) (getField(column).getScale() != null ? getField(column).getScale() : 0); + } + + @Override + public String getTableName(int column) { + // returning "" as per the specs as there might be multiple tables involved, or we + // might be reading from the temp table + return ""; + } + + @Override + public String getCatalogName(int column) { + return ""; // not applicable + } + + @Override + public String getSchemaName(int column) { + return ""; // not applicable + } + + private StandardSQLTypeName getStandardSQLTypeName(int column) { + Field field = getField(column); + if (field.getMode() == Mode.REPEATED) { + return StandardSQLTypeName.ARRAY; + } + return getField(column).getType().getStandardType(); + } + + @Override + public int getColumnType(int column) { + return BigQueryJdbcTypeMappings.standardSQLToJavaSqlTypesMapping.get( + getStandardSQLTypeName(column)); + } + + @Override + public String getColumnTypeName(int column) { + return getStandardSQLTypeName(column).name(); + } + + @Override + public boolean isReadOnly(int column) { + return false; + } + + @Override + public boolean isWritable(int column) { + return !isReadOnly(column); + } + + @Override + public boolean isDefinitelyWritable(int column) { + return false; + } + + @Override + public String getColumnClassName(int column) { + Field field = getField(column); + if (field.getMode() == Mode.REPEATED) { + return java.sql.Array.class.getName(); + } + return BigQueryJdbcTypeMappings.standardSQLToJavaTypeMapping + .get(field.getType().getStandardType()) + .getName(); + } + + // Unsupported methods: + @Override + public T unwrap(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException("unwrap is not implemented"); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + throw new BigQueryJdbcSqlFeatureNotSupportedException("isWrapperFor is not implemented"); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySettings.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySettings.java new file mode 100644 index 0000000000..16f13a7784 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySettings.java @@ -0,0 +1,857 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.Clustering; +import com.google.cloud.bigquery.ConnectionProperty; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.EncryptionConfiguration; +import com.google.cloud.bigquery.ExternalTableDefinition; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryJobConfiguration.Priority; +import com.google.cloud.bigquery.RangePartitioning; +import com.google.cloud.bigquery.TimePartitioning; +import com.google.cloud.bigquery.UserDefinedFunction; +import java.util.List; +import java.util.Map; + +/** This class is used to pass user defined settings for execution of Queries. */ +// TODO: Expose this class as public once we decide on how to expose the slow +// query path to the end users. IMP: revisit the set of params to be exposed via BigQuerySettings +class BigQuerySettings { + + private final boolean useReadAPI; + private final int highThroughputActivationRatio; + private final int highThroughputMinTableSize; + private final boolean unsupportedHTAPIFallback; + + private final boolean enableSession; + + private final ConnectionProperty sessionInfoConnectionProperty; + + private final boolean useQueryCache; + private final String queryDialect; + private final List queryProperties; + private final Boolean allowLargeResults; + private final String kmsKeyName; + private final Clustering clustering; + + private final JobInfo.CreateDisposition createDisposition; + + private final EncryptionConfiguration destinationEncryptionConfiguration; + + private final String destinationTable; + private final String destinationDataset; + private final long destinationDatasetExpirationTime; + + private final long jobTimeoutMs; + + private final int maximumBillingTier; + + private final QueryJobConfiguration.Priority priority; + + private final RangePartitioning rangePartitioning; + + private final List schemaUpdateOptions; + + private final Map tableDefinitions; + + private final TimePartitioning timePartitioning; + + private final List userDefinedFunctions; + + private final JobInfo.WriteDisposition writeDisposition; + + private final int numBufferedRows; + + private final long maxResultPerPage; + + private final DatasetId defaultDataset; + + private final boolean useWriteAPI; + private final int writeAPIActivationRowCount; + private final int writeAPIAppendRowCount; + + private final long maxBytesBilled; + private final Map labels; + + private BigQuerySettings(Builder builder) { + this.useReadAPI = builder.useReadAPI; + this.highThroughputActivationRatio = builder.highThroughputActivationRatio; + this.highThroughputMinTableSize = builder.highThroughputMinTableSize; + this.useQueryCache = builder.useQueryCache; + this.queryDialect = builder.queryDialect; + this.queryProperties = builder.queryProperties; + this.allowLargeResults = builder.allowLargeResults; + this.kmsKeyName = builder.kmsKeyName; + this.clustering = builder.clustering; + this.createDisposition = builder.createDisposition; + 
this.destinationEncryptionConfiguration = builder.destinationEncryptionConfiguration; + this.destinationTable = builder.destinationTable; + this.destinationDataset = builder.destinationDataset; + this.destinationDatasetExpirationTime = builder.destinationDatasetExpirationTime; + this.jobTimeoutMs = builder.jobTimeoutMs; + this.maximumBillingTier = builder.maximumBillingTier; + this.priority = builder.priority; + this.rangePartitioning = builder.rangePartitioning; + this.schemaUpdateOptions = builder.schemaUpdateOptions; + this.tableDefinitions = builder.tableDefinitions; + this.timePartitioning = builder.timePartitioning; + this.userDefinedFunctions = builder.userDefinedFunctions; + this.writeDisposition = builder.writeDisposition; + this.numBufferedRows = builder.numBufferedRows; + this.maxResultPerPage = builder.maxResultPerPage; + this.defaultDataset = builder.defaultDataset; + this.enableSession = builder.enableSession; + this.unsupportedHTAPIFallback = builder.unsupportedHTAPIFallback; + this.sessionInfoConnectionProperty = builder.sessionInfoConnectionProperty; + this.useWriteAPI = builder.useWriteAPI; + this.writeAPIActivationRowCount = builder.writeAPIActivationRowCount; + this.writeAPIAppendRowCount = builder.writeAPIAppendRowCount; + this.maxBytesBilled = builder.maxBytesBilled; + this.labels = builder.labels; + } + + /** Returns a builder for a BigQuerySettings object. */ + static Builder newBuilder() { + return new Builder(); + } + + /** + * Returns useReadAPI flag, enabled by default. Read API will be used if the underlying conditions + * are satisfied and this flag is enabled + */ + Boolean getUseReadAPI() { + return useReadAPI; + } + + /** + * Returns integer value for when the connector switches to BigQuery Storage API when the number + * of pages and rows in query results exceed this value and HighThroughPutMinTableSize, + * respectively. + */ + int getHighThroughputActivationRatio() { + return highThroughputActivationRatio; + } + + /** + * Returns integer value for when query results are large, exceeding both row and page limits, the + * connector switches to the BigQuery Storage API for faster processing. + */ + int getHighThroughputMinTableSize() { + return highThroughputMinTableSize; + } + + /** + * Determines if session features are enabled. + * + *
    Enabling session-level features allows for capturing SQL activities or enabling + * multi-statement transactions. Session tracking is disabled by default. + * + * @return true if session is enabled, false otherwise. + */ + boolean isEnableSession() { + return enableSession; + } + + /** + * When the connector uses fetch workflows not supported on the High-Throughput API, this option + * specifies whether the connector falls back to the REST API or returns an error. By default it + * falls back to standard API. + * + * @return true if falls back to standard, false to error. + */ + boolean isUnsupportedHTAPIFallback() { + return unsupportedHTAPIFallback; + } + + /** + * Returns information about the BigQuery session ConnectionProperty associated with this job. + * + *
    BigQuery's sessions provide a way to link multiple jobs and maintain temporary data, such as + * temporary tables, between them. They are needed for using multi-statement transactions that + * span across multiple queries. + * + * @return An instance of {@link ConnectionProperty} containing session details, or {@code null} + * if this job is not part of a session. + */ + ConnectionProperty getSessionInfoConnectionProperty() { + return sessionInfoConnectionProperty; + } + + Boolean getUseQueryCache() { + return useQueryCache; + } + + String getQueryDialect() { + return queryDialect; + } + + List getQueryProperties() { + return this.queryProperties; + } + + /** + * Returns the KMS resource name which is the unique identifier you give to your encryption key in + * Google Cloud's Key Management Service (KMS). Tells BigQuery which key to use when encrypting or + * decrypting your data. + */ + String getKmsKeyName() { + return kmsKeyName; + } + + Boolean getAllowLargeResults() { + return allowLargeResults; + } + + /** Returns the clustering specification for the destination table. */ + Clustering getClustering() { + return clustering; + } + + /** + * Returns whether the job is allowed to create new tables. + * + * @see + * Create Disposition + */ + JobInfo.CreateDisposition getCreateDisposition() { + return createDisposition; + } + + /** Returns the custom encryption configuration (e.g., Cloud KMS keys) */ + EncryptionConfiguration getDestinationEncryptionConfiguration() { + return destinationEncryptionConfiguration; + } + + /** + * Returns the table where you want to store query results. If not provided a default temp table + * is created when needed. + */ + String getDestinationTable() { + return destinationTable; + } + + /** + * Returns the dataset where you want to store query results. If not provided a default dataset is + * created when needed. + */ + String getDestinationDataset() { + return destinationDataset; + } + + long getDestinationDatasetExpirationTime() { + return destinationDatasetExpirationTime; + } + + /** Returns the timeout associated with this job */ + Long getJobTimeoutMs() { + return jobTimeoutMs; + } + + /** Returns the optional billing tier limit for this job. */ + Integer getMaximumBillingTier() { + return maximumBillingTier; + } + + /** Returns the query priority. */ + QueryJobConfiguration.Priority getPriority() { + return priority; + } + + /** Returns the range partitioning specification for the table */ + RangePartitioning getRangePartitioning() { + return rangePartitioning; + } + + /** + * Returns options allowing the schema of the destination table to be updated as a side effect of + * the query job. Schema update options are supported in two cases: when writeDisposition is + * WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination table is a partition + * of a table, specified by partition decorators. For normal tables, WRITE_TRUNCATE will always + * overwrite the schema. + */ + List getSchemaUpdateOptions() { + return schemaUpdateOptions; + } + + /** + * Returns the external tables definitions. If querying external data sources outside BigQuery, + * this value describes the data format, location and other properties of the data sources. By + * defining these properties, the data sources can be queried as if they were standard BigQuery + * tables. + */ + Map getTableDefinitions() { + return tableDefinitions; + } + + /** Returns the time partitioning specification for the destination table. 
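For reference, a sketch of a daily specification built with the client library's TimePartitioning type (the expiration value is illustrative):

    TimePartitioning daily =
        TimePartitioning.newBuilder(TimePartitioning.Type.DAY)
            .setExpirationMs(7L * 24 * 60 * 60 * 1000) // optional 7-day partition TTL
            .build();
    // Passed to the destination table through setTimePartitioning(daily).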
*/ + TimePartitioning getTimePartitioning() { + return timePartitioning; + } + + /** + * Returns user defined function resources that can be used by this query. Function resources can + * either be defined inline ({@link UserDefinedFunction.Type#INLINE}) or loaded from a Google + * Cloud Storage URI ({@link UserDefinedFunction.Type#FROM_URI}. + */ + List getUserDefinedFunctions() { + return userDefinedFunctions; + } + + /** + * Returns the action that should occur if the destination table already exists. + * + * @see + * Write Disposition + */ + JobInfo.WriteDisposition getWriteDisposition() { + return writeDisposition; + } + + /** Returns the number of rows of data to pre-fetch */ + Integer getNumBufferedRows() { + return numBufferedRows; + } + + Long getMaxResultPerPage() { + return maxResultPerPage; + } + + DatasetId getDefaultDataset() { + return defaultDataset; + } + + boolean isUseWriteAPI() { + return useWriteAPI; + } + + int getWriteAPIActivationRowCount() { + return writeAPIActivationRowCount; + } + + int getWriteAPIAppendRowCount() { + return writeAPIAppendRowCount; + } + + long getMaxBytesBilled() { + return maxBytesBilled; + } + + Map getLabels() { + return labels; + } + + @Override + public String toString() { + return "BigQuerySettings{" + + "enableSession=" + + enableSession + + ", " + + "unsupportedHTAPIFallback=" + + unsupportedHTAPIFallback + + ", " + + "sessionInfo=" + + sessionInfoConnectionProperty + + ", " + + "useReadAPI=" + + useReadAPI + + ", " + + "kmsKeyName=" + + kmsKeyName + + ", " + + "highThroughputMinTableSize=" + + highThroughputMinTableSize + + ", " + + "highThroughputActivationRatio=" + + highThroughputActivationRatio + + ", " + + "useQueryCache=" + + useQueryCache + + ", " + + "queryDialect=" + + queryDialect + + ", " + + "queryProperties=" + + queryProperties + + ", " + + "allowLargeResults=" + + allowLargeResults + + ", " + + "clustering=" + + clustering + + ", " + + "createDisposition=" + + createDisposition + + ", " + + "destinationEncryptionConfiguration=" + + destinationEncryptionConfiguration + + ", " + + "destinationTable=" + + destinationTable + + ", " + + "destinationDataset=" + + destinationDataset + + ", " + + "destinationDatasetExpirationTime=" + + destinationDatasetExpirationTime + + ", " + + "jobTimeoutMs=" + + jobTimeoutMs + + ", " + + "maximumBillingTier=" + + maximumBillingTier + + ", " + + "priority=" + + priority + + ", " + + "rangePartitioning=" + + rangePartitioning + + ", " + + "schemaUpdateOptions=" + + schemaUpdateOptions + + ", " + + "tableDefinitions=" + + tableDefinitions + + ", " + + "timePartitioning=" + + timePartitioning + + ", " + + "userDefinedFunctions=" + + userDefinedFunctions + + ", " + + "writeDisposition=" + + writeDisposition + + ", " + + "numBufferedRows=" + + numBufferedRows + + ", " + + "maxResultPerPage=" + + maxResultPerPage + + ", " + + "defaultDataset=" + + defaultDataset + + ", " + + "useWriteAPI=" + + useWriteAPI + + ", " + + "writeAPIActivationRowCount=" + + writeAPIActivationRowCount + + ", " + + "writeAPIAppendRowCount=" + + writeAPIAppendRowCount + + ", " + + "maxBytesBilled=" + + maxBytesBilled + + "}"; + } + + /** Returns a builder pre-populated using the current values of this field. 
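For instance, a sketch using only setters defined in this class:

    BigQuerySettings base =
        BigQuerySettings.newBuilder()
            .setPriority(QueryJobConfiguration.Priority.BATCH)
            .setUseQueryCache(true)
            .build();
    // toBuilder() copies every field, so a one-off variant can be derived safely:
    BigQuerySettings withReadApi = base.toBuilder().setUseReadAPI(true).build();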
*/ + Builder toBuilder() { + return new Builder(this); + } + + static final class Builder { + + private boolean useReadAPI; + private int highThroughputMinTableSize; + private int highThroughputActivationRatio; + private boolean enableSession; + private boolean unsupportedHTAPIFallback; + private ConnectionProperty sessionInfoConnectionProperty; + private boolean useQueryCache; + private String queryDialect; + private List queryProperties; + private Boolean allowLargeResults; + private String kmsKeyName; + private Clustering clustering; + private JobInfo.CreateDisposition createDisposition; + private EncryptionConfiguration destinationEncryptionConfiguration; + private String destinationTable; + private String destinationDataset; + private long destinationDatasetExpirationTime; + private long jobTimeoutMs; + private int maximumBillingTier; + private QueryJobConfiguration.Priority priority; + private RangePartitioning rangePartitioning; + private List schemaUpdateOptions; + private Map tableDefinitions; + private TimePartitioning timePartitioning; + private List userDefinedFunctions; + private JobInfo.WriteDisposition writeDisposition; + private int numBufferedRows; + private long maxResultPerPage; + private DatasetId defaultDataset; + private boolean useWriteAPI; + private int writeAPIActivationRowCount; + private int writeAPIAppendRowCount; + private long maxBytesBilled; + private Map labels; + + Builder() { + this.withDefaultValues(); + } + + private Builder(BigQuerySettings querySettings) { + this.useReadAPI = querySettings.getUseReadAPI(); + this.highThroughputMinTableSize = querySettings.getHighThroughputMinTableSize(); + this.highThroughputActivationRatio = querySettings.getHighThroughputActivationRatio(); + this.enableSession = querySettings.isEnableSession(); + this.unsupportedHTAPIFallback = querySettings.isUnsupportedHTAPIFallback(); + this.sessionInfoConnectionProperty = querySettings.getSessionInfoConnectionProperty(); + this.useQueryCache = querySettings.getUseQueryCache(); + this.queryDialect = querySettings.getQueryDialect(); + this.queryProperties = querySettings.getQueryProperties(); + this.allowLargeResults = querySettings.getAllowLargeResults(); + this.kmsKeyName = querySettings.getKmsKeyName(); + this.clustering = querySettings.getClustering(); + this.createDisposition = querySettings.getCreateDisposition(); + this.destinationEncryptionConfiguration = + querySettings.getDestinationEncryptionConfiguration(); + this.destinationTable = querySettings.getDestinationTable(); + this.destinationDataset = querySettings.getDestinationDataset(); + this.destinationDatasetExpirationTime = querySettings.destinationDatasetExpirationTime; + this.jobTimeoutMs = querySettings.getJobTimeoutMs(); + this.maximumBillingTier = querySettings.getMaximumBillingTier(); + this.priority = querySettings.getPriority(); + this.rangePartitioning = querySettings.getRangePartitioning(); + this.schemaUpdateOptions = querySettings.getSchemaUpdateOptions(); + this.tableDefinitions = querySettings.getTableDefinitions(); + this.timePartitioning = querySettings.getTimePartitioning(); + this.userDefinedFunctions = querySettings.getUserDefinedFunctions(); + this.writeDisposition = querySettings.getWriteDisposition(); + this.numBufferedRows = querySettings.getNumBufferedRows(); + this.maxResultPerPage = querySettings.getMaxResultPerPage(); + this.defaultDataset = querySettings.getDefaultDataset(); + this.useWriteAPI = querySettings.isUseWriteAPI(); + this.writeAPIActivationRowCount = 
querySettings.getWriteAPIActivationRowCount(); + this.writeAPIAppendRowCount = querySettings.getWriteAPIAppendRowCount(); + this.maxBytesBilled = querySettings.getMaxBytesBilled(); + this.labels = querySettings.getLabels(); + } + + Builder withDefaultValues() { + return setUseReadAPI(false) // Read API is disabled by default; + .setQueryDialect(BigQueryJdbcUrlUtility.DEFAULT_QUERY_DIALECT_VALUE) + .setNumBufferedRows(10000) // 10K records will be kept in the buffer (Blocking Queue); + .setMaxResultPerPage(BigQueryJdbcUrlUtility.DEFAULT_MAX_RESULTS_VALUE); + } + + /** + * Sets useReadAPI flag, enabled by default. Read API will be used if the underlying conditions + * are satisfied and this flag is enabled + * + * @param useReadAPI or {@code true} for none + */ + Builder setUseReadAPI(boolean useReadAPI) { + this.useReadAPI = useReadAPI; + return this; + } + + /** + * Sets the minimum table size for which the BigQuery Storage API will be used. + * + *
    When query results are large, exceeding both the row and page limits, the connector + * automatically switches to the BigQuery Storage API for faster processing. This method allows + * you to configure a threshold for table size, enabling the use of the BigQuery Storage API + * when the limit is exceeded, provided the table size exceeds the specified value. + * + * @param highThroughputMinTableSize the minimum table size to trigger the use of the BigQuery + * Storage API + */ + Builder setHighThroughputMinTableSize(int highThroughputMinTableSize) { + this.highThroughputMinTableSize = highThroughputMinTableSize; + return this; + } + + /** + * Sets the activation ratio for switching to the BigQuery Storage API. + * + *
    The connector switches to the BigQuery Storage API when the number of pages in the query + * results exceeds this value AND the table size is greater than or equal to the value set or + * default value of {@link #setHighThroughputMinTableSize(int)}. + * + * @param highThroughputActivationRatio the activation ratio for switching to BigQuery Storage + * API + */ + Builder setHighThroughputActivationRatio(int highThroughputActivationRatio) { + this.highThroughputActivationRatio = highThroughputActivationRatio; + return this; + } + + /** + * setting true, enables session-level features such as capturing SQL activities or enabling + * multi-statement transactions. Session tracking is disabled by default. + */ + Builder setEnableSession(boolean enableSession) { + this.enableSession = enableSession; + return this; + } + + /** + * When the connector uses fetch workflows not supported on the High-Throughput API, this option + * specifies whether the connector falls back to the REST API or returns an error. By default it + * falls back to standard API. + * + * @param unsupportedHTAPIFallback true if falls back to standard, false to error. + */ + Builder setUnsupportedHTAPIFallback(boolean unsupportedHTAPIFallback) { + this.unsupportedHTAPIFallback = unsupportedHTAPIFallback; + return this; + } + + /** + * Setting session information associated with the job. + * + *
    BigQuery's sessions provide a way to link multiple jobs and maintain temporary data, such + * as temporary tables, between them. They are needed for using multi-statement transactions + * that span across multiple queries. + */ + Builder setSessionInfoConnectionProperty(ConnectionProperty sessionInfoConnectionProperty) { + this.sessionInfoConnectionProperty = sessionInfoConnectionProperty; + return this; + } + + Builder setUseQueryCache(boolean useQueryCache) { + this.useQueryCache = useQueryCache; + return this; + } + + Builder setAllowLargeResults(Boolean allowLargeResults) { + this.allowLargeResults = allowLargeResults; + return this; + } + + /** + * Set the KMS resource key name which is the unique identifier you give to your encryption key + * in Google Cloud's Key Management Service (KMS). Tells BigQuery which key to use when + * encrypting or decrypting your data. + */ + Builder setKmsKeyName(String kmsKeyName) { + this.kmsKeyName = kmsKeyName; + return this; + } + + Builder setQueryDialect(String queryDialect) { + this.queryDialect = queryDialect; + return this; + } + + Builder setQueryProperties(List queryProperties) { + this.queryProperties = queryProperties; + return this; + } + + /** Sets the clustering specification for the destination table. */ + Builder setClustering(Clustering clustering) { + this.clustering = clustering; + return this; + } + + /** + * Sets whether the job is allowed to create tables. + * + * @see + * Create Disposition + */ + Builder setCreateDisposition(JobInfo.CreateDisposition createDisposition) { + this.createDisposition = createDisposition; + return this; + } + + /** + * Sets the custom encryption configuration (e.g., Cloud KMS keys). + * + * @param destinationEncryptionConfiguration destinationEncryptionConfiguration or {@code null} + * for none + */ + Builder setDestinationEncryptionConfiguration( + EncryptionConfiguration destinationEncryptionConfiguration) { + this.destinationEncryptionConfiguration = destinationEncryptionConfiguration; + return this; + } + + Builder setDestinationTable(String destinationTable) { + this.destinationTable = destinationTable; + return this; + } + + Builder setDestinationDataset(String destinationDataset) { + this.destinationDataset = destinationDataset; + return this; + } + + Builder setDestinationDatasetExpirationTime(long destinationDatasetExpirationTime) { + this.destinationDatasetExpirationTime = destinationDatasetExpirationTime; + return this; + } + + /** + * [Optional] Job timeout in milliseconds. If this time limit is exceeded, BigQuery may attempt + * to terminate the job. + * + * @param jobTimeoutMs jobTimeoutMs or {@code null} for none + */ + Builder setJobTimeoutMs(long jobTimeoutMs) { + this.jobTimeoutMs = jobTimeoutMs; + return this; + } + + /** + * Limits the billing tier for this job. Queries that have resource usage beyond this tier will + * fail (without incurring a charge). If unspecified, this will be set to your project default. + * + * @param maximumBillingTier maximum billing tier for this job + */ + Builder setMaximumBillingTier(int maximumBillingTier) { + this.maximumBillingTier = maximumBillingTier; + return this; + } + + /** + * Sets a priority for the query. If not specified the priority is assumed to be {@link + * Priority#INTERACTIVE}. + */ + Builder setPriority(QueryJobConfiguration.Priority priority) { + this.priority = priority; + return this; + } + + /** + * Range partitioning specification for this table. Only one of timePartitioning and + * rangePartitioning should be specified. 
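For example, a sketch of an integer-range specification using the client library's RangePartitioning types (the field name is illustrative):

    RangePartitioning.Range range =
        RangePartitioning.Range.newBuilder().setStart(0L).setInterval(10L).setEnd(100L).build();
    RangePartitioning byCustomerId =
        RangePartitioning.newBuilder().setField("customer_id").setRange(range).build();
    // Pass via setRangePartitioning(byCustomerId) and leave setTimePartitioning unset.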
+ * + * @param rangePartitioning rangePartitioning or {@code null} for none + */ + Builder setRangePartitioning(RangePartitioning rangePartitioning) { + this.rangePartitioning = rangePartitioning; + return this; + } + + /** + * Sets options allowing the schema of the destination table to be updated as a side effect of + * the query job. Schema update options are supported in two cases: when writeDisposition is + * WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination table is a + * partition of a table, specified by partition decorators. For normal tables, WRITE_TRUNCATE + * will always overwrite the schema. + */ + Builder setSchemaUpdateOptions(List schemaUpdateOptions) { + this.schemaUpdateOptions = schemaUpdateOptions; + return this; + } + + /** + * Sets the external tables definitions. If querying external data sources outside BigQuery, + * this value describes the data format, location and other properties of the data sources. By + * defining these properties, the data sources can be queried as if they were standard BigQuery + * tables. + */ + Builder setTableDefinitions(Map tableDefinitions) { + this.tableDefinitions = tableDefinitions; + return this; + } + + /** Sets the time partitioning specification for the destination table. */ + Builder setTimePartitioning(TimePartitioning timePartitioning) { + this.timePartitioning = timePartitioning; + return this; + } + + /** + * Sets user defined function resources that can be used by this query. Function resources can + * either be defined inline ({@link UserDefinedFunction#inline(String)}) or loaded from a Google + * Cloud Storage URI ({@link UserDefinedFunction#fromUri(String)}. + */ + Builder setUserDefinedFunctions(List userDefinedFunctions) { + this.userDefinedFunctions = userDefinedFunctions; + return this; + } + + /** + * Sets the action that should occur if the destination table already exists. + * + * @see + * Write Disposition + */ + Builder setWriteDisposition(JobInfo.WriteDisposition writeDisposition) { + this.writeDisposition = writeDisposition; + return this; + } + + /** + * Sets the number of rows in the buffer (a blocking queue) that query results are consumed + * from. + * + * @param numBufferedRows numBufferedRows or {@code null} for none + */ + Builder setNumBufferedRows(int numBufferedRows) { + this.numBufferedRows = numBufferedRows; + return this; + } + + /** + * Sets the maximum records per page to be used for pagination. This is used as an input for the + * tabledata.list and jobs.getQueryResults RPC calls + * + * @param maxResultPerPage + */ + Builder setMaxResultPerPage(long maxResultPerPage) { + this.maxResultPerPage = maxResultPerPage; + return this; + } + + Builder setDefaultDataset(DatasetId defaultDataset) { + this.defaultDataset = defaultDataset; + return this; + } + + Builder setUseWriteAPI(boolean useWriteAPI) { + this.useWriteAPI = useWriteAPI; + return this; + } + + Builder setWriteAPIActivationRowCount(int writeAPIActivationRowCount) { + this.writeAPIActivationRowCount = writeAPIActivationRowCount; + return this; + } + + Builder setWriteAPIAppendRowCount(int writeAPIAppendRowCount) { + this.writeAPIAppendRowCount = writeAPIAppendRowCount; + return this; + } + + Builder setMaxBytesBilled(long maxBytesBilled) { + this.maxBytesBilled = maxBytesBilled; + return this; + } + + Builder setLabels(Map labels) { + this.labels = labels; + return this; + } + + /** Creates a {@code BigQuerySettings} object. 
*/ + BigQuerySettings build() { + return new BigQuerySettings(this); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySqlTypeConverter.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySqlTypeConverter.java new file mode 100644 index 0000000000..cfdc64a14e --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQuerySqlTypeConverter.java @@ -0,0 +1,81 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.JobStatistics.QueryStatistics.StatementType; +import com.google.cloud.bigquery.jdbc.BigQueryStatement.SqlType; + +class BigQuerySqlTypeConverter { + + static SqlType getSqlTypeFromStatementType(StatementType statementType) { + switch (statementType.toString()) { + case "SELECT": + return SqlType.SELECT; + + case "INSERT": + case "UPDATE": + case "DELETE": + case "MERGE": + return SqlType.DML; + case "CALL": + return SqlType.DML_EXTRA; + + case "CREATE_TABLE": + case "CREATE_TABLE_AS_SELECT": + case "CREATE_VIEW": + case "CREATE_MODEL": + case "CREATE_MATERIALIZED_VIEW": + case "CREATE_FUNCTION": + case "CREATE_TABLE_FUNCTION": + case "CREATE_PROCEDURE": + case "CREATE_ROW_ACCESS_POLICY": + case "CREATE_SCHEMA": + case "CREATE_SNAPSHOT_TABLE": + case "CREATE_SEARCH_INDEX": + case "DROP_TABLE": + case "DROP_EXTERNAL_TABLE": + case "DROP_VIEW": + case "DROP_MODEL": + case "DROP_MATERIALIZED_VIEW": + case "DROP_FUNCTION": + case "DROP_TABLE_FUNCTION": + case "DROP_PROCEDURE": + case "DROP_SEARCH_INDEX": + case "DROP_SCHEMA": + case "DROP_SNAPSHOT_TABLE": + case "DROP_ROW_ACCESS_POLICY": + case "ALTER_TABLE": + case "ALTER_VIEW": + case "ALTER_MATERIALIZED_VIEW": + case "ALTER_SCHEMA": + case "TRUNCATE_TABLE": + case "CREATE_EXTERNAL_TABLE": + return SqlType.DDL; + case "SCRIPT": + return SqlType.SCRIPT; + case "BEGIN_TRANSACTION": + case "COMMIT_TRANSACTION": + case "ROLLBACK_TRANSACTION": + return SqlType.TCL; + case "EXPORT_DATA": + case "EXPORT_MODEL": + case "LOAD_DATA": + default: + return SqlType.OTHER; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryStatement.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryStatement.java new file mode 100644 index 0000000000..ca579d1d0c --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryStatement.java @@ -0,0 +1,1576 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import com.google.api.gax.paging.Page; +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQuery.JobListOption; +import com.google.cloud.bigquery.BigQuery.QueryResultsOption; +import com.google.cloud.bigquery.BigQuery.TableDataListOption; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.Dataset; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.DatasetInfo; +import com.google.cloud.bigquery.EncryptionConfiguration; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobConfiguration; +import com.google.cloud.bigquery.JobId; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.JobStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics.StatementType; +import com.google.cloud.bigquery.JobStatistics.ScriptStatistics; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TableResult; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlSyntaxErrorException; +import com.google.cloud.bigquery.storage.v1.ArrowRecordBatch; +import com.google.cloud.bigquery.storage.v1.ArrowSchema; +import com.google.cloud.bigquery.storage.v1.BigQueryReadClient; +import com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest; +import com.google.cloud.bigquery.storage.v1.DataFormat; +import com.google.cloud.bigquery.storage.v1.ReadRowsRequest; +import com.google.cloud.bigquery.storage.v1.ReadRowsResponse; +import com.google.cloud.bigquery.storage.v1.ReadSession; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterators; +import com.google.common.util.concurrent.Uninterruptibles; +import java.lang.ref.ReferenceQueue; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLWarning; +import java.time.Instant; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Random; +import java.util.UUID; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.ThreadFactory; +import java.util.logging.Level; + +/** + * An implementation of {@link java.sql.Statement} for executing BigQuery SQL statement and + * returning the results it produces. 
+ *
+ * @see BigQueryConnection#createStatement
+ * @see ResultSet
+ */
+public class BigQueryStatement extends BigQueryNoOpsStatement {
+
+ // TODO (obada): Update this after benchmarking
+ private static final int MAX_PROCESS_QUERY_THREADS_CNT = 50;
+ protected static ExecutorService queryTaskExecutor =
+ Executors.newFixedThreadPool(MAX_PROCESS_QUERY_THREADS_CNT);
+ private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString());
+ private static final String DEFAULT_DATASET_NAME = "_google_jdbc";
+ private static final String DEFAULT_TABLE_NAME = "temp_table_";
+ private static final String JDBC_JOB_PREFIX = "google-jdbc-";
+ private static final int MAX_RETRY_COUNT = 5;
+ private static final long RETRY_DELAY_MS = 2000L;
+ protected ResultSet currentResultSet;
+ protected long currentUpdateCount = -1;
+ protected List<JobId> jobIds = new ArrayList<>();
+ protected JobIdWrapper parentJobId = null;
+ protected int currentJobIdIndex = -1;
+ protected List<String> batchQueries = new ArrayList<>();
+ protected BigQueryConnection connection;
+ protected int maxFieldSize = 0;
+ protected int maxRows = 0;
+ protected boolean isClosed = false;
+ protected boolean closeOnCompletion = false;
+ protected Object cancelLock = new Object();
+ protected boolean isCanceled = false;
+ protected boolean poolable;
+ protected int queryTimeout = 0;
+ protected SQLWarning warning;
+ private int fetchDirection = ResultSet.FETCH_FORWARD;
+ private int fetchSize;
+ private String scriptQuery;
+ private Map<String, String> extraLabels = new HashMap<>();
+
+ private BigQueryReadClient bigQueryReadClient = null;
+ private final BigQuery bigQuery;
+
+ final BigQuerySettings querySettings;
+
+ private BlockingQueue<BigQueryFieldValueListWrapper> bigQueryFieldValueListWrapperBlockingQueue;
+
+ private BlockingQueue<BigQueryArrowBatchWrapper> arrowBatchWrapperBlockingQueue;
+
+ // Variables required for the ReferenceQueue implementation
+ static ReferenceQueue<BigQueryArrowResultSet> referenceQueueArrowRs = new ReferenceQueue<>();
+ static ReferenceQueue<BigQueryJsonResultSet> referenceQueueJsonRs = new ReferenceQueue<>();
+ static List<BigQueryResultSetFinalizers.ArrowResultSetFinalizer> arrowResultSetFinalizers =
+ new ArrayList<>();
+ static List<BigQueryResultSetFinalizers.JsonResultSetFinalizer> jsonResultSetFinalizers =
+ new ArrayList<>();
+
+ private static final ThreadFactory JDBC_THREAD_FACTORY =
+ new BigQueryThreadFactory("BigQuery-Thread-");
+
+ static {
+ BigQueryDaemonPollingTask.startGcDaemonTask(
+ referenceQueueArrowRs,
+ referenceQueueJsonRs,
+ arrowResultSetFinalizers,
+ jsonResultSetFinalizers);
+ }
+
+ @VisibleForTesting
+ public BigQueryStatement(BigQueryConnection connection) {
+ this.connection = connection;
+ this.bigQuery = connection.getBigQuery();
+ this.querySettings = generateBigQuerySettings();
+ }
+
+ private void resetStatementFields() {
+ this.isCanceled = false;
+ this.scriptQuery = null;
+ this.parentJobId = null;
+ this.currentJobIdIndex = -1;
+ this.currentUpdateCount = -1;
+ }
+
+ private BigQuerySettings generateBigQuerySettings() {
+ LOG.finest("++enter++");
+
+ BigQuerySettings.Builder querySettings = BigQuerySettings.newBuilder();
+ DatasetId defaultDataset = this.connection.getDefaultDataset();
+ if (defaultDataset != null) {
+ querySettings.setDefaultDataset(this.connection.defaultDataset);
+ }
+ Long maxBytesBilled = this.connection.getMaxBytesBilled();
+ if (maxBytesBilled > 0) {
+ querySettings.setMaxBytesBilled(maxBytesBilled);
+ }
+ if (this.connection.getLabels() != null && !this.connection.getLabels().isEmpty()) {
+ querySettings.setLabels(this.connection.getLabels());
+ }
+ querySettings.setMaxResultPerPage(this.connection.getMaxResults());
+
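+ // The high-throughput (Storage Read API) settings below are copied from the connection as-is.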
+ querySettings.setUseReadAPI(this.connection.isEnableHighThroughputAPI());
+ querySettings.setHighThroughputMinTableSize(this.connection.getHighThroughputMinTableSize());
+ querySettings.setHighThroughputActivationRatio(
+ this.connection.getHighThroughputActivationRatio());
+ querySettings.setUnsupportedHTAPIFallback(this.connection.isUnsupportedHTAPIFallback());
+ querySettings.setUseQueryCache(this.connection.isUseQueryCache());
+ querySettings.setQueryDialect(this.connection.getQueryDialect());
+ querySettings.setKmsKeyName(this.connection.getKmsKeyName());
+ querySettings.setQueryProperties(this.connection.getQueryProperties());
+ querySettings.setAllowLargeResults(this.connection.isAllowLargeResults());
+ if (this.connection.getJobTimeoutInSeconds() > 0) {
+ querySettings.setJobTimeoutMs(this.connection.getJobTimeoutInSeconds() * 1000L);
+ }
+ if (this.connection.getDestinationTable() != null) {
+ querySettings.setDestinationTable(this.connection.getDestinationTable());
+ }
+ if (this.connection.getDestinationDataset() != null) {
+ querySettings.setDestinationDataset(this.connection.getDestinationDataset());
+ querySettings.setDestinationDatasetExpirationTime(
+ this.connection.getDestinationDatasetExpirationTime());
+ }
+ // Only create a session if sessions are enabled and no session info has been set yet.
+ if (this.connection.enableSession) {
+ if (this.connection.sessionInfoConnectionProperty == null) {
+ querySettings.setEnableSession(this.connection.isSessionEnabled());
+ } else {
+ querySettings.setSessionInfoConnectionProperty(
+ this.connection.getSessionInfoConnectionProperty());
+ }
+ }
+ querySettings.setUseWriteAPI(this.connection.isEnableWriteAPI());
+ querySettings.setWriteAPIActivationRowCount(this.connection.getWriteAPIActivationRowCount());
+ querySettings.setWriteAPIAppendRowCount(this.connection.getWriteAPIAppendRowCount());
+
+ return querySettings.build();
+ }
+
+ /**
+ * This method executes a BigQuery SQL query, returning a single {@code ResultSet} object.
+ *
+ * <p>Example of running a query:
+ *
+ * <pre>
+ *  Connection connection = DriverManager.getConnection(CONNECTION_URL);
+ *  Statement bigQueryStatement = connection.createStatement();
+ *  ResultSet result = bigQueryStatement.executeQuery(QUERY);
+ * </pre>
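+ * <p>The returned {@code ResultSet} can be closed automatically with try-with-resources, for
+ * example (illustrative):
+ *
+ * <pre>
+ *  try (ResultSet rs = bigQueryStatement.executeQuery(QUERY)) {
+ *    while (rs.next()) {
+ *      System.out.println(rs.getString(1));
+ *    }
+ *  }
+ * </pre>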
    + * + * @param sql BigQuery SQL query + * @return {@code ResultSet} containing the output of the query + * @throws SQLException if a BigQuery access error occurs, this method is called on a closed + * {@code Statement}, the given SQL statement produces multiple or no result sets. + * @see java.sql.Statement#executeQuery(String) + */ + @Override + public ResultSet executeQuery(String sql) throws SQLException { + // TODO: write method to return state variables to original state. + LOG.finest("++enter++"); + logQueryExecutionStart(sql); + try { + QueryJobConfiguration jobConfiguration = + setDestinationDatasetAndTableInJobConfig(getJobConfig(sql).build()); + runQuery(sql, jobConfiguration); + } catch (InterruptedException ex) { + throw new BigQueryJdbcException(ex); + } + + if (!isSingularResultSet()) { + throw new BigQueryJdbcException( + "Query returned more than one or didn't return any ResultSet."); + } + // This contains all the other assertions spec required on this method + return getCurrentResultSet(); + } + + @Override + public long executeLargeUpdate(String sql) throws SQLException { + LOG.finest("++enter++"); + logQueryExecutionStart(sql); + try { + QueryJobConfiguration.Builder jobConfiguration = getJobConfig(sql); + runQuery(sql, jobConfiguration.build()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + if (this.currentUpdateCount == -1) { + throw new BigQueryJdbcException( + "Update query expected to return affected row count. Double check query type."); + } + return this.currentUpdateCount; + } + + @Override + public int executeUpdate(String sql) throws SQLException { + LOG.finest("++enter++"); + return checkUpdateCount(executeLargeUpdate(sql)); + } + + int checkUpdateCount(long updateCount) { + LOG.finest("++enter++"); + if (updateCount > Integer.MAX_VALUE) { + LOG.warning("Warning: Table update exceeded maximum limit!"); + // Update count is -2 if update is successful but the update count exceeds Integer.MAX_VALUE + return -2; + } + return (int) updateCount; + } + + @Override + public boolean execute(String sql) throws SQLException { + LOG.finest("++enter++"); + logQueryExecutionStart(sql); + try { + QueryJobConfiguration jobConfiguration = getJobConfig(sql).build(); + // If Large Results are enabled, ensure query type is SELECT + if (isLargeResultsEnabled() && getQueryType(jobConfiguration, null) == SqlType.SELECT) { + jobConfiguration = setDestinationDatasetAndTableInJobConfig(jobConfiguration); + } + runQuery(sql, jobConfiguration); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } + return getCurrentResultSet() != null; + } + + StatementType getStatementType(QueryJobConfiguration queryJobConfiguration) throws SQLException { + LOG.finest("++enter++"); + QueryJobConfiguration dryRunJobConfiguration = + queryJobConfiguration.toBuilder().setDryRun(true).build(); + Job job; + try { + job = bigQuery.create(JobInfo.of(dryRunJobConfiguration)); + } catch (BigQueryException ex) { + if (ex.getMessage().contains("Syntax error")) { + throw new BigQueryJdbcSqlSyntaxErrorException(ex); + } + throw new BigQueryJdbcException(ex); + } + QueryStatistics statistics = job.getStatistics(); + return statistics.getStatementType(); + } + + SqlType getQueryType(QueryJobConfiguration jobConfiguration, StatementType statementType) + throws SQLException { + LOG.finest("++enter++"); + if (statementType == null) { + statementType = getStatementType(jobConfiguration); + } + + SqlType sqlType = 
+ BigQuerySqlTypeConverter.getSqlTypeFromStatementType(statementType);
+ LOG.fine(
+ "Query: %s, Statement Type: %s, SQL Type: %s",
+ jobConfiguration.getQuery(), statementType, sqlType);
+ return sqlType;
+ }
+
+ QueryStatistics getQueryStatistics(QueryJobConfiguration queryJobConfiguration)
+ throws BigQueryJdbcSqlSyntaxErrorException, BigQueryJdbcException {
+ LOG.finest("++enter++");
+ QueryJobConfiguration dryRunJobConfiguration =
+ queryJobConfiguration.toBuilder().setDryRun(true).build();
+ Job job;
+ try {
+ job = this.bigQuery.create(JobInfo.of(dryRunJobConfiguration));
+ return job.getStatistics();
+ } catch (BigQueryException ex) {
+ if (ex.getMessage().contains("Syntax error")) {
+ throw new BigQueryJdbcSqlSyntaxErrorException(ex);
+ }
+ throw new BigQueryJdbcException(ex);
+ }
+ }
+
+ /**
+ * Releases this Statement's BigQuery and JDBC resources immediately instead of waiting for this
+ * to happen when it is automatically closed. These resources include the {@code ResultSet}
+ * object, batch queries, job IDs, and the BigQuery connection.
+ *
+ * <p>
    Calling the method close on a Statement object that is already closed has no effect. + * + * @throws SQLException if a BigQuery access error occurs + */ + @Override + public void close() throws SQLException { + LOG.fine("Closing Statement %s.", this); + if (isClosed()) { + return; + } + + boolean cancelSucceeded = false; + try { + cancel(); // This attempts to cancel jobs and calls closeStatementResources() + cancelSucceeded = true; + } catch (SQLException e) { + LOG.warning("Failed to cancel statement during close().", e); + } finally { + if (!cancelSucceeded) { + closeStatementResources(); + } + this.connection = null; + this.isClosed = true; + } + } + + @Override + public int getMaxFieldSize() { + return this.maxFieldSize; + } + + @Override + public void setMaxFieldSize(int max) { + this.maxFieldSize = max; + } + + @Override + public int getMaxRows() { + return this.maxRows; + } + + @Override + public void setMaxRows(int max) { + this.maxRows = max; + } + + @Override + public void setEscapeProcessing(boolean enable) { + // TODO: verify how to implement this method + } + + @Override + public int getQueryTimeout() { + return this.queryTimeout; + } + + @Override + public void setQueryTimeout(int seconds) { + if (seconds < 0) { + throw new IllegalArgumentException("Query Timeout should be >= 0."); + } + this.queryTimeout = seconds; + } + + /** + * Cancels this {@code Statement} object, the running threads, and BigQuery jobs. + * + * @throws SQLException if a BigQuery access error occurs or this method is called on a closed + * {@code Statement} + */ + @Override + public void cancel() throws SQLException { + LOG.finest("Statement %s cancelled", this); + synchronized (cancelLock) { + this.isCanceled = true; + for (JobId jobId : this.jobIds) { + try { + this.bigQuery.cancel(jobId); + LOG.info("Job " + jobId + "cancelled."); + } catch (BigQueryException e) { + if (e.getMessage() != null + && (e.getMessage().contains("Job is already in state DONE") + || e.getMessage().contains("Error: 3848323"))) { + LOG.warning("Attempted to cancel a job that was already done: " + jobId); + } else { + throw new BigQueryJdbcException(e); + } + } + } + jobIds.clear(); + } + // If a ResultSet exists, then it will be closed as well, closing the + // ownedThreads + closeStatementResources(); + } + + @Override + public SQLWarning getWarnings() { + return this.warning; + } + + @Override + public void clearWarnings() { + this.warning = null; + } + + @Override + public ResultSet getResultSet() { + return this.currentResultSet; + } + + @VisibleForTesting + void setUpdateCount(long count) { + this.currentUpdateCount = count; + } + + @Override + public int getUpdateCount() { + return (int) this.currentUpdateCount; + } + + @Override + public long getLargeUpdateCount() { + return this.currentUpdateCount; + } + + @Override + public boolean getMoreResults() throws SQLException { + return getMoreResults(CLOSE_CURRENT_RESULT); + } + + private void closeStatementResources() throws SQLException { + LOG.finest("++enter++"); + if (this.currentResultSet != null) { + // If Statement has 'CloseOnCompletion' set, resultset might + // call into the same function; In order to avoid stack overflow + // we will cleanup resultset before calling into 'close'. 
+ ResultSet tmp = this.currentResultSet; + this.currentResultSet = null; + tmp.close(); + } + this.batchQueries.clear(); + this.currentUpdateCount = -1; + this.currentJobIdIndex = -1; + if (this.connection != null) { + if (this.connection.isTransactionStarted()) { + this.connection.rollback(); + } + this.connection.removeStatement(this); + } + } + + private boolean isSingularResultSet() { + return this.currentResultSet != null + && (this.parentJobId == null || this.parentJobId.getJobs().size() == 1); + } + + private String generateJobId() { + return JDBC_JOB_PREFIX + UUID.randomUUID().toString(); + } + + private class ExecuteResult { + public final TableResult tableResult; + public final Job job; + + ExecuteResult(TableResult tableResult, Job job) { + this.tableResult = tableResult; + this.job = job; + } + } + + @InternalApi + ExecuteResult executeJob(QueryJobConfiguration jobConfiguration) + throws InterruptedException, BigQueryException, BigQueryJdbcException { + LOG.finest("++enter++"); + Job job = null; + // Location is not properly passed from the connection, + // so we need to explicitly set it; + // Do not set custom JobId here or it will disable jobless queries. + JobId jobId = JobId.newBuilder().setLocation(connection.getLocation()).build(); + if (connection.getUseStatelessQueryMode()) { + Object result = bigQuery.queryWithTimeout(jobConfiguration, jobId, null); + if (result instanceof TableResult) { + TableResult tableResult = (TableResult) result; + if (tableResult.getJobId() != null) { + return new ExecuteResult(tableResult, bigQuery.getJob(tableResult.getJobId())); + } + return new ExecuteResult((TableResult) result, null); + } + + if (result instanceof Job) { + job = (Job) result; + } else { + throw new BigQueryJdbcException("Unexpected result type from queryWithTimeout"); + } + } else { + // Update jobId with custom JobId if jobless query is disabled. + jobId = jobId.toBuilder().setJob(generateJobId()).build(); + JobInfo jobInfo = JobInfo.newBuilder(jobConfiguration).setJobId(jobId).build(); + job = bigQuery.create(jobInfo); + } + + if (job == null) { + throw new BigQueryJdbcException("Failed to create BQ Job."); + } + synchronized (cancelLock) { + if (isCanceled) { + job.cancel(); + throw new BigQueryJdbcException("Query was cancelled."); + } + jobId = job.getJobId(); + jobIds.add(jobId); + } + LOG.info("Query submitted with Job ID: " + job.getJobId().getJob()); + TableResult result = + job.getQueryResults(QueryResultsOption.pageSize(querySettings.getMaxResultPerPage())); + synchronized (cancelLock) { + jobIds.remove(jobId); + } + return new ExecuteResult(result, job); + } + + /** + * Execute the SQL script and sets the reference of the underlying job, passing null querySettings + * will result in the FastQueryPath + */ + @InternalApi + void runQuery(String query, QueryJobConfiguration jobConfiguration) + throws SQLException, InterruptedException { + LOG.finest("++enter++"); + LOG.fine("Run Query started"); + + if (queryTimeout > 0) { + jobConfiguration = + jobConfiguration.toBuilder().setJobTimeoutMs(Long.valueOf(queryTimeout) * 1000).build(); + } + + try { + resetStatementFields(); + ExecuteResult executeResult = executeJob(jobConfiguration); + StatementType statementType = + executeResult.job == null + ? 
getStatementType(jobConfiguration) + : ((QueryStatistics) executeResult.job.getStatistics()).getStatementType(); + SqlType queryType = getQueryType(jobConfiguration, statementType); + handleQueryResult(query, executeResult.tableResult, queryType); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } catch (BigQueryException ex) { + if (ex.getMessage().contains("Syntax error")) { + throw new BigQueryJdbcSqlSyntaxErrorException(ex); + } + throw new BigQueryJdbcException(ex); + } + } + + private boolean isLargeResultsEnabled() { + String destinationTable = this.querySettings.getDestinationTable(); + String destinationDataset = this.querySettings.getDestinationDataset(); + return destinationDataset != null || destinationTable != null; + } + + private QueryJobConfiguration setDestinationDatasetAndTableInJobConfig( + QueryJobConfiguration jobConfiguration) { + String destinationTable = this.querySettings.getDestinationTable(); + String destinationDataset = this.querySettings.getDestinationDataset(); + if (destinationDataset != null || destinationTable != null) { + if (destinationDataset != null) { + checkIfDatasetExistElseCreate(destinationDataset); + } + if (jobConfiguration.useLegacySql() && destinationDataset == null) { + checkIfDatasetExistElseCreate(DEFAULT_DATASET_NAME); + destinationDataset = DEFAULT_DATASET_NAME; + } + if (destinationTable == null) { + destinationTable = getDefaultDestinationTable(); + } + return jobConfiguration.toBuilder() + .setAllowLargeResults(this.querySettings.getAllowLargeResults()) + .setDestinationTable(TableId.of(destinationDataset, destinationTable)) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setWriteDisposition(JobInfo.WriteDisposition.WRITE_TRUNCATE) + .build(); + } + return jobConfiguration; + } + + Job getNextJob() { + while (this.currentJobIdIndex + 1 < this.parentJobId.getJobs().size()) { + this.currentJobIdIndex += 1; + Job currentJob = this.parentJobId.getJobs().get(this.currentJobIdIndex); + QueryStatistics queryStatistics = currentJob.getStatistics(); + ScriptStatistics scriptStatistics = queryStatistics.getScriptStatistics(); + // EXPRESSION jobs are not relevant for customer query and can be + // created by BQ depending on various conditions. We will just ignore + // them when presenting results. 
+ if (!"expression".equalsIgnoreCase(scriptStatistics.getEvaluationKind())) { + return currentJob; + } + } + return null; + } + + void handleQueryResult(String query, TableResult results, SqlType queryType) + throws SQLException, InterruptedException { + LOG.finest("++enter++"); + switch (queryType) { + case SELECT: + processQueryResponse(query, results); + break; + case DML: + case DML_EXTRA: + try { + Job completedJob = this.bigQuery.getJob(results.getJobId()).waitFor(); + JobStatistics.QueryStatistics statistics = completedJob.getStatistics(); + updateAffectedRowCount(statistics.getNumDmlAffectedRows()); + } catch (InterruptedException ex) { + throw new BigQueryJdbcRuntimeException(ex); + } catch (NullPointerException ex) { + throw new BigQueryJdbcException(ex); + } + break; + case TCL: + case DDL: + updateAffectedRowCount(results.getTotalRows()); + break; + case SCRIPT: + try { + Page childJobs = + this.bigQuery.listJobs(JobListOption.parentJobId(results.getJobId().getJob())); + + ArrayList childJobList = new ArrayList<>(); + Iterator iterableJobs = childJobs.iterateAll().iterator(); + iterableJobs.forEachRemaining(childJobList::add); + Collections.reverse(childJobList); + + this.scriptQuery = query; + this.parentJobId = new JobIdWrapper(results.getJobId(), results, childJobList); + this.currentJobIdIndex = -1; + + Job currentJob = getNextJob(); + if (currentJob == null) { + return; + } + StatementType statementType = + ((QueryStatistics) (currentJob.getStatistics())).getStatementType(); + SqlType sqlType = getQueryType(currentJob.getConfiguration(), statementType); + handleQueryResult(query, currentJob.getQueryResults(), sqlType); + } catch (NullPointerException ex) { + throw new BigQueryJdbcException(ex); + } + break; + case OTHER: + throw new BigQueryJdbcException(String.format("Unexpected value: " + queryType)); + } + } + + private void updateAffectedRowCount(Long count) throws SQLException { + // TODO(neenu): check if this need to be closed vs removed) + if (this.currentResultSet != null) { + try { + this.currentResultSet.close(); + this.currentResultSet = null; + } catch (SQLException ex) { + throw new BigQueryJdbcException(ex); + } + } + this.currentUpdateCount = count; + } + + @InternalApi + BigQueryReadClient getBigQueryReadClient() { + if (this.bigQueryReadClient == null) { + this.bigQueryReadClient = this.connection.getBigQueryReadClient(); + } + return this.bigQueryReadClient; + } + + @InternalApi + ReadSession getReadSession(CreateReadSessionRequest readSessionRequest) { + LOG.finest("++enter++"); + return getBigQueryReadClient().createReadSession(readSessionRequest); + } + + @InternalApi + ArrowSchema getArrowSchema(ReadSession readSession) { + return readSession.getArrowSchema(); + } + + /** Uses Bigquery Storage Read API and returns the stream as ResultSet */ + @InternalApi + ResultSet processArrowResultSet(TableResult results) throws SQLException { + LOG.finest("++enter++"); + + // set the resultset + long totalRows = (getMaxRows() > 0) ? 
+ @InternalApi
+ ResultSet processArrowResultSet(TableResult results) throws SQLException {
+ LOG.finest("++enter++");
+
+ // set the resultset
+ long totalRows = (getMaxRows() > 0) ? getMaxRows() : results.getTotalRows();
+ JobId currentJobId = results.getJobId();
+ TableId destinationTable = getDestinationTable(currentJobId);
+ Schema schema = results.getSchema();
+ try {
+ String parent = String.format("projects/%s", destinationTable.getProject());
+ String srcTable =
+ String.format(
+ "projects/%s/datasets/%s/tables/%s",
+ destinationTable.getProject(),
+ destinationTable.getDataset(),
+ destinationTable.getTable());
+
+ // Read all the columns of the source table (temp table) and stream the data back in Arrow
+ // format
+ ReadSession.Builder sessionBuilder =
+ ReadSession.newBuilder().setTable(srcTable).setDataFormat(DataFormat.ARROW);
+
+ CreateReadSessionRequest.Builder builder =
+ CreateReadSessionRequest.newBuilder()
+ .setParent(parent)
+ .setReadSession(sessionBuilder)
+ .setMaxStreamCount(1);
+
+ ReadSession readSession = getReadSession(builder.build());
+ this.arrowBatchWrapperBlockingQueue = new LinkedBlockingDeque<>(getBufferSize());
+ // deserialize and populate the buffer async, so that the client isn't blocked
+ Thread populateBufferWorker =
+ populateArrowBufferedQueue(
+ readSession, this.arrowBatchWrapperBlockingQueue, this.bigQueryReadClient);
+
+ BigQueryArrowResultSet arrowResultSet =
+ BigQueryArrowResultSet.of(
+ schema,
+ getArrowSchema(readSession),
+ totalRows,
+ this,
+ this.arrowBatchWrapperBlockingQueue,
+ populateBufferWorker,
+ this.bigQuery);
+ arrowResultSetFinalizers.add(
+ new BigQueryResultSetFinalizers.ArrowResultSetFinalizer(
+ arrowResultSet, referenceQueueArrowRs, populateBufferWorker));
+ arrowResultSet.setJobId(currentJobId);
+ return arrowResultSet;
+
+ } catch (Exception ex) {
+ throw new BigQueryJdbcException(ex.getMessage(), ex);
+ }
+ }
+
+ /** Asynchronously reads results and populates an Arrow record queue. */
+ @InternalApi
+ Thread populateArrowBufferedQueue(
+ ReadSession readSession,
+ BlockingQueue<BigQueryArrowBatchWrapper> arrowBatchWrapperBlockingQueue,
+ BigQueryReadClient bqReadClient) {
+ LOG.finest("++enter++");
+
+ Runnable arrowStreamProcessor =
+ () -> {
+ long rowsRead = 0;
+ int retryCount = 0;
+ try {
+ // Use the first stream to perform reading.
+ String streamName = readSession.getStreams(0).getName();
+
+ while (true) {
+ try {
+ ReadRowsRequest readRowsRequest =
+ ReadRowsRequest.newBuilder()
+ .setReadStream(streamName)
+ .setOffset(rowsRead)
+ .build();
+
+ // Process each block of rows as they arrive and decode using our simple row reader.
+ com.google.api.gax.rpc.ServerStream<ReadRowsResponse> stream =
+ bqReadClient.readRowsCallable().call(readRowsRequest);
+ for (ReadRowsResponse response : stream) {
+ if (Thread.currentThread().isInterrupted() || queryTaskExecutor.isShutdown()) {
+ break;
+ }
+
+ ArrowRecordBatch currentBatch = response.getArrowRecordBatch();
+ Uninterruptibles.putUninterruptibly(
+ arrowBatchWrapperBlockingQueue, BigQueryArrowBatchWrapper.of(currentBatch));
+ rowsRead += response.getRowCount();
+ }
+ break;
+ } catch (com.google.api.gax.rpc.ApiException e) {
+ if (e.getStatusCode().getCode()
+ == com.google.api.gax.rpc.StatusCode.Code.NOT_FOUND) {
+ LOG.warning("Read session expired or not found: %s", e.getMessage());
+ enqueueError(arrowBatchWrapperBlockingQueue, e);
+ break;
+ }
+ if (retryCount >= MAX_RETRY_COUNT) {
+ LOG.log(
+ Level.SEVERE,
+ "\n"
+ + Thread.currentThread().getName()
+ + " Interrupted @ arrowStreamProcessor, max retries exceeded",
+ e);
+ enqueueError(arrowBatchWrapperBlockingQueue, e);
+ break;
+ }
+ retryCount++;
attempt: %d", + retryCount); + Thread.sleep(RETRY_DELAY_MS); + } + } + + } catch (InterruptedException e) { + LOG.log( + Level.WARNING, + "\n" + Thread.currentThread().getName() + " Interrupted @ arrowStreamProcessor", + e); + enqueueError(arrowBatchWrapperBlockingQueue, e); + Thread.currentThread().interrupt(); + } catch (Exception e) { + LOG.log( + Level.WARNING, + "\n" + Thread.currentThread().getName() + " Error @ arrowStreamProcessor", + e); + enqueueError(arrowBatchWrapperBlockingQueue, e); + } finally { // logic needed for graceful shutdown + enqueueEndOfStream(arrowBatchWrapperBlockingQueue); + } + }; + + Thread populateBufferWorker = JDBC_THREAD_FACTORY.newThread(arrowStreamProcessor); + populateBufferWorker.start(); + return populateBufferWorker; + } + + /** Executes SQL query using either fast query path or read API */ + void processQueryResponse(String query, TableResult results) throws SQLException { + LOG.finest( + "API call completed{Query=%s, Parent Job ID=%s, Total rows=%s} ", + query, results.getJobId(), results.getTotalRows()); + JobId currentJobId = results.getJobId(); + if (currentJobId == null) { + LOG.fine("Standard API with Stateless query used."); + this.currentResultSet = processJsonResultSet(results); + } else if (useReadAPI(results)) { + LOG.fine("HighThroughputAPI used."); + LOG.info("HTAPI job ID: " + currentJobId.getJob()); + this.currentResultSet = processArrowResultSet(results); + } else { + // read API cannot be used. + LOG.fine("Standard API used."); + this.currentResultSet = processJsonResultSet(results); + } + this.currentUpdateCount = -1; + } + + // The read Ratio should be met + // AND the User must not have disabled the Read API + @VisibleForTesting + boolean useReadAPI(TableResult results) throws BigQueryJdbcSqlFeatureNotSupportedException { + LOG.finest("++enter++"); + if (!meetsReadRatio(results)) { + return false; + } + LOG.fine("Read API threshold is met."); + return querySettings.getUseReadAPI(); + } + + private boolean meetsReadRatio(TableResult results) { + LOG.finest("++enter++"); + long totalRows = results.getTotalRows(); + + if (totalRows == 0 || totalRows < querySettings.getHighThroughputMinTableSize()) { + return false; + } + + // TODO(BQ Team): TableResult doesnt expose the number of records in the current page, hence the + // below log iterates and counts. This is inefficient and we may eventually want to expose + // PageSize with TableResults + // TODO(Obada): Scope for performance optimization. + int pageSize = Iterators.size(results.getValues().iterator()); + return totalRows / pageSize > querySettings.getHighThroughputActivationRatio(); + } + + BigQueryJsonResultSet processJsonResultSet(TableResult results) { + String jobIdOrQueryId = + results.getJobId() == null ? results.getQueryId() : results.getJobId().getJob(); + LOG.info("BigQuery Job %s completed. Fetching results.", jobIdOrQueryId); + List threadList = new ArrayList(); + + Schema schema = results.getSchema(); + long totalRows = (getMaxRows() > 0) ? 
+
+ BigQueryJsonResultSet processJsonResultSet(TableResult results) {
+ String jobIdOrQueryId =
+ results.getJobId() == null ? results.getQueryId() : results.getJobId().getJob();
+ LOG.info("BigQuery Job %s completed. Fetching results.", jobIdOrQueryId);
+ List<Thread> threadList = new ArrayList<>();
+
+ Schema schema = results.getSchema();
+ long totalRows = (getMaxRows() > 0) ? getMaxRows() : results.getTotalRows();
+ this.bigQueryFieldValueListWrapperBlockingQueue = new LinkedBlockingDeque<>(getBufferSize());
+ BlockingQueue<Tuple<TableResult, Boolean>> rpcResponseQueue =
+ new LinkedBlockingDeque<>(getPageCacheSize(getBufferSize(), schema));
+
+ JobId jobId = results.getJobId();
+ if (jobId != null) {
+ // Thread to make rpc calls to fetch data from the server
+ Thread nextPageWorker =
+ runNextPageTaskAsync(
+ results,
+ results.getNextPageToken(),
+ jobId,
+ rpcResponseQueue,
+ this.bigQueryFieldValueListWrapperBlockingQueue);
+ threadList.add(nextPageWorker);
+ } else {
+ try {
+ populateFirstPage(results, rpcResponseQueue);
+ rpcResponseQueue.put(Tuple.of(null, false));
+ } catch (InterruptedException e) {
+ LOG.warning(
+ "%s Interrupted @ processJsonQueryResponseResults: %s",
+ Thread.currentThread().getName(), e.getMessage());
+ }
+ }
+
+ // Thread to parse data received from the server to client library objects
+ Thread populateBufferWorker =
+ parseAndPopulateRpcDataAsync(
+ schema, this.bigQueryFieldValueListWrapperBlockingQueue, rpcResponseQueue);
+ threadList.add(populateBufferWorker);
+
+ Thread[] jsonWorkers = threadList.toArray(new Thread[0]);
+
+ BigQueryJsonResultSet jsonResultSet =
+ BigQueryJsonResultSet.of(
+ schema,
+ totalRows,
+ this.bigQueryFieldValueListWrapperBlockingQueue,
+ this,
+ jsonWorkers,
+ this.bigQuery);
+ jsonResultSet.setJobId(jobId);
+ jsonResultSet.setQueryId(results.getQueryId());
+ jsonResultSetFinalizers.add(
+ new BigQueryResultSetFinalizers.JsonResultSetFinalizer(
+ jsonResultSet, referenceQueueJsonRs, jsonWorkers));
+ return jsonResultSet;
+ }
+
+ void populateFirstPage(
+ TableResult result, BlockingQueue<Tuple<TableResult, Boolean>> rpcResponseQueue) {
+ LOG.finest("++enter++");
+ // parse and put the first page in the pageCache before the other pages are parsed from the RPC
+ // calls
+ try {
+ // this is the first page which we have received.
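+ // (A Tuple's y() == true marks a page whose rows still need parsing; Tuple.of(null, false)
+ // signals the end of pagination to the parser thread.)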
+ rpcResponseQueue.put(Tuple.of(result, true));
+ } catch (InterruptedException e) {
+ LOG.warning(
+ "%s Interrupted @ populateFirstPage: %s",
+ Thread.currentThread().getName(), e.getMessage());
+ }
+ }
+
+ @Override
+ public void setFetchDirection(int direction) throws SQLException {
+ if (direction != ResultSet.FETCH_FORWARD) {
+ throw new BigQueryJdbcSqlFeatureNotSupportedException("Only FETCH_FORWARD is supported.");
+ }
+ this.fetchDirection = direction;
+ }
+
+ @VisibleForTesting
+ Thread runNextPageTaskAsync(
+ TableResult result,
+ String firstPageToken,
+ JobId jobId,
+ BlockingQueue<Tuple<TableResult, Boolean>> rpcResponseQueue,
+ BlockingQueue<BigQueryFieldValueListWrapper> bigQueryFieldValueListWrapperBlockingQueue) {
+ LOG.finest("++enter++");
+ // parse and put the first page in the pageCache before the other pages are parsed from the RPC
+ // calls
+ populateFirstPage(result, rpcResponseQueue);
+
+ // This thread makes the RPC calls and paginates
+ Runnable nextPageTask =
+ () -> {
+ String currentPageToken = firstPageToken;
+ TableResult currentResults = result;
+ TableId destinationTable = null;
+ if (firstPageToken != null) {
+ destinationTable = getDestinationTable(jobId);
+ }
+
+ try {
+ while (currentPageToken != null) {
+ // do not process further pages and shutdown
+ if (Thread.currentThread().isInterrupted() || queryTaskExecutor.isShutdown()) {
+ LOG.warning(
+ "%s Interrupted @ runNextPageTaskAsync", Thread.currentThread().getName());
+ break;
+ }
+
+ long startTime = System.nanoTime();
+ currentResults =
+ this.bigQuery.listTableData(
+ destinationTable,
+ TableDataListOption.pageSize(querySettings.getMaxResultPerPage()),
+ TableDataListOption.pageToken(currentPageToken));
+
+ currentPageToken = currentResults.getNextPageToken();
+ // this will be parsed asynchronously without blocking the current
+ // thread
+ Uninterruptibles.putUninterruptibly(rpcResponseQueue, Tuple.of(currentResults, true));
+ LOG.fine(
+ "Fetched %d results from the server in %d ms.",
+ querySettings.getMaxResultPerPage(),
+ (int) ((System.nanoTime() - startTime) / 1000000));
+ }
+ } catch (Exception ex) {
+ Uninterruptibles.putUninterruptibly(
+ bigQueryFieldValueListWrapperBlockingQueue,
+ BigQueryFieldValueListWrapper.ofError(new BigQueryJdbcRuntimeException(ex)));
+ } finally {
+ // this will stop the parseDataTask as well when the pagination
+ // completes
+ Uninterruptibles.putUninterruptibly(rpcResponseQueue, Tuple.of(null, false));
+ }
+ // We cannot do queryTaskExecutor.shutdownNow() here as populate buffer method may not
+ // have finished processing the records and even that will be interrupted
+ };
+
+ Thread nextPageWorker = JDBC_THREAD_FACTORY.newThread(nextPageTask);
+ nextPageWorker.start();
+ return nextPageWorker;
+ }
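+ // Together, runNextPageTaskAsync (producer: pages RPC responses into rpcResponseQueue) and
+ // parseAndPopulateRpcDataAsync (consumer: converts pages into FieldValueList wrappers) form
+ // the two-stage pipeline that feeds the JSON ResultSet.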
LOG.log(Level.WARNING, "\n" + Thread.currentThread().getName() + " Interrupted", e); + // Thread might get interrupted while calling the Cancel method, which is + // expected, so logging this instead of throwing the exception back + break; + } + + if (Thread.currentThread().isInterrupted() + || queryTaskExecutor.isShutdown() + || fieldValueLists == null) { + // do not process further pages and shutdown (outerloop) + break; + } + + long startTime = System.nanoTime(); + long results = 0; + for (FieldValueList fieldValueList : fieldValueLists) { + + if (Thread.currentThread().isInterrupted() || queryTaskExecutor.isShutdown()) { + // do not process further pages and shutdown (inner loop) + break; + } + Uninterruptibles.putUninterruptibly( + bigQueryFieldValueListWrapperBlockingQueue, + BigQueryFieldValueListWrapper.of(schema.getFields(), fieldValueList)); + results += 1; + } + LOG.fine( + "Processed %d results in %d ms.", + results, (int) ((System.nanoTime() - startTime) / 1000000)); + } + + } catch (Exception ex) { + LOG.log( + Level.WARNING, + "\n" + Thread.currentThread().getName() + " Error @ populateBufferAsync", + ex); + enqueueBufferError(bigQueryFieldValueListWrapperBlockingQueue, ex); + } finally { + enqueueBufferEndOfStream(bigQueryFieldValueListWrapperBlockingQueue); + } + }; + + Thread populateBufferWorker = JDBC_THREAD_FACTORY.newThread(populateBufferRunnable); + populateBufferWorker.start(); + return populateBufferWorker; + } + + /** + * Helper method that determines the optimal number of caches pages to improve read performance + */ + @VisibleForTesting + int getPageCacheSize(Integer numBufferedRows, Schema schema) { + LOG.finest("++enter++"); + // Min number of pages to cache + final int MIN_CACHE_SIZE = 3; + // Min number of pages to cache + final int MAX_CACHE_SIZE = 20; + int numColumns = schema.getFields().size(); + int numCachedPages; + long numCachedRows = numBufferedRows == null ? 0 : numBufferedRows.longValue(); + + // TODO: Further enhance this logic depending on customer feedback on memory consumption + if (numCachedRows > 10000) { + // the size of numBufferedRows is quite large and as per our tests we should be able to + // do enough even with low + numCachedPages = 2; + } + // too many fields are being read, setting the page size on the lower end + else if (numColumns > 15 && numCachedRows > 5000) { + numCachedPages = 3; + } + // low pagesize with fewer number of columns, we can cache more pages + else if (numCachedRows < 2000 && numColumns < 15) { + numCachedPages = 20; + } + // default - under 10K numCachedRows with any number of columns + else { + numCachedPages = 5; + } + return numCachedPages < MIN_CACHE_SIZE + ? MIN_CACHE_SIZE + : (Math.min(numCachedPages, MAX_CACHE_SIZE)); + } + + @Override + public int getFetchDirection() { + return this.fetchDirection; + } + + // TODO(neenu): Fix this value + // getNumBufferedRows in querySettings is always the same withDefaultValues - 20000 buffer size + // So, getBufferSize is also 20000. + private int getBufferSize() { + return (this.querySettings == null + || this.querySettings.getNumBufferedRows() == null + || this.querySettings.getNumBufferedRows() < 10000 + ? 
20000 + : Math.min(this.querySettings.getNumBufferedRows() * 2, 100000)); + } + + /** Returns the destinationTable from jobId by calling `jobs.get` API */ + TableId getDestinationTable(JobId jobId) { + Job job = this.bigQuery.getJob(jobId); + LOG.finest("Destination Table retrieved from %s", job.getJobId()); + return ((QueryJobConfiguration) job.getConfiguration()).getDestinationTable(); + } + + QueryJobConfiguration.Builder getJobConfig(String query) { + LOG.finest("++enter++"); + QueryJobConfiguration.Builder queryConfigBuilder = QueryJobConfiguration.newBuilder(query); + if (this.querySettings.getJobTimeoutMs() > 0) { + queryConfigBuilder.setJobTimeoutMs(this.querySettings.getJobTimeoutMs()); + } + if (this.querySettings.getMaxBytesBilled() > 0) { + queryConfigBuilder.setMaximumBytesBilled(this.querySettings.getMaxBytesBilled()); + } + if (this.querySettings.getDefaultDataset() != null) { + queryConfigBuilder.setDefaultDataset(this.querySettings.getDefaultDataset()); + } + Map mergedLabels = new HashMap<>(); + if (this.querySettings.getLabels() != null) { + mergedLabels.putAll(this.querySettings.getLabels()); + } + if (this.extraLabels != null) { + mergedLabels.putAll(this.extraLabels); + } + queryConfigBuilder.setLabels(mergedLabels); + queryConfigBuilder.setUseQueryCache(this.querySettings.getUseQueryCache()); + queryConfigBuilder.setMaxResults(this.querySettings.getMaxResultPerPage()); + if (this.querySettings.getSessionInfoConnectionProperty() != null) { + queryConfigBuilder.setConnectionProperties( + ImmutableList.of(this.querySettings.getSessionInfoConnectionProperty())); + } else { + queryConfigBuilder.setCreateSession(querySettings.isEnableSession()); + } + if (this.querySettings.getKmsKeyName() != null) { + EncryptionConfiguration encryption = + EncryptionConfiguration.newBuilder() + .setKmsKeyName(this.querySettings.getKmsKeyName()) + .build(); + queryConfigBuilder.setDestinationEncryptionConfiguration(encryption); + } + if (this.querySettings.getQueryProperties() != null) { + queryConfigBuilder.setConnectionProperties(this.querySettings.getQueryProperties()); + } + boolean useLegacy = + QueryDialectType.BIG_QUERY.equals( + QueryDialectType.valueOf(this.querySettings.getQueryDialect())); + queryConfigBuilder.setUseLegacySql(useLegacy); + + return queryConfigBuilder; + } + + private void checkIfDatasetExistElseCreate(String datasetName) { + Dataset dataset = bigQuery.getDataset(DatasetId.of(datasetName)); + if (dataset == null) { + LOG.info("Creating a hidden dataset: %s ", datasetName); + DatasetInfo datasetInfo = + DatasetInfo.newBuilder(datasetName) + .setDefaultTableLifetime(this.querySettings.getDestinationDatasetExpirationTime()) + .build(); + bigQuery.create(datasetInfo); + } + } + + private String getDefaultDestinationTable() { + String timeOfCreation = String.valueOf(Instant.now().toEpochMilli()); + String randomizedId = String.valueOf(new Random().nextInt(9999)); + return DEFAULT_TABLE_NAME + timeOfCreation + randomizedId; + } + + @InternalApi + JobIdWrapper insertJob(JobConfiguration jobConfiguration) throws SQLException { + Job job; + JobInfo jobInfo = JobInfo.of(jobConfiguration); + LOG.finest("++enter++"); + try { + job = this.bigQuery.create(jobInfo); + } catch (BigQueryException ex) { + throw new BigQueryJdbcException(ex); + } + return new JobIdWrapper(job.getJobId(), null, null); + } + + @Override + public void setFetchSize(int rows) { + this.fetchSize = rows; + } + + @Override + public int getFetchSize() { + return this.fetchSize; + } + + /** + * Gets the 
extra labels for this statement.
+ *
+ * @return A map of the extra labels.
+ */
+ public Map<String, String> getExtraLabels() {
+ return this.extraLabels;
+ }
+
+ /**
+ * Sets the extra labels for this statement.
+ *
+ * @param extraLabels A map of the extra labels.
+ */
+ public void setExtraLabels(Map<String, String> extraLabels) {
+ this.extraLabels = extraLabels;
+ }
+
+ @Override
+ public int getResultSetConcurrency() {
+ return ResultSet.CONCUR_READ_ONLY;
+ }
+
+ ResultSet getCurrentResultSet() {
+ return this.currentResultSet;
+ }
+
+ @Override
+ public int getResultSetType() {
+ return ResultSet.TYPE_FORWARD_ONLY;
+ }
+
+ /**
+ * Wraps jobId and the firstPage of QueryResponse, so that we can avoid an RPC to fetch the first
+ * page again
+ */
+ static class JobIdWrapper {
+
+ private JobId jobId;
+ private TableResult firstPage;
+ private ArrayList<Job> jobs;
+
+ public JobIdWrapper(JobId jobId, TableResult firstPage, ArrayList<Job> jobs) {
+ this.jobId = jobId;
+ this.firstPage = firstPage;
+ this.jobs = jobs;
+ }
+
+ JobId getJobId() {
+ return this.jobId;
+ }
+
+ void setJobId(JobId jobId) {
+ this.jobId = jobId;
+ }
+
+ TableResult getResults() {
+ return this.firstPage;
+ }
+
+ void setResults(TableResult firstPage) {
+ this.firstPage = firstPage;
+ }
+
+ ArrayList<Job> getJobs() {
+ return jobs;
+ }
+
+ void setJobs(ArrayList<Job> jobs) {
+ this.jobs = jobs;
+ }
+ }
+
+ @Override
+ public void addBatch(String sql) throws SQLException {
+ if (sql == null || sql.isEmpty()) {
+ return;
+ }
+ LOG.finest("++enter++");
+ sql = sql.trim();
+ if (!sql.endsWith(";")) {
+ sql += "; ";
+ }
+ SqlType sqlType = getQueryType(QueryJobConfiguration.newBuilder(sql).build(), null);
+ if (!SqlType.DML.equals(sqlType)) {
+ throw new IllegalArgumentException("addBatch currently supports only DML operations.");
+ }
+ this.batchQueries.add(sql);
+ }
+
+ @Override
+ public void clearBatch() {
+ this.batchQueries.clear();
+ }
+
+ @Override
+ public int[] executeBatch() throws SQLException {
+ LOG.finest("++enter++");
+ int[] result = new int[this.batchQueries.size()];
+ if (this.batchQueries.isEmpty()) {
+ return result;
+ }
+
+ try {
+ String combinedQueries = String.join("", this.batchQueries);
+ QueryJobConfiguration.Builder jobConfiguration = getJobConfig(combinedQueries);
+ jobConfiguration.setPriority(QueryJobConfiguration.Priority.BATCH);
+ runQuery(combinedQueries, jobConfiguration.build());
+ } catch (InterruptedException ex) {
+ throw new BigQueryJdbcRuntimeException(ex);
+ }
+
+ int i = 0;
+ while (getUpdateCount() != -1 && i < this.batchQueries.size()) {
+ result[i] = getUpdateCount();
+ getMoreResults();
+ i++;
+ }
+
+ clearBatch();
+ return result;
+ }
+
+ @Override
+ public Connection getConnection() {
+ return this.connection;
+ }
+
+ public boolean hasMoreResults() {
+ if (this.parentJobId == null) {
+ return false;
+ }
+ return this.currentJobIdIndex + 1 < this.parentJobId.getJobs().size();
+ }
+
+ @Override
+ public boolean getMoreResults(int current) throws SQLException {
+ LOG.finest("++enter++");
+ checkClosed();
+ if (current != CLOSE_CURRENT_RESULT) {
+ throw new BigQueryJdbcSqlFeatureNotSupportedException(
+ "The JDBC driver only supports Statement.CLOSE_CURRENT_RESULT.");
+ }
+
+ if (this.parentJobId == null) {
+ return false;
+ }
+
+ try {
+ if (this.currentResultSet != null) {
+ this.currentResultSet.close();
+ this.currentResultSet = null;
+ // Statement can be closed if it was the last result
+ if (isClosed) {
+ return false;
+ }
+ }
+
+ Job currentJob = getNextJob();
+ if (currentJob != null) {
+ StatementType statementType =
((QueryStatistics) (currentJob.getStatistics())).getStatementType();
+ SqlType sqlType = getQueryType(currentJob.getConfiguration(), statementType);
+ handleQueryResult(this.scriptQuery, currentJob.getQueryResults(), sqlType);
+
+ return sqlType == SqlType.SELECT;
+ } else {
+ resetStatementFields();
+ return false;
+ }
+ } catch (InterruptedException | SQLException ex) {
+ throw new BigQueryJdbcRuntimeException(ex);
+ }
+ }
+
+ @Override
+ public boolean isWrapperFor(Class<?> iface) {
+ return iface.isInstance(this);
+ }
+
+ @Override
+ public <T> T unwrap(Class<T> iface) throws SQLException {
+ if (!isWrapperFor(iface)) {
+ throw new BigQueryJdbcException(
+ String.format("Unable to cast Statement to %s class.", iface.getName()));
+ }
+ return (T) this;
+ }
+
+ @Override
+ public int getResultSetHoldability() {
+ return ResultSet.CLOSE_CURSORS_AT_COMMIT;
+ }
+
+ @Override
+ public boolean isClosed() {
+ return this.isClosed;
+ }
+
+ @Override
+ public void setPoolable(boolean poolable) {
+ this.poolable = poolable;
+ }
+
+ @Override
+ public boolean isPoolable() {
+ return this.poolable;
+ }
+
+ @Override
+ public void closeOnCompletion() {
+ this.closeOnCompletion = true;
+ }
+
+ @Override
+ public boolean isCloseOnCompletion() {
+ return this.closeOnCompletion;
+ }
+
+ protected void logQueryExecutionStart(String sql) {
+ if (sql == null) {
+ return;
+ }
+ String sanitizedSql = sql.trim().replaceAll("\\s+", " ");
+ String truncatedSql =
+ sanitizedSql.length() > 256 ? sanitizedSql.substring(0, 256) + "..." : sanitizedSql;
+ LOG.info("Executing query: " + truncatedSql);
+ LOG.info("Using query settings: " + this.querySettings.toString());
+ }
+
+ /** Throws a {@link BigQueryJdbcException} if this object is closed */
+ void checkClosed() throws SQLException {
+ if (isClosed()) {
+ throw new BigQueryJdbcException("This " + getClass().getName() + " has been closed");
+ }
+ }
+
+ enum SqlType {
+ SELECT,
+ DML,
+ DML_EXTRA,
+ DDL,
+ SCRIPT,
+ TCL,
+ OTHER
+ }
+
+ enum QueryDialectType {
+ SQL,
+ BIG_QUERY
+ }
+
+ private void enqueueError(BlockingQueue<BigQueryArrowBatchWrapper> queue, Exception e) {
+ Uninterruptibles.putUninterruptibly(
+ queue, BigQueryArrowBatchWrapper.ofError(new BigQueryJdbcRuntimeException(e)));
+ }
+
+ private void enqueueEndOfStream(BlockingQueue<BigQueryArrowBatchWrapper> queue) {
+ Uninterruptibles.putUninterruptibly(queue, BigQueryArrowBatchWrapper.of(null, true));
+ }
+
+ private void enqueueBufferError(
+ BlockingQueue<BigQueryFieldValueListWrapper> queue, Exception e) {
+ Uninterruptibles.putUninterruptibly(
+ queue, BigQueryFieldValueListWrapper.ofError(new BigQueryJdbcRuntimeException(e)));
+ }
+
+ private void enqueueBufferEndOfStream(BlockingQueue<BigQueryFieldValueListWrapper> queue) {
+ Uninterruptibles.putUninterruptibly(queue, BigQueryFieldValueListWrapper.of(null, null, true));
+ }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactory.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactory.java
new file mode 100644
index 0000000000..bba57d7311
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactory.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import java.util.concurrent.ThreadFactory; + +@InternalApi +class BigQueryThreadFactory implements ThreadFactory { + private static final BigQueryJdbcCustomLogger LOG = + new BigQueryJdbcCustomLogger(BigQueryThreadFactory.class.getName()); + private String threadPrefix; + private int threadSerialNum = 0; + + public BigQueryThreadFactory(String threadPrefix) { + this.threadPrefix = threadPrefix; + } + + public BigQueryThreadFactory() { + this.threadPrefix = "DEFAULT_POOL_"; + } + + @Override + public Thread newThread(Runnable r) { + Thread t = new Thread(r, threadPrefix + (++threadSerialNum)); // non thread safe increment + t.setDaemon(true); + LOG.finest("New thread %s created.", t.getName()); + return t; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercer.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercer.java new file mode 100644 index 0000000000..42640ddf28 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercer.java @@ -0,0 +1,149 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionException; +import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionNotFoundException; +import java.util.Map; + +/** + * Provides a declarative mechanism for coercing an object from one type to another. For example, + * coercion of {@link String} to {@link Integer} can be achieved like this: + * + *

+ * <pre>
+ *   Integer value = BigQueryTypeCoercer.INSTANCE.coerceTo(Integer.class, "3452148");
+ *   System.out.println(value); // 3452148
+ * </pre>
+ *
+ * A {@link BigQueryTypeCoercer} is baked with all the default {@link BigQueryCoercion}s from {@link
+ * BigQueryDefaultCoercions} to coerce all the primitive types.
+ *
+ * <p>It is also possible to extend the behaviour of {@link BigQueryTypeCoercer} to other custom
+ * user defined types by creating an implementation of {@link BigQueryCoercion} and registering it
+ * with {@link BigQueryTypeCoercerBuilder} using its {@link
+ * BigQueryTypeCoercerBuilder#registerTypeCoercion(BigQueryCoercion)} method.
+ *
+ * <pre>
+ *   public class TextToStringCoercion implements BigQueryCoercion<Text, String> {
+ *
+ *    @Override
+ *    public String coerce(Text text) {
+ *       return text.toString();  // logic to coerce from Text type to String type
+ *    }
+ *  }
+ * </pre>
+ *
+ * and use it like this
+ *
+ * <pre>
+ *    byte[] bytesArray = {72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33};
+ *    Text text = new Text(bytesArray);
+ *
+ *    BigQueryTypeCoercer typeCoercer = new BigQueryTypeCoercerBuilder()
+ *         .registerTypeCoercion(new TextToStringCoercion())  // registering a custom coercion
+ *         .build();
+ *    System.out.println(typeCoercer.coerceTo(String.class, text));  //  Hello World!
+ * </pre>
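+ *
+ * <p>A coercion can also be registered from a plain {@code java.util.function.Function} via
+ * {@code BigQueryTypeCoercerBuilder#registerTypeCoercion(Function, Class, Class)}; for example
+ * (an illustrative sketch):
+ *
+ * <pre>
+ *    BigQueryTypeCoercer coercer = new BigQueryTypeCoercerBuilder()
+ *         .registerTypeCoercion(Integer::parseInt, String.class, Integer.class)
+ *         .build();
+ *    System.out.println(coercer.coerceTo(Integer.class, "42"));  //  42
+ * </pre>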
+ */
+@InternalApi
+class BigQueryTypeCoercer {
+ private static final BigQueryJdbcCustomLogger LOG =
+ new BigQueryJdbcCustomLogger(BigQueryTypeCoercer.class.getName());
+
+ /** A {@link BigQueryTypeCoercer} instance with all the inbuilt {@link BigQueryCoercion}s */
+ static BigQueryTypeCoercer INSTANCE;
+
+ static {
+ INSTANCE = BigQueryDefaultCoercions.builder().build();
+ }
+
+ private final Map<Class, Map<Class, BigQueryCoercion>> allCoercions;
+
+ BigQueryTypeCoercer(Map<Class, Map<Class, BigQueryCoercion>> allCoercions) {
+ this.allCoercions = allCoercions;
+ }
+
+ /**
+ * Coerces an object to the specified type.
+ *
+ * @param targetClass the target class for the coercion
+ * @param value the object that needs to be coerced.
+ * @throws BigQueryJdbcCoercionNotFoundException when coercion can not be performed to the target
+ * type.
+ * @throws BigQueryJdbcCoercionException when an error is encountered while performing the
+ * coercion.
+ */
+ <T> T coerceTo(Class<T> targetClass, Object value) {
+ Class sourceClass = value == null ? Void.class : value.getClass();
+ // FieldValue object for null-values requires special check
+ if (sourceClass == FieldValue.class && ((FieldValue.class.cast(value)).isNull())) {
+ sourceClass = Void.class;
+ }
+ // No coercion needed
+ if (sourceClass.equals(targetClass)) {
+ return targetClass.cast(value);
+ }
+ BigQueryCoercion coercion = findCoercion(sourceClass, targetClass);
+ LOG.finest("%s coercion for %s", coercion, value);
+ // Value is null case & no explicit coercion
+ if (sourceClass == Void.class && coercion == null) {
+ return null;
+ }
+ if (coercion == null) {
+ if (targetClass.equals(String.class)) {
+ return (T) value.toString();
+ }
+ throw new BigQueryJdbcCoercionNotFoundException(sourceClass, targetClass);
+ }
+ try {
+ return (T) coercion.coerce(sourceClass != Void.class ? value : null);
+ } catch (Exception ex) {
+ throw new BigQueryJdbcCoercionException(ex);
+ }
+ }
+
+ /**
+ * Creates a {@link BigQueryTypeCoercerBuilder} with all the default coercions from {@link
+ * BigQueryDefaultCoercions}.
+ */
+ static BigQueryTypeCoercerBuilder builder() {
+ return BigQueryDefaultCoercions.builder();
+ }
+
+ private BigQueryCoercion findCoercion(Class sourceClass, Class targetClass) {
+ Map<Class, BigQueryCoercion> bySourceMap = this.allCoercions.get(sourceClass);
+ // AutoValue generated concrete classes are registered with their abstract classes and not the
+ // concrete class. Let's make sure we can find the registered abstract class for such
+ // classes. The abstract class in these cases would be the super class of the generated
+ // AutoValue concrete classes.
+ if (bySourceMap == null) {
+ Class registeredAbstractClass = sourceClass.getSuperclass();
+ bySourceMap = this.allCoercions.get(registeredAbstractClass);
+ }
+ // If we still can't find the coercion source class entry then just return.
+    // If we still can't find a coercion entry for the source class, just return null.
+    if (bySourceMap == null) {
+      return null;
+    }
+    return (BigQueryCoercion<Object, T>) bySourceMap.get(targetClass);
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercerBuilder.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercerBuilder.java
new file mode 100644
index 0000000000..8539515ed1
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercerBuilder.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.api.core.InternalApi;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Function;
+
+/**
+ * A builder to create a {@link BigQueryTypeCoercer} to perform the coercion of custom user-defined
+ * types.
+ */
+@InternalApi
+class BigQueryTypeCoercerBuilder {
+
+  private final Map<Class<?>, Map<Class<?>, BigQueryCoercion<?, ?>>> allCoercions;
+
+  BigQueryTypeCoercerBuilder() {
+    this.allCoercions = new HashMap<>();
+  }
+
+  /**
+   * Registers a {@link BigQueryCoercion}.
+   *
+   * @param coercion a {@link BigQueryCoercion} to register with this builder
+   */
+  <S, T> BigQueryTypeCoercerBuilder registerTypeCoercion(BigQueryCoercion<S, T> coercion) {
+    Type[] typeArguments =
+        ((ParameterizedType) coercion.getClass().getGenericInterfaces()[0])
+            .getActualTypeArguments();
+    Class<S> sourceClass = (Class<S>) typeArguments[0];
+    Class<T> targetClass = (Class<T>) typeArguments[1];
+    this.registerInternal(coercion, sourceClass, targetClass);
+    return this;
+  }
+
+  /**
+   * Registers a {@link BigQueryCoercion} using an implementation of {@link Function}.
+   *
+   * @param function a {@link Function} to register with the builder
+   * @param sourceClass the source class
+   * @param targetClass the target class
+   */
+  <S, T> BigQueryTypeCoercerBuilder registerTypeCoercion(
+      Function<S, T> function, Class<S> sourceClass, Class<T> targetClass) {
+    this.registerInternal((BigQueryCoercion<S, T>) function::apply, sourceClass, targetClass);
+    return this;
+  }
+
+  /** Builds the {@link BigQueryTypeCoercer} with all the registered {@link BigQueryCoercion}s.
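+   *
+   * <p>Registrations are keyed by the (source class, target class) pair, so registering a second
+   * coercion for the same pair replaces the earlier one; user-supplied coercions can therefore
+   * override the defaults. For example:
+   *
+   * <pre>
+   *    BigQueryTypeCoercer coercer =
+   *        BigQueryTypeCoercer.builder()  // starts from the default coercions
+   *            .registerTypeCoercion(Text::toString, Text.class, String.class)
+   *            .build();
+   * </pre>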
*/ + BigQueryTypeCoercer build() { + return new BigQueryTypeCoercer(this.allCoercions); + } + + private void registerInternal( + BigQueryCoercion coercion, Class sourceClass, Class targetClass) { + Map, BigQueryCoercion> mapBySource = + this.allCoercions.getOrDefault(sourceClass, new HashMap<>()); + mapBySource.put(targetClass, coercion); + this.allCoercions.putIfAbsent(sourceClass, mapBySource); + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercionUtility.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercionUtility.java new file mode 100644 index 0000000000..9a4dc21304 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercionUtility.java @@ -0,0 +1,409 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.api.core.InternalApi; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.cloud.bigquery.Range; +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.Duration; +import java.time.Instant; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.Period; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.time.temporal.ChronoUnit; +import java.util.concurrent.TimeUnit; +import org.apache.arrow.vector.PeriodDuration; +import org.apache.arrow.vector.util.Text; + +@InternalApi +class BigQueryTypeCoercionUtility { + + static BigQueryTypeCoercer INSTANCE; + + static { + INSTANCE = + BigQueryTypeCoercer.builder() + .registerTypeCoercion(new FieldValueToString()) + .registerTypeCoercion(new FieldValueToInteger()) + .registerTypeCoercion(new FieldValueToFloat()) + .registerTypeCoercion(new FieldValueToShort()) + .registerTypeCoercion(new FieldValueToLong()) + .registerTypeCoercion(new FieldValueToDouble()) + .registerTypeCoercion(new FieldValueToBigDecimal()) + .registerTypeCoercion(new FieldValueToBoolean()) + .registerTypeCoercion(new FieldValueToBytesArray()) + .registerTypeCoercion(new FieldValueToTimestamp()) + .registerTypeCoercion(new FieldValueToTime()) + .registerTypeCoercion(new FieldValueToDate()) + .registerTypeCoercion(new FieldValueToObject()) + .registerTypeCoercion(new StringToBytesArray()) + .registerTypeCoercion(new RangeToString()) + .registerTypeCoercion(new IntegerToLong()) + .registerTypeCoercion(new BytesArrayToString()) + + // Read API Type coercions + .registerTypeCoercion(Timestamp::valueOf, LocalDateTime.class, Timestamp.class) + .registerTypeCoercion(Text::toString, Text.class, String.class) + .registerTypeCoercion(new TextToInteger()) + .registerTypeCoercion(new LongToTimestamp()) + .registerTypeCoercion(new LongToTime()) + .registerTypeCoercion(new IntegerToDate()) + .registerTypeCoercion( 
+ (Timestamp ts) -> Date.valueOf(ts.toLocalDateTime().toLocalDate()), + Timestamp.class, + Date.class) + .registerTypeCoercion( + (Timestamp ts) -> Time.valueOf(ts.toLocalDateTime().toLocalTime()), + Timestamp.class, + Time.class) + .registerTypeCoercion( + (Time time) -> // Per JDBC spec, the date component should be 1970-01-01 + Timestamp.valueOf(LocalDateTime.of(LocalDate.ofEpochDay(0), time.toLocalTime())), + Time.class, + Timestamp.class) + .registerTypeCoercion( + (Date date) -> new Timestamp(date.getTime()), Date.class, Timestamp.class) + .registerTypeCoercion(new TimestampToString()) + .registerTypeCoercion(new TimeToString()) + .registerTypeCoercion((Long l) -> l != 0L, Long.class, Boolean.class) + .registerTypeCoercion((Double d) -> d != 0.0d, Double.class, Boolean.class) + .registerTypeCoercion( + (BigDecimal bd) -> bd.compareTo(BigDecimal.ZERO) != 0, + BigDecimal.class, + Boolean.class) + .registerTypeCoercion((Integer i) -> i != 0, Integer.class, Boolean.class) + .registerTypeCoercion((Float f) -> f != 0.0f, Float.class, Boolean.class) + .registerTypeCoercion((Short s) -> s.shortValue() != 0, Short.class, Boolean.class) + .registerTypeCoercion((Boolean b) -> b ? 1L : 0L, Boolean.class, Long.class) + .registerTypeCoercion((Boolean b) -> b ? 1.0d : 0.0d, Boolean.class, Double.class) + .registerTypeCoercion((Boolean b) -> b ? 1.0f : 0.0f, Boolean.class, Float.class) + .registerTypeCoercion((Boolean b) -> (short) (b ? 1 : 0), Boolean.class, Short.class) + .registerTypeCoercion((Boolean b) -> (byte) (b ? 1 : 0), Boolean.class, Byte.class) + .registerTypeCoercion( + (Boolean b) -> b ? BigDecimal.ONE : BigDecimal.ZERO, + Boolean.class, + BigDecimal.class) + .registerTypeCoercion(new PeriodDurationToString()) + .registerTypeCoercion(unused -> (byte) 0, Void.class, Byte.class) + .registerTypeCoercion(unused -> 0, Void.class, Integer.class) + .registerTypeCoercion(unused -> 0L, Void.class, Long.class) + .registerTypeCoercion(unused -> 0D, Void.class, Double.class) + .registerTypeCoercion(unused -> 0f, Void.class, Float.class) + .registerTypeCoercion(unused -> (short) 0, Void.class, Short.class) + .build(); + } + + private static class TimestampToString implements BigQueryCoercion { + private static final DateTimeFormatter FORMATTER = + DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSSSSS"); + + @Override + public String coerce(Timestamp value) { + return FORMATTER.format(value.toLocalDateTime()); + } + } + + private static class TimeToString implements BigQueryCoercion { + private static final DateTimeFormatter FORMATTER = DateTimeFormatter.ofPattern("HH:mm:ss.SSS"); + + @Override + public String coerce(Time value) { + return FORMATTER.format(value.toLocalTime()); + } + } + + private static class PeriodDurationToString implements BigQueryCoercion { + + @Override + public String coerce(PeriodDuration value) { + StringBuilder builder = new StringBuilder(); + + // Conversion of Period + Period period = value.getPeriod().normalized(); + + builder + .append(period.getYears()) + .append("-") + .append(period.getMonths()) + .append(" ") + .append(period.getDays()) + .append(" "); + + // Conversion of Duration + Duration duration = value.getDuration(); + if (duration.isNegative()) { + builder.append("-"); + duration = duration.negated(); + } + long hours = duration.toHours(); + duration = duration.minusHours(hours); + long minutes = duration.toMinutes(); + duration = duration.minusMinutes(minutes); + long seconds = duration.getSeconds(); + duration = duration.minusSeconds(seconds); + long 
microseconds = duration.toNanos() / 1000; + + builder + .append(hours) + .append(":") + .append(minutes) + .append(":") + .append(seconds) + .append(".") + .append(microseconds); + + String result = builder.toString(); + result = result.replaceFirst("--", "-"); + + return result; + } + } + + private static class IntegerToDate implements BigQueryCoercion { + + @Override + public Date coerce(Integer value) { + // For example int 18993 represents 2022-01-01 + // Using LocalDate here to avoid this date getting affected by local time zones. + LocalDate date = LocalDate.ofEpochDay(Long.valueOf(value)); + return Date.valueOf(date); + } + } + + private static class LongToTime implements BigQueryCoercion { + + @Override + public Time coerce(Long value) { + + int HH = (int) TimeUnit.MICROSECONDS.toHours(value); + int MM = (int) (TimeUnit.MICROSECONDS.toMinutes(value) % 60); + int SS = (int) (TimeUnit.MICROSECONDS.toSeconds(value) % 60); + + // Note: BQ Time has a precision of up to six fractional digits (microsecond precision) + // but java.sql.Time do not. So data after seconds is not returned. + return new Time(HH, MM, SS); + } + } + + private static class LongToTimestamp implements BigQueryCoercion { + + @Override + public Timestamp coerce(Long value) { + // Long value is in microseconds. All further calculations should account for the unit. + Instant instant = Instant.ofEpochMilli(value / 1000).plusNanos((value % 1000) * 1000); + // JDBC is defaulting to UTC because BQ UI defaults to UTC. + LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, ZoneId.of("UTC")); + return Timestamp.valueOf(localDateTime); + } + } + + private static class TextToInteger implements BigQueryCoercion { + + @Override + public Integer coerce(Text value) { + return Integer.parseInt(value.toString()); + } + } + + private static class FieldValueToObject implements BigQueryCoercion { + + @Override + public Object coerce(FieldValue fieldValue) { + return fieldValue.getValue(); + } + } + + private static class FieldValueToDate implements BigQueryCoercion { + + @Override + public Date coerce(FieldValue fieldValue) { + return Date.valueOf(fieldValue.getStringValue()); + } + } + + private static class FieldValueToTime implements BigQueryCoercion { + + @Override + public Time coerce(FieldValue fieldValue) { + // Time ranges from 00:00:00 to 23:59:59.999999 in BigQuery + String strTime = fieldValue.getStringValue(); + try { + LocalTime localTime = LocalTime.parse(strTime); + // Convert LocalTime to milliseconds of the day. This correctly preserves millisecond + // precision and truncates anything smaller + long millis = TimeUnit.NANOSECONDS.toMillis(localTime.toNanoOfDay()); + return new Time(millis); + } catch (java.time.format.DateTimeParseException e) { + throw new IllegalArgumentException( + "Cannot parse the value " + strTime + " to java.sql.Time", e); + } + } + } + + private static class FieldValueToTimestamp implements BigQueryCoercion { + + @Override + public Timestamp coerce(FieldValue fieldValue) { + String rawValue = fieldValue.getStringValue(); + // BigQuery DATETIME strings are formatted like "YYYY-MM-DD'T'HH:MM:SS.fffffffff" + // BigQuery TIMESTAMP strings are numeric epoch seconds. + if (rawValue.contains("T")) { + // It's a DATETIME string. + // Timestamp.valueOf() expects "yyyy-mm-dd hh:mm:ss.fffffffff" format. + return Timestamp.valueOf(rawValue.replace('T', ' ')); + } else { + // It's a TIMESTAMP numeric string. 
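+        // getTimestampValue() returns microseconds since the epoch; ChronoUnit.MICROS below
+        // keeps the full microsecond precision. Illustrative value: 1_000_001 micros becomes
+        // 1970-01-01T00:00:01.000001Z before the UTC LocalDateTime conversion.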
+ long microseconds = fieldValue.getTimestampValue(); + Instant instant = Instant.EPOCH.plus(microseconds, ChronoUnit.MICROS); + // JDBC is defaulting to UTC because BQ UI defaults to UTC. + LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, ZoneId.of("UTC")); + return Timestamp.valueOf(localDateTime); + } + } + } + + private static class FieldValueToBytesArray implements BigQueryCoercion { + + @Override + public byte[] coerce(FieldValue fieldValue) { + return fieldValue.getBytesValue(); + } + } + + private static class StringToBytesArray implements BigQueryCoercion { + + @Override + public byte[] coerce(String value) { + return value.getBytes(); + } + } + + private static class BytesArrayToString implements BigQueryCoercion { + + @Override + public String coerce(byte[] value) { + return java.util.Base64.getEncoder().encodeToString(value); + } + } + + private static class FieldValueToBoolean implements BigQueryCoercion { + + @Override + public Boolean coerce(FieldValue fieldValue) { + return !fieldValue.isNull() && fieldValue.getBooleanValue(); + } + } + + private static class FieldValueToBigDecimal implements BigQueryCoercion { + + @Override + public BigDecimal coerce(FieldValue fieldValue) { + return fieldValue.getNumericValue(); + } + } + + private static class FieldValueToDouble implements BigQueryCoercion { + + @Override + public Double coerce(FieldValue fieldValue) { + return fieldValue.getDoubleValue(); + } + } + + private static class FieldValueToLong implements BigQueryCoercion { + + @Override + public Long coerce(FieldValue fieldValue) { + return fieldValue.getLongValue(); + } + } + + private static class FieldValueToInteger implements BigQueryCoercion { + + @Override + public Integer coerce(FieldValue fieldValue) { + return (int) fieldValue.getLongValue(); + } + } + + private static class FieldValueToFloat implements BigQueryCoercion { + + @Override + public Float coerce(FieldValue fieldValue) { + return (float) fieldValue.getDoubleValue(); + } + } + + private static class FieldValueToShort implements BigQueryCoercion { + + @Override + public Short coerce(FieldValue fieldValue) { + return (short) fieldValue.getLongValue(); + } + } + + private static class FieldValueToString implements BigQueryCoercion { + + @Override + public String coerce(FieldValue fieldValue) { + if (Attribute.REPEATED.equals(fieldValue.getAttribute())) { // Case for Arrays + return fieldValue.getValue().toString(); + } + if (Attribute.RANGE.equals(fieldValue.getAttribute())) { // Range values + Range rangeValue = fieldValue.getRangeValue(); + return INSTANCE.coerceTo(String.class, rangeValue); + } + if (Attribute.RECORD.equals(fieldValue.getAttribute())) { // Case for Structs + return fieldValue.getRecordValue().toString(); + } + return fieldValue.getStringValue(); + } + } + + private static class IntegerToLong implements BigQueryCoercion { + + @Override + public Long coerce(Integer intValue) { + if (intValue == null) { + return 0L; + } + return Long.valueOf(intValue); + } + } + + private static class RangeToString implements BigQueryCoercion { + + @Override + public String coerce(Range value) { + FieldValue startValue = value.getStart(); + FieldValue endValue = value.getEnd(); + + String start = startValue.isNull() ? "UNBOUNDED" : startValue.getStringValue(); + String end = endValue.isNull() ? "UNBOUNDED" : endValue.getStringValue(); + // The start of a range is inclusive, and the end is exclusive. 
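+      // Hence the half-open notation below: e.g., a range starting at 2024-01-01 with no end
+      // renders as "[2024-01-01, UNBOUNDED)" (illustrative values).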
+ return String.format("[%s, %s)", start, end); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/DataSource.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/DataSource.java new file mode 100644 index 0000000000..681595f8b0 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/DataSource.java @@ -0,0 +1,1291 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.common.base.Joiner; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import java.io.PrintWriter; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.Map; +import java.util.Properties; +import java.util.function.BiConsumer; +import java.util.logging.Logger; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * BigQuery JDBC implementation of {@link javax.sql.DataSource} + * + *
+ * <p>
    A factory for connections to the physical data source that this DataSource object represents. + * An alternative to the DriverManager facility, a DataSource object is the preferred means of + * getting a connection. An object that implements the DataSource interface will typically be + * registered with a naming service based on the Java™ Naming and Directory (JNDI) API. + */ +public class DataSource implements javax.sql.DataSource { + private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString()); + private String URL; + static final ImmutableSet VALID_JOB_CREATION_MODES = ImmutableSet.of(1, 2); + + private String projectId; + private String defaultDataset; + private String location; + private String userAgent; + private Boolean enableHighThroughputAPI; + private Integer highThroughputMinTableSize; + private Integer highThroughputActivationRatio; + private Boolean unsupportedHTAPIFallback; + private String kmsKeyName; + private Map queryProperties; + private String logLevel; + private Boolean enableSession; + private String logPath; + private Integer oAuthType; + private String oAuthServiceAcctEmail; + private String oAuthPvtKeyPath; + private String oAuthPvtKey; + private String oAuthAccessToken; + private String oAuthRefreshToken; + private Boolean useQueryCache; + private String queryDialect; + private Boolean allowLargeResults; + private String destinationTable; + private String destinationDataset; + private Long destinationDatasetExpirationTime; + private String universeDomain; + private String proxyHost; + private String proxyPort; + private String proxyUid; + private String proxyPwd; + private String oAuthClientId; + private String oAuthClientSecret; + private Integer jobCreationMode; + private Long maxResults; + private String partnerToken; + private Boolean enableWriteAPI; + private String additionalProjects; + private Boolean filterTablesOnDefaultDataset; + private Integer requestGoogleDriveScope; + private Integer metadataFetchThreadCount; + private String sslTrustStorePath; + private String sslTrustStorePassword; + private Map labels; + private String requestReason; + private Integer timeout; + private Integer jobTimeout; + private Integer retryInitialDelay; + private Integer retryMaxDelay; + private Integer httpConnectTimeout; + private Integer httpReadTimeout; + private Long maximumBytesBilled; + private Integer swaActivationRowCount; + private Integer swaAppendRowCount; + private String oAuthP12Password; + private String oAuthSAImpersonationEmail; + private String oAuthSAImpersonationChain; + private String oAuthSAImpersonationScopes; + private String oAuthSAImpersonationTokenLifetime; + private String oAuth2TokenUri; + private String byoidAudienceUri; + private String byoidCredentialSource; + private String byoidPoolUserProject; + private String byoidSAImpersonationUri; + private String byoidSubjectTokenType; + private String byoidTokenUri; + private String endpointOverrides; + private String privateServiceConnect; + private Long connectionPoolSize; + private Long listenerPoolSize; + + // Make sure the JDBC driver class is loaded. 
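+  // JDBC 4.0+ drivers are normally discovered automatically via ServiceLoader, but loading the
+  // class here keeps this DataSource usable even where automatic discovery does not run (for
+  // example, some shaded or container deployments). Typical use, sketched with an assumed URL
+  // variable:
+  //
+  //   DataSource dataSource = new DataSource();
+  //   dataSource.setURL(url);  // a valid BigQuery JDBC connection URL
+  //   try (Connection connection = dataSource.getConnection()) {
+  //     // use the connection
+  //   }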
+ static { + try { + Class.forName("com.google.cloud.bigquery.jdbc.BigQueryDriver"); + } catch (ClassNotFoundException ex) { + throw new IllegalStateException( + "DataSource failed to load com.google.cloud.bigquery.jdbc.BigQueryDriver", ex); + } + } + + private static final Map> PROPERTY_SETTERS = + ImmutableMap.>builder() + .put(BigQueryJdbcUrlUtility.PROJECT_ID_PROPERTY_NAME, DataSource::setProjectId) + .put(BigQueryJdbcUrlUtility.DEFAULT_DATASET_PROPERTY_NAME, DataSource::setDefaultDataset) + .put(BigQueryJdbcUrlUtility.LOCATION_PROPERTY_NAME, DataSource::setLocation) + .put( + BigQueryJdbcUrlUtility.ENABLE_HTAPI_PROPERTY_NAME, + (ds, val) -> + ds.setEnableHighThroughputAPI( + BigQueryJdbcUrlUtility.convertIntToBoolean( + val, BigQueryJdbcUrlUtility.ENABLE_HTAPI_PROPERTY_NAME))) + .put( + BigQueryJdbcUrlUtility.UNSUPPORTED_HTAPI_FALLBACK_PROPERTY_NAME, + (ds, val) -> + ds.setUnsupportedHTAPIFallback( + BigQueryJdbcUrlUtility.convertIntToBoolean( + val, BigQueryJdbcUrlUtility.UNSUPPORTED_HTAPI_FALLBACK_PROPERTY_NAME))) + .put( + BigQueryJdbcUrlUtility.HTAPI_MIN_TABLE_SIZE_PROPERTY_NAME, + (ds, val) -> ds.setHighThroughputMinTableSize(Integer.parseInt(val))) + .put( + BigQueryJdbcUrlUtility.HTAPI_ACTIVATION_RATIO_PROPERTY_NAME, + (ds, val) -> ds.setHighThroughputActivationRatio(Integer.parseInt(val))) + .put(BigQueryJdbcUrlUtility.KMS_KEY_NAME_PROPERTY_NAME, DataSource::setKmsKeyName) + .put( + BigQueryJdbcUrlUtility.QUERY_PROPERTIES_NAME, + (ds, val) -> + ds.setQueryProperties( + BigQueryJdbcUrlUtility.parsePropertiesMapFromValue( + val, BigQueryJdbcUrlUtility.QUERY_PROPERTIES_NAME, "DataSource"))) + .put( + BigQueryJdbcUrlUtility.ENABLE_SESSION_PROPERTY_NAME, + (ds, val) -> + ds.setEnableSession( + BigQueryJdbcUrlUtility.convertIntToBoolean( + val, BigQueryJdbcUrlUtility.ENABLE_SESSION_PROPERTY_NAME))) + .put(BigQueryJdbcUrlUtility.LOG_LEVEL_PROPERTY_NAME, DataSource::setLogLevel) + .put(BigQueryJdbcUrlUtility.LOG_PATH_PROPERTY_NAME, DataSource::setLogPath) + .put( + BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME, + (ds, val) -> ds.setOAuthType(Integer.parseInt(val))) + .put( + BigQueryJdbcUrlUtility.OAUTH_SA_EMAIL_PROPERTY_NAME, + DataSource::setOAuthServiceAcctEmail) + .put( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME, + DataSource::setOAuthPvtKeyPath) + .put(BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME, DataSource::setOAuthPvtKey) + .put( + BigQueryJdbcUrlUtility.OAUTH_ACCESS_TOKEN_PROPERTY_NAME, + DataSource::setOAuthAccessToken) + .put( + BigQueryJdbcUrlUtility.OAUTH_REFRESH_TOKEN_PROPERTY_NAME, + DataSource::setOAuthRefreshToken) + .put( + BigQueryJdbcUrlUtility.USE_QUERY_CACHE_PROPERTY_NAME, + (ds, val) -> + ds.setUseQueryCache( + BigQueryJdbcUrlUtility.convertIntToBoolean( + val, BigQueryJdbcUrlUtility.USE_QUERY_CACHE_PROPERTY_NAME))) + .put(BigQueryJdbcUrlUtility.QUERY_DIALECT_PROPERTY_NAME, DataSource::setQueryDialect) + .put( + BigQueryJdbcUrlUtility.ALLOW_LARGE_RESULTS_PROPERTY_NAME, + (ds, val) -> + ds.setAllowLargeResults( + BigQueryJdbcUrlUtility.convertIntToBoolean( + val, BigQueryJdbcUrlUtility.ALLOW_LARGE_RESULTS_PROPERTY_NAME))) + .put( + BigQueryJdbcUrlUtility.LARGE_RESULTS_TABLE_PROPERTY_NAME, + DataSource::setDestinationTable) + .put( + BigQueryJdbcUrlUtility.LARGE_RESULTS_DATASET_PROPERTY_NAME, + DataSource::setDestinationDataset) + .put( + BigQueryJdbcUrlUtility.DESTINATION_DATASET_EXPIRATION_TIME_PROPERTY_NAME, + (ds, val) -> ds.setDestinationDatasetExpirationTime(Long.parseLong(val))) + .put( + 
BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME, + DataSource::setUniverseDomain) + .put(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME, DataSource::setProxyHost) + .put(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME, DataSource::setProxyPort) + .put(BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME, DataSource::setProxyUid) + .put(BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME, DataSource::setProxyPwd) + .put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, DataSource::setOAuthClientId) + .put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, + DataSource::setOAuthClientSecret) + .put( + BigQueryJdbcUrlUtility.JOB_CREATION_MODE_PROPERTY_NAME, + (ds, val) -> ds.setJobCreationMode(Integer.parseInt(val))) + .put( + BigQueryJdbcUrlUtility.MAX_RESULTS_PROPERTY_NAME, + (ds, val) -> ds.setMaxResults(Long.parseLong(val))) + .put(BigQueryJdbcUrlUtility.PARTNER_TOKEN_PROPERTY_NAME, DataSource::setPartnerToken) + .put( + BigQueryJdbcUrlUtility.ENABLE_WRITE_API_PROPERTY_NAME, + (ds, val) -> + ds.setEnableWriteAPI( + BigQueryJdbcUrlUtility.convertIntToBoolean( + val, BigQueryJdbcUrlUtility.ENABLE_WRITE_API_PROPERTY_NAME))) + .put( + BigQueryJdbcUrlUtility.ADDITIONAL_PROJECTS_PROPERTY_NAME, + DataSource::setAdditionalProjects) + .put( + BigQueryJdbcUrlUtility.FILTER_TABLES_ON_DEFAULT_DATASET_PROPERTY_NAME, + (ds, val) -> + ds.setFilterTablesOnDefaultDataset( + BigQueryJdbcUrlUtility.convertIntToBoolean( + val, + BigQueryJdbcUrlUtility.FILTER_TABLES_ON_DEFAULT_DATASET_PROPERTY_NAME))) + .put( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, + (ds, val) -> ds.setRequestGoogleDriveScope(Integer.parseInt(val))) + .put( + BigQueryJdbcUrlUtility.METADATA_FETCH_THREAD_COUNT_PROPERTY_NAME, + (ds, val) -> ds.setMetadataFetchThreadCount(Integer.parseInt(val))) + .put( + BigQueryJdbcUrlUtility.SSL_TRUST_STORE_PROPERTY_NAME, + DataSource::setSSLTrustStorePath) + .put( + BigQueryJdbcUrlUtility.SSL_TRUST_STORE_PWD_PROPERTY_NAME, + DataSource::setSSLTrustStorePassword) + .put( + BigQueryJdbcUrlUtility.LABELS_PROPERTY_NAME, + (ds, val) -> + ds.setLabels( + BigQueryJdbcUrlUtility.parsePropertiesMapFromValue( + val, BigQueryJdbcUrlUtility.LABELS_PROPERTY_NAME, "DataSource"))) + .put(BigQueryJdbcUrlUtility.REQUEST_REASON_PROPERTY_NAME, DataSource::setRequestReason) + .put( + BigQueryJdbcUrlUtility.RETRY_TIMEOUT_IN_SECS_PROPERTY_NAME, + (ds, val) -> ds.setTimeout(Integer.parseInt(val))) + .put( + BigQueryJdbcUrlUtility.JOB_TIMEOUT_PROPERTY_NAME, + (ds, val) -> ds.setJobTimeout(Integer.valueOf(val))) + .put( + BigQueryJdbcUrlUtility.RETRY_INITIAL_DELAY_PROPERTY_NAME, + (ds, val) -> ds.setRetryInitialDelay(Integer.valueOf(val))) + .put( + BigQueryJdbcUrlUtility.RETRY_MAX_DELAY_PROPERTY_NAME, + (ds, val) -> ds.setRetryMaxDelay(Integer.valueOf(val))) + .put( + BigQueryJdbcUrlUtility.HTTP_CONNECT_TIMEOUT_PROPERTY_NAME, + (ds, val) -> ds.setHttpConnectTimeout(Integer.parseInt(val))) + .put( + BigQueryJdbcUrlUtility.HTTP_READ_TIMEOUT_PROPERTY_NAME, + (ds, val) -> ds.setHttpReadTimeout(Integer.parseInt(val))) + .put( + BigQueryJdbcUrlUtility.OAUTH_P12_PASSWORD_PROPERTY_NAME, + DataSource::setOAuthP12Password) + .put( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME, + DataSource::setOAuthSAImpersonationEmail) + .put( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_CHAIN_PROPERTY_NAME, + DataSource::setOAuthSAImpersonationChain) + .put( + BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME, + DataSource::setOAuthSAImpersonationScopes) + .put( 
+ BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME, + DataSource::setOAuthSAImpersonationTokenLifetime) + .put(BigQueryJdbcUrlUtility.OAUTH2_TOKEN_URI_PROPERTY_NAME, DataSource::setOAuth2TokenUri) + .put( + BigQueryJdbcUrlUtility.BYOID_AUDIENCE_URI_PROPERTY_NAME, + DataSource::setByoidAudienceUri) + .put( + BigQueryJdbcUrlUtility.BYOID_CREDENTIAL_SOURCE_PROPERTY_NAME, + DataSource::setByoidCredentialSource) + .put( + BigQueryJdbcUrlUtility.BYOID_POOL_USER_PROJECT_PROPERTY_NAME, + DataSource::setByoidPoolUserProject) + .put( + BigQueryJdbcUrlUtility.BYOID_SA_IMPERSONATION_URI_PROPERTY_NAME, + DataSource::setByoidSAImpersonationUri) + .put( + BigQueryJdbcUrlUtility.BYOID_SUBJECT_TOKEN_TYPE_PROPERTY_NAME, + DataSource::setByoidSubjectTokenType) + .put(BigQueryJdbcUrlUtility.BYOID_TOKEN_URI_PROPERTY_NAME, DataSource::setByoidTokenUri) + .put( + BigQueryJdbcUrlUtility.ENDPOINT_OVERRIDES_PROPERTY_NAME, + DataSource::setEndpointOverrides) + .put( + BigQueryJdbcUrlUtility.PRIVATE_SERVICE_CONNECT_PROPERTY_NAME, + DataSource::setPrivateServiceConnect) + .put( + BigQueryJdbcUrlUtility.MAX_BYTES_BILLED_PROPERTY_NAME, + (ds, val) -> ds.setMaximumBytesBilled(Long.parseLong(val))) + .put( + BigQueryJdbcUrlUtility.SWA_ACTIVATION_ROW_COUNT_PROPERTY_NAME, + (ds, val) -> ds.setSwaActivationRowCount(Integer.parseInt(val))) + .put( + BigQueryJdbcUrlUtility.SWA_APPEND_ROW_COUNT_PROPERTY_NAME, + (ds, val) -> ds.setSwaAppendRowCount(Integer.parseInt(val))) + .put( + BigQueryJdbcUrlUtility.CONNECTION_POOL_SIZE_PROPERTY_NAME, + (ds, val) -> ds.setConnectionPoolSize(Long.parseLong(val))) + .put( + BigQueryJdbcUrlUtility.LISTENER_POOL_SIZE_PROPERTY_NAME, + (ds, val) -> ds.setListenerPoolSize(Long.parseLong(val))) + .build(); + + public static DataSource fromUrl(String url) { + DataSource dataSource = new DataSource(); + dataSource.setURL(url); + Map properties = BigQueryJdbcUrlUtility.parseUrl(url); + for (Map.Entry entry : properties.entrySet()) { + BiConsumer setter = PROPERTY_SETTERS.get(entry.getKey()); + if (setter != null) { + setter.accept(dataSource, entry.getValue()); + } + } + return dataSource; + } + + public Map getOverrideProperties() { + String overridePropertiesString = null; + if (endpointOverrides != null && !endpointOverrides.isEmpty()) { + overridePropertiesString = endpointOverrides; + } else if (privateServiceConnect != null && !privateServiceConnect.isEmpty()) { + overridePropertiesString = privateServiceConnect; + } + + Map overrideProps = new java.util.HashMap<>(); + if (overridePropertiesString == null || overridePropertiesString.isEmpty()) { + return overrideProps; + } + + for (String property : BigQueryJdbcUrlUtility.OVERRIDE_PROPERTIES) { + Pattern propertyPattern = Pattern.compile(String.format("(?i)%s=(.*?)(?:[,;]|$)", property)); + Matcher propertyMatcher = propertyPattern.matcher(overridePropertiesString); + if (propertyMatcher.find() && propertyMatcher.groupCount() >= 1) { + overrideProps.put(property, propertyMatcher.group(1)); + } + } + return overrideProps; + } + + /** An implementation of DataSource must include a public no-arg constructor. */ + public DataSource() {} + + @Override + public Connection getConnection() throws SQLException { + if (getURL() == null) { + throw new BigQueryJdbcException( + "Connection URL is null. Please specify a valid Connection URL to get Connection."); + } + if (!BigQueryDriver.getRegisteredDriver().acceptsURL(getURL())) { + throw new BigQueryJdbcException( + "The URL " + getURL() + " is invalid. 
Please specify a valid Connection URL. "); + } + return DriverManager.getConnection(getURL(), createProperties()); + } + + private Properties createProperties() { + Properties connectionProperties = new Properties(); + if (this.projectId != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PROJECT_ID_PROPERTY_NAME, this.projectId); + } + if (this.defaultDataset != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.DEFAULT_DATASET_PROPERTY_NAME, this.defaultDataset); + } + if (this.location != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.LOCATION_PROPERTY_NAME, this.location); + } + if (this.enableHighThroughputAPI != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.ENABLE_HTAPI_PROPERTY_NAME, + String.valueOf(this.enableHighThroughputAPI)); + } + if (this.unsupportedHTAPIFallback != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.UNSUPPORTED_HTAPI_FALLBACK_PROPERTY_NAME, + String.valueOf(this.unsupportedHTAPIFallback)); + } + if (this.highThroughputMinTableSize != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.HTAPI_MIN_TABLE_SIZE_PROPERTY_NAME, + String.valueOf(this.highThroughputMinTableSize)); + } + if (this.highThroughputActivationRatio != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.HTAPI_ACTIVATION_RATIO_PROPERTY_NAME, + String.valueOf(this.highThroughputActivationRatio)); + } + if (this.kmsKeyName != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.KMS_KEY_NAME_PROPERTY_NAME, this.kmsKeyName); + } + if (this.queryProperties != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.QUERY_PROPERTIES_NAME, serializeMap(this.queryProperties)); + } + if (this.enableSession != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.ENABLE_SESSION_PROPERTY_NAME, String.valueOf(this.enableSession)); + } + if (this.logLevel != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.LOG_LEVEL_PROPERTY_NAME, this.logLevel); + } + if (this.logPath != null) { + connectionProperties.setProperty(BigQueryJdbcUrlUtility.LOG_PATH_PROPERTY_NAME, this.logPath); + } + if (this.oAuthType != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME, String.valueOf(this.oAuthType)); + } + if (this.oAuthServiceAcctEmail != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_SA_EMAIL_PROPERTY_NAME, this.oAuthServiceAcctEmail); + } + if (this.oAuthPvtKeyPath != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PATH_PROPERTY_NAME, this.oAuthPvtKeyPath); + } + if (this.oAuthPvtKey != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_PVT_KEY_PROPERTY_NAME, this.oAuthPvtKey); + } + if (this.oAuthAccessToken != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_ACCESS_TOKEN_PROPERTY_NAME, this.oAuthAccessToken); + } + if (this.oAuthRefreshToken != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_REFRESH_TOKEN_PROPERTY_NAME, this.oAuthRefreshToken); + } + if (this.useQueryCache != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.USE_QUERY_CACHE_PROPERTY_NAME, String.valueOf(this.useQueryCache)); + } + if (this.queryDialect != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.QUERY_DIALECT_PROPERTY_NAME, this.queryDialect); + } + if (this.allowLargeResults != null) { + connectionProperties.setProperty( + 
BigQueryJdbcUrlUtility.ALLOW_LARGE_RESULTS_PROPERTY_NAME, + String.valueOf(this.allowLargeResults)); + } + if (this.destinationTable != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.LARGE_RESULTS_TABLE_PROPERTY_NAME, this.destinationTable); + } + if (this.destinationDataset != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.LARGE_RESULTS_DATASET_PROPERTY_NAME, this.destinationDataset); + } + if (this.destinationDatasetExpirationTime != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.DESTINATION_DATASET_EXPIRATION_TIME_PROPERTY_NAME, + String.valueOf(this.destinationDatasetExpirationTime)); + } + if (this.universeDomain != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME, this.universeDomain); + } + if (this.proxyHost != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME, this.proxyHost); + } + if (this.proxyPort != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME, this.proxyPort); + } + if (this.proxyUid != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PROXY_USER_ID_PROPERTY_NAME, this.proxyUid); + } + if (this.proxyPwd != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PROXY_PASSWORD_PROPERTY_NAME, this.proxyPwd); + } + if (this.oAuthClientId != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, this.oAuthClientId); + } + if (this.oAuthClientSecret != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, this.oAuthClientSecret); + } + if (this.jobCreationMode != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.JOB_CREATION_MODE_PROPERTY_NAME, + String.valueOf(this.jobCreationMode)); + } + if (this.maxResults != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.MAX_RESULTS_PROPERTY_NAME, String.valueOf(this.maxResults)); + } + if (this.partnerToken != null && !this.partnerToken.isEmpty()) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.PARTNER_TOKEN_PROPERTY_NAME, this.partnerToken); + } + if (this.enableWriteAPI != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.ENABLE_WRITE_API_PROPERTY_NAME, + String.valueOf(this.enableWriteAPI)); + } + if (this.additionalProjects != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.ADDITIONAL_PROJECTS_PROPERTY_NAME, this.additionalProjects); + } + if (this.filterTablesOnDefaultDataset != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.FILTER_TABLES_ON_DEFAULT_DATASET_PROPERTY_NAME, + String.valueOf(this.filterTablesOnDefaultDataset)); + } + if (this.requestGoogleDriveScope != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, + String.valueOf(this.requestGoogleDriveScope)); + } + if (this.metadataFetchThreadCount != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.METADATA_FETCH_THREAD_COUNT_PROPERTY_NAME, + String.valueOf(this.metadataFetchThreadCount)); + } + if (this.sslTrustStorePath != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.SSL_TRUST_STORE_PROPERTY_NAME, + String.valueOf(this.sslTrustStorePath)); + } + if (this.sslTrustStorePassword != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.SSL_TRUST_STORE_PWD_PROPERTY_NAME, + 
String.valueOf(this.sslTrustStorePassword)); + } + if (this.labels != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.LABELS_PROPERTY_NAME, serializeMap(this.labels)); + } + if (this.requestReason != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.REQUEST_REASON_PROPERTY_NAME, this.requestReason); + } + if (this.timeout != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.RETRY_TIMEOUT_IN_SECS_PROPERTY_NAME, String.valueOf(this.timeout)); + } + if (this.jobTimeout != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.JOB_TIMEOUT_PROPERTY_NAME, String.valueOf(this.jobTimeout)); + } + if (this.retryInitialDelay != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.RETRY_INITIAL_DELAY_PROPERTY_NAME, + String.valueOf(this.retryInitialDelay)); + } + if (this.retryMaxDelay != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.RETRY_MAX_DELAY_PROPERTY_NAME, String.valueOf(this.retryMaxDelay)); + } + if (this.httpConnectTimeout != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.HTTP_CONNECT_TIMEOUT_PROPERTY_NAME, + String.valueOf(this.httpConnectTimeout)); + } + if (this.httpReadTimeout != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.HTTP_READ_TIMEOUT_PROPERTY_NAME, + String.valueOf(this.httpReadTimeout)); + } + if (this.maximumBytesBilled != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.MAX_BYTES_BILLED_PROPERTY_NAME, + String.valueOf(this.maximumBytesBilled)); + } + if (this.swaActivationRowCount != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.SWA_ACTIVATION_ROW_COUNT_PROPERTY_NAME, + String.valueOf(this.swaActivationRowCount)); + } + if (this.swaAppendRowCount != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.SWA_APPEND_ROW_COUNT_PROPERTY_NAME, + String.valueOf(this.swaAppendRowCount)); + } + if (this.connectionPoolSize != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.CONNECTION_POOL_SIZE_PROPERTY_NAME, + String.valueOf(this.connectionPoolSize)); + } + if (this.listenerPoolSize != null) { + connectionProperties.setProperty( + BigQueryJdbcUrlUtility.LISTENER_POOL_SIZE_PROPERTY_NAME, + String.valueOf(this.listenerPoolSize)); + } + return connectionProperties; + } + + private String serializeMap(Map map) { + if (map == null || map.isEmpty()) { + return ""; + } + return Joiner.on(",").withKeyValueSeparator("=").join(map); + } + + @Override + public Connection getConnection(String username, String password) throws SQLException { + LOG.warning( + "Username and Password is not supported in Bigquery JDBC Driver. Values discarded."); + return getConnection(); + } + + public String getURL() { + return URL; + } + + public void setURL(String URL) { + this.URL = URL; + } + + public String getProjectId() { + return projectId != null + ? projectId + : com.google.cloud.bigquery.BigQueryOptions.getDefaultProjectId(); + } + + public void setProjectId(String projectId) { + this.projectId = projectId; + } + + public void setMaxResults(Long maxResults) { + this.maxResults = maxResults; + } + + public Long getMaxResults() { + return this.maxResults != null + ? 
this.maxResults + : (Long) BigQueryJdbcUrlUtility.DEFAULT_MAX_RESULTS_VALUE; + } + + public String getDefaultDataset() { + return defaultDataset; + } + + public void setDefaultDataset(String defaultDataset) { + this.defaultDataset = defaultDataset; + } + + public String getLocation() { + return location; + } + + public void setLocation(String location) { + this.location = location; + } + + public String getUserAgent() { + return userAgent; + } + + public void setUserAgent(String userAgent) { + this.userAgent = userAgent; + } + + public String getPartnerToken() { + return partnerToken; + } + + public void setPartnerToken(String partnerToken) { + // This property is expected to be set by partners only. For more details on exact format + // supported, refer b/396086960 + this.partnerToken = partnerToken; + } + + public Boolean getEnableHighThroughputAPI() { + return enableHighThroughputAPI != null + ? enableHighThroughputAPI + : BigQueryJdbcUrlUtility.DEFAULT_ENABLE_HTAPI_VALUE; + } + + public void setEnableHighThroughputAPI(Boolean enableHighThroughputAPI) { + this.enableHighThroughputAPI = enableHighThroughputAPI; + } + + public Integer getHighThroughputMinTableSize() { + return highThroughputMinTableSize != null + ? highThroughputMinTableSize + : (Integer) BigQueryJdbcUrlUtility.DEFAULT_HTAPI_MIN_TABLE_SIZE_VALUE; + } + + public Integer getHighThroughputActivationRatio() { + return highThroughputActivationRatio != null + ? highThroughputActivationRatio + : (Integer) BigQueryJdbcUrlUtility.DEFAULT_HTAPI_ACTIVATION_RATIO_VALUE; + } + + public Long getConnectionPoolSize() { + return connectionPoolSize != null + ? connectionPoolSize + : BigQueryJdbcUrlUtility.DEFAULT_CONNECTION_POOL_SIZE_VALUE; + } + + public void setConnectionPoolSize(Long connectionPoolSize) { + this.connectionPoolSize = connectionPoolSize; + } + + public Long getListenerPoolSize() { + return listenerPoolSize != null + ? listenerPoolSize + : BigQueryJdbcUrlUtility.DEFAULT_LISTENER_POOL_SIZE_VALUE; + } + + public void setListenerPoolSize(Long listenerPoolSize) { + this.listenerPoolSize = listenerPoolSize; + } + + public void setHighThroughputMinTableSize(Integer highThroughputMinTableSize) { + this.highThroughputMinTableSize = highThroughputMinTableSize; + } + + public void setHighThroughputActivationRatio(Integer highThroughputActivationRatio) { + this.highThroughputActivationRatio = highThroughputActivationRatio; + } + + public void setKmsKeyName(String kmsKeyName) { + this.kmsKeyName = kmsKeyName; + } + + public String getKmsKeyName() { + return this.kmsKeyName; + } + + public void setQueryProperties(Map queryProperties) { + this.queryProperties = queryProperties == null ? null : ImmutableMap.copyOf(queryProperties); + } + + public Map getQueryProperties() { + return this.queryProperties; + } + + public void setUnsupportedHTAPIFallback(Boolean unsupportedHTAPIFallback) { + this.unsupportedHTAPIFallback = unsupportedHTAPIFallback; + } + + public Boolean getUnsupportedHTAPIFallback() { + return this.unsupportedHTAPIFallback != null + ? this.unsupportedHTAPIFallback + : BigQueryJdbcUrlUtility.DEFAULT_UNSUPPORTED_HTAPI_FALLBACK_VALUE; + } + + public Boolean getEnableSession() { + return enableSession != null + ? 
enableSession + : BigQueryJdbcUrlUtility.DEFAULT_ENABLE_SESSION_VALUE; + } + + public void setEnableSession(Boolean enableSession) { + this.enableSession = enableSession; + } + + public String getLogLevel() { + return logLevel; + } + + public void setLogLevel(String logLevel) { + this.logLevel = logLevel; + } + + public String getLogPath() { + return logPath; + } + + public void setLogPath(String logPath) { + this.logPath = logPath; + } + + public String getUniverseDomain() { + return universeDomain != null + ? universeDomain + : BigQueryJdbcUrlUtility.DEFAULT_UNIVERSE_DOMAIN_VALUE; + } + + public void setUniverseDomain(String universeDomain) { + this.universeDomain = universeDomain; + } + + public String getProxyHost() { + return proxyHost; + } + + public void setProxyHost(String proxyHost) { + this.proxyHost = proxyHost; + } + + public String getProxyPort() { + return proxyPort; + } + + public void setProxyPort(String proxyPort) { + this.proxyPort = proxyPort; + } + + public String getProxyUid() { + return proxyUid; + } + + public void setProxyUid(String proxyUid) { + this.proxyUid = proxyUid; + } + + public String getProxyPwd() { + return proxyPwd; + } + + public void setProxyPwd(String proxyPwd) { + this.proxyPwd = proxyPwd; + } + + public Integer getOAuthType() { + return oAuthType != null ? oAuthType : BigQueryJdbcUrlUtility.DEFAULT_OAUTH_TYPE_VALUE; + } + + public void setOAuthType(Integer oAuthType) { + this.oAuthType = oAuthType; + } + + public String getOAuthServiceAcctEmail() { + return oAuthServiceAcctEmail; + } + + public void setOAuthServiceAcctEmail(String oAuthServiceAcctEmail) { + this.oAuthServiceAcctEmail = oAuthServiceAcctEmail; + } + + public String getOAuthPvtKeyPath() { + return oAuthPvtKeyPath; + } + + public String getOAuthPvtKey() { + return oAuthPvtKey; + } + + public void setOAuthPvtKey(String oAuthPvtKey) { + this.oAuthPvtKey = oAuthPvtKey; + } + + public void setOAuthPvtKeyPath(String oAuthPvtKeyPath) { + this.oAuthPvtKeyPath = oAuthPvtKeyPath; + } + + public String getOAuthAccessToken() { + return oAuthAccessToken; + } + + public void setOAuthAccessToken(String oAuthAccessToken) { + this.oAuthAccessToken = oAuthAccessToken; + } + + public String getOAuthRefreshToken() { + return oAuthRefreshToken; + } + + public void setOAuthRefreshToken(String oAuthRefreshToken) { + this.oAuthRefreshToken = oAuthRefreshToken; + } + + public Boolean getUseQueryCache() { + return useQueryCache != null ? useQueryCache : BigQueryJdbcUrlUtility.DEFAULT_USE_QUERY_CACHE; + } + + public String getQueryDialect() { + return queryDialect != null ? queryDialect : BigQueryJdbcUrlUtility.DEFAULT_QUERY_DIALECT_VALUE; + } + + public Boolean getAllowLargeResults() { + return allowLargeResults != null + ? allowLargeResults + : BigQueryJdbcUrlUtility.DEFAULT_ALLOW_LARGE_RESULTS; + } + + public String getDestinationTable() { + return destinationTable; + } + + public String getDestinationDataset() { + return destinationDataset; + } + + public Long getDestinationDatasetExpirationTime() { + return destinationDatasetExpirationTime != null + ? 
destinationDatasetExpirationTime + : (Long) BigQueryJdbcUrlUtility.DEFAULT_DESTINATION_DATASET_EXPIRATION_TIME_VALUE; + } + + public void setUseQueryCache(Boolean useQueryCache) { + this.useQueryCache = useQueryCache; + } + + public void setQueryDialect(String queryDialect) { + this.queryDialect = queryDialect; + } + + public void setAllowLargeResults(Boolean allowLargeResults) { + this.allowLargeResults = allowLargeResults; + } + + public void setDestinationTable(String destinationTable) { + this.destinationTable = destinationTable; + } + + public void setDestinationDataset(String destinationDataset) { + this.destinationDataset = destinationDataset; + } + + public void setDestinationDatasetExpirationTime(long destinationDatasetExpirationTime) { + this.destinationDatasetExpirationTime = destinationDatasetExpirationTime; + } + + public String getOAuthClientId() { + return oAuthClientId; + } + + public void setOAuthClientId(String oAuthClientId) { + this.oAuthClientId = oAuthClientId; + } + + public String getOAuthClientSecret() { + return oAuthClientSecret; + } + + public void setOAuthClientSecret(String oAuthClientSecret) { + this.oAuthClientSecret = oAuthClientSecret; + } + + public Integer getJobCreationMode() { + return jobCreationMode != null + ? jobCreationMode + : (Integer) BigQueryJdbcUrlUtility.DEFAULT_JOB_CREATION_MODE; + } + + public Boolean getUseStatelessQueryMode() { + return getJobCreationMode() == 2; + } + + public void setJobCreationMode(Integer jobCreationMode) { + if (jobCreationMode != null && !VALID_JOB_CREATION_MODES.contains(jobCreationMode)) { + throw new IllegalArgumentException( + String.format( + "Invalid value for %s. Use 1 for JOB_CREATION_REQUIRED and 2 for" + + " JOB_CREATION_OPTIONAL.", + BigQueryJdbcUrlUtility.JOB_CREATION_MODE_PROPERTY_NAME)); + } + this.jobCreationMode = jobCreationMode; + } + + public Boolean getEnableWriteAPI() { + return enableWriteAPI != null + ? enableWriteAPI + : BigQueryJdbcUrlUtility.DEFAULT_ENABLE_WRITE_API_VALUE; + } + + public void setEnableWriteAPI(Boolean enableWriteAPI) { + this.enableWriteAPI = enableWriteAPI; + } + + public String getAdditionalProjects() { + return additionalProjects; + } + + public void setAdditionalProjects(String additionalProjects) { + this.additionalProjects = additionalProjects; + } + + public Boolean getFilterTablesOnDefaultDataset() { + return filterTablesOnDefaultDataset != null + ? filterTablesOnDefaultDataset + : BigQueryJdbcUrlUtility.DEFAULT_FILTER_TABLES_ON_DEFAULT_DATASET_VALUE; + } + + public void setFilterTablesOnDefaultDataset(Boolean filterTablesOnDefaultDataset) { + this.filterTablesOnDefaultDataset = filterTablesOnDefaultDataset; + } + + public Integer getRequestGoogleDriveScope() { + return requestGoogleDriveScope != null + ? requestGoogleDriveScope + : BigQueryJdbcUrlUtility.DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE; + } + + public void setRequestGoogleDriveScope(Integer requestGoogleDriveScope) { + this.requestGoogleDriveScope = requestGoogleDriveScope; + } + + public Integer getMetadataFetchThreadCount() { + return metadataFetchThreadCount != null + ? 
metadataFetchThreadCount + : BigQueryJdbcUrlUtility.DEFAULT_METADATA_FETCH_THREAD_COUNT_VALUE; + } + + public void setMetadataFetchThreadCount(Integer metadataFetchThreadCount) { + this.metadataFetchThreadCount = metadataFetchThreadCount; + } + + public String getSSLTrustStorePath() { + return sslTrustStorePath; + } + + public void setSSLTrustStorePath(String sslTrustStorePath) { + this.sslTrustStorePath = sslTrustStorePath; + } + + public String getSSLTrustStorePassword() { + return sslTrustStorePassword; + } + + public void setSSLTrustStorePassword(String sslTrustStorePassword) { + this.sslTrustStorePassword = sslTrustStorePassword; + } + + public Map getLabels() { + return labels; + } + + public void setLabels(Map labels) { + this.labels = labels == null ? null : ImmutableMap.copyOf(labels); + } + + public String getRequestReason() { + return requestReason; + } + + public void setRequestReason(String requestReason) { + this.requestReason = requestReason; + } + + public Integer getTimeout() { + return timeout != null + ? timeout + : (int) BigQueryJdbcUrlUtility.DEFAULT_RETRY_TIMEOUT_IN_SECS_VALUE; + } + + public Integer getJobTimeout() { + return jobTimeout != null ? jobTimeout : (int) BigQueryJdbcUrlUtility.DEFAULT_JOB_TIMEOUT_VALUE; + } + + public Integer getRetryInitialDelay() { + return retryInitialDelay != null + ? retryInitialDelay + : (int) BigQueryJdbcUrlUtility.DEFAULT_RETRY_INITIAL_DELAY_VALUE; + } + + public Integer getRetryMaxDelay() { + return retryMaxDelay != null + ? retryMaxDelay + : (int) BigQueryJdbcUrlUtility.DEFAULT_RETRY_MAX_DELAY_VALUE; + } + + public void setJobTimeout(Integer jobTimeout) { + this.jobTimeout = jobTimeout; + } + + public void setRetryInitialDelay(Integer retryInitialDelay) { + this.retryInitialDelay = retryInitialDelay; + } + + public void setRetryMaxDelay(Integer retryMaxDelay) { + this.retryMaxDelay = retryMaxDelay; + } + + public void setTimeout(Integer timeout) { + this.timeout = timeout; + } + + public Integer getHttpConnectTimeout() { + return httpConnectTimeout; + } + + public void setHttpConnectTimeout(Integer httpConnectTimeout) { + this.httpConnectTimeout = httpConnectTimeout; + } + + public Integer getHttpReadTimeout() { + return httpReadTimeout; + } + + public void setHttpReadTimeout(Integer httpReadTimeout) { + this.httpReadTimeout = httpReadTimeout; + } + + public Long getMaximumBytesBilled() { + return maximumBytesBilled != null + ? maximumBytesBilled + : BigQueryJdbcUrlUtility.DEFAULT_MAX_BYTES_BILLED_VALUE; + } + + public void setMaximumBytesBilled(Long maximumBytesBilled) { + this.maximumBytesBilled = maximumBytesBilled; + } + + public Integer getSwaActivationRowCount() { + return swaActivationRowCount != null + ? swaActivationRowCount + : BigQueryJdbcUrlUtility.DEFAULT_SWA_ACTIVATION_ROW_COUNT_VALUE; + } + + public void setSwaActivationRowCount(Integer swaActivationRowCount) { + this.swaActivationRowCount = swaActivationRowCount; + } + + public Integer getSwaAppendRowCount() { + return swaAppendRowCount != null + ? swaAppendRowCount + : BigQueryJdbcUrlUtility.DEFAULT_SWA_APPEND_ROW_COUNT_VALUE; + } + + public void setSwaAppendRowCount(Integer swaAppendRowCount) { + this.swaAppendRowCount = swaAppendRowCount; + } + + public String getOAuthP12Password() { + return oAuthP12Password != null + ? 
oAuthP12Password + : BigQueryJdbcUrlUtility.DEFAULT_OAUTH_P12_PASSWORD_VALUE; + } + + public void setOAuthP12Password(String oAuthP12Password) { + this.oAuthP12Password = oAuthP12Password; + } + + public String getOAuthSAImpersonationEmail() { + return oAuthSAImpersonationEmail; + } + + public void setOAuthSAImpersonationEmail(String oAuthSAImpersonationEmail) { + this.oAuthSAImpersonationEmail = oAuthSAImpersonationEmail; + } + + public String getOAuthSAImpersonationChain() { + return oAuthSAImpersonationChain; + } + + public void setOAuthSAImpersonationChain(String oAuthSAImpersonationChain) { + this.oAuthSAImpersonationChain = oAuthSAImpersonationChain; + } + + public String getOAuthSAImpersonationScopes() { + return oAuthSAImpersonationScopes; + } + + public void setOAuthSAImpersonationScopes(String oAuthSAImpersonationScopes) { + this.oAuthSAImpersonationScopes = oAuthSAImpersonationScopes; + } + + public String getOAuthSAImpersonationTokenLifetime() { + return oAuthSAImpersonationTokenLifetime; + } + + public void setOAuthSAImpersonationTokenLifetime(String oAuthSAImpersonationTokenLifetime) { + this.oAuthSAImpersonationTokenLifetime = oAuthSAImpersonationTokenLifetime; + } + + public String getOAuth2TokenUri() { + return oAuth2TokenUri; + } + + public void setOAuth2TokenUri(String oAuth2TokenUri) { + this.oAuth2TokenUri = oAuth2TokenUri; + } + + public String getByoidAudienceUri() { + return byoidAudienceUri; + } + + public void setByoidAudienceUri(String byoidAudienceUri) { + this.byoidAudienceUri = byoidAudienceUri; + } + + public String getByoidCredentialSource() { + return byoidCredentialSource; + } + + public void setByoidCredentialSource(String byoidCredentialSource) { + this.byoidCredentialSource = byoidCredentialSource; + } + + public String getByoidPoolUserProject() { + return byoidPoolUserProject; + } + + public void setByoidPoolUserProject(String byoidPoolUserProject) { + this.byoidPoolUserProject = byoidPoolUserProject; + } + + public String getByoidSAImpersonationUri() { + return byoidSAImpersonationUri; + } + + public void setByoidSAImpersonationUri(String byoidSAImpersonationUri) { + this.byoidSAImpersonationUri = byoidSAImpersonationUri; + } + + public String getByoidSubjectTokenType() { + return byoidSubjectTokenType != null + ? byoidSubjectTokenType + : BigQueryJdbcUrlUtility.DEFAULT_BYOID_SUBJECT_TOKEN_TYPE_VALUE; + } + + public void setByoidSubjectTokenType(String byoidSubjectTokenType) { + this.byoidSubjectTokenType = byoidSubjectTokenType; + } + + public String getByoidTokenUri() { + return byoidTokenUri != null + ? 
byoidTokenUri + : BigQueryJdbcUrlUtility.DEFAULT_BYOID_TOKEN_URI_VALUE; + } + + public void setByoidTokenUri(String byoidTokenUri) { + this.byoidTokenUri = byoidTokenUri; + } + + public String getEndpointOverrides() { + return endpointOverrides; + } + + public void setEndpointOverrides(String endpointOverrides) { + this.endpointOverrides = endpointOverrides; + } + + public String getPrivateServiceConnect() { + return privateServiceConnect; + } + + public void setPrivateServiceConnect(String privateServiceConnect) { + this.privateServiceConnect = privateServiceConnect; + } + + @Override + public PrintWriter getLogWriter() { + return null; + } + + @Override + public void setLogWriter(PrintWriter out) {} + + @Override + public void setLoginTimeout(int seconds) {} + + @Override + public int getLoginTimeout() { + return 0; + } + + @Override + public Logger getParentLogger() { + return BigQueryJdbcRootLogger.getRootLogger(); + } + + @Override + public T unwrap(Class iface) { + return null; + } + + @Override + public boolean isWrapperFor(Class iface) { + return false; + } +} diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSource.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSource.java new file mode 100644 index 0000000000..66a957a06f --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSource.java @@ -0,0 +1,67 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException;
+import com.google.common.annotations.VisibleForTesting;
+import java.sql.Connection;
+import java.sql.SQLException;
+import javax.sql.ConnectionPoolDataSource;
+import javax.sql.PooledConnection;
+
+public class PooledConnectionDataSource extends DataSource implements ConnectionPoolDataSource {
+  private PooledConnectionListener connectionPoolManager = null;
+  Connection bqConnection = null;
+
+  @Override
+  public PooledConnection getPooledConnection() throws SQLException {
+    // Reuse a connection from the pool if one is available.
+    if (connectionPoolManager != null && !connectionPoolManager.isConnectionPoolEmpty()) {
+      return connectionPoolManager.getPooledConnection();
+    }
+    // Create the underlying physical connection if it does not exist yet.
+    if (bqConnection == null) {
+      bqConnection = super.getConnection();
+    }
+    if (bqConnection == null) {
+      throw new BigQueryJdbcRuntimeException(
+          "Cannot get pooled connection: unable to get underlying physical connection");
+    }
+    Long connectionPoolSize = ((BigQueryConnection) bqConnection).getConnectionPoolSize();
+    if (connectionPoolManager == null) {
+      connectionPoolManager = new PooledConnectionListener(connectionPoolSize);
+    }
+    BigQueryPooledConnection bqPooledConnection =
+        new BigQueryPooledConnection((BigQueryConnection) bqConnection);
+    bqPooledConnection.addConnectionEventListener(connectionPoolManager);
+    return bqPooledConnection;
+  }
+
+  @VisibleForTesting
+  void setConnection(Connection connection) {
+    this.bqConnection = connection;
+  }
+
+  @VisibleForTesting
+  public PooledConnectionListener getConnectionPoolManager() {
+    return this.connectionPoolManager;
+  }
+
+  @Override
+  public PooledConnection getPooledConnection(String user, String password) throws SQLException {
+    throw new UnsupportedOperationException("This operation is not supported by the driver");
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionListener.java b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionListener.java
new file mode 100644
index 0000000000..9f3b210443
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/java/com/google/cloud/bigquery/jdbc/PooledConnectionListener.java
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import java.util.Objects;
+import java.util.UUID;
+import java.util.concurrent.LinkedBlockingDeque;
+import javax.sql.ConnectionEvent;
+import javax.sql.ConnectionEventListener;
+import javax.sql.PooledConnection;
+
+public class PooledConnectionListener implements ConnectionEventListener {
+  private final BigQueryJdbcCustomLogger LOG = new BigQueryJdbcCustomLogger(this.toString());
+  private final String id; // Mainly for internal use
+  private LinkedBlockingDeque<PooledConnection> connectionPool;
+  private Long connectionPoolSize = 0L;
+
+  public PooledConnectionListener(Long connPoolSize) {
+    id = UUID.randomUUID().toString();
+    this.connectionPoolSize = connPoolSize;
+    if (getConnectionPoolSize() > 0L) {
+      connectionPool = new LinkedBlockingDeque<>(getConnectionPoolSize().intValue());
+    } else {
+      connectionPool = new LinkedBlockingDeque<>();
+    }
+  }
+
+  public Long getConnectionPoolSize() {
+    return this.connectionPoolSize;
+  }
+
+  /** Returns the number of connections currently held in the pool. */
+  public int getConnectionPoolCurrentCapacity() {
+    return this.connectionPool.size();
+  }
+
+  public boolean isConnectionPoolEmpty() {
+    return (connectionPool != null && connectionPool.isEmpty());
+  }
+
+  PooledConnection getPooledConnection() {
+    if (isConnectionPoolEmpty()) {
+      LOG.warning("Connection pool is empty");
+      return null;
+    }
+    // Remove and return the first available connection so it is not handed out twice;
+    // pollFirst returns null if the pool was drained concurrently.
+    return connectionPool.pollFirst();
+  }
+
+  void addConnection(PooledConnection connection) {
+    LOG.finest("++enter++");
+    if (connection == null) {
+      LOG.warning("Connection passed in is null");
+      return;
+    }
+    if (connectionPool.contains(connection)) {
+      LOG.warning("Connection already in the pool");
+      return;
+    }
+    connectionPool.add(connection);
+  }
+
+  void removeConnection(PooledConnection connection) {
+    LOG.finest("++enter++");
+    if (connection == null) {
+      LOG.warning("Connection passed in is null");
+      return;
+    }
+    if (!connectionPool.contains(connection)) {
+      LOG.warning("Connection not in the pool");
+      return;
+    }
+    connectionPool.remove(connection);
+  }
+
+  @Override
+  public void connectionClosed(ConnectionEvent event) {
+    LOG.finest("++enter++");
+    Object eventSource = event.getSource();
+    if (!(eventSource instanceof BigQueryPooledConnection)) {
+      throw new IllegalArgumentException(
+          "Invalid ConnectionEvent source passed to connectionClosed. Expecting"
+              + " BigQueryPooledConnection.");
+    }
+    BigQueryPooledConnection bqPooledConnection = (BigQueryPooledConnection) eventSource;
+    // The logical handle was closed, so the physical connection can be reused.
+    addConnection(bqPooledConnection);
+    LOG.finest("Added pooled connection to connection pool");
+  }
+
+  @Override
+  public void connectionErrorOccurred(ConnectionEvent event) {
+    LOG.finest("++enter++");
+    Object eventSource = event.getSource();
+    if (!(eventSource instanceof BigQueryPooledConnection)) {
+      throw new IllegalArgumentException(
+          "Invalid ConnectionEvent source passed to connectionErrorOccurred. Expecting"
+              + " BigQueryPooledConnection.");
+    }
+    BigQueryPooledConnection bqPooledConnection = (BigQueryPooledConnection) eventSource;
+    // A fatal error occurred, so this connection must not be handed out again.
+    removeConnection(bqPooledConnection);
+    String errorMessage =
+        (event.getSQLException() != null)
+            ? event.getSQLException().getMessage()
+            : "Connection error occurred";
+    LOG.finest("Removed pooled connection from connection pool. Error: %s", errorMessage);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(id);
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    if (this == obj) {
+      return true;
+    }
+    if (obj == null || getClass() != obj.getClass()) {
+      return false;
+    }
+    PooledConnectionListener other = (PooledConnectionListener) obj;
+    return Objects.equals(id, other.id);
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/main/resources/META-INF/services/java.sql.Driver b/google-cloud-bigquery-jdbc/src/main/resources/META-INF/services/java.sql.Driver
new file mode 100644
index 0000000000..1ea35896be
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/resources/META-INF/services/java.sql.Driver
@@ -0,0 +1 @@
+com.google.cloud.bigquery.jdbc.BigQueryDriver
diff --git a/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetCrossReference.sql b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetCrossReference.sql
new file mode 100644
index 0000000000..da83862704
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetCrossReference.sql
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2024 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +SELECT PKTABLE_CAT, + PKTABLE_SCHEM, + PKTABLE_NAME, + PRIMARY.column_name AS PKCOLUMN_NAME, + FOREIGN.constraint_catalog AS FKTABLE_CAT, + FOREIGN.constraint_schema AS FKTABLE_SCHEM, + FOREIGN.table_name AS FKTABLE_NAME, + FOREIGN.column_name AS FKCOLUMN_NAME, + FOREIGN.ordinal_position AS KEY_SEQ, + NULL AS UPDATE_RULE, + NULL AS DELETE_RULE, + FOREIGN.constraint_name AS FK_NAME, + PRIMARY.constraint_name AS PK_NAME, + NULL AS DEFERRABILITY +FROM (SELECT DISTINCT CCU.table_catalog AS PKTABLE_CAT, + CCU.table_schema AS PKTABLE_SCHEM, + CCU.table_name AS PKTABLE_NAME, + TC.constraint_catalog, + TC.constraint_schema, + TC.constraint_name, + TC.table_catalog, + TC.table_schema, + TC.table_name, + TC.constraint_type, + KCU.column_name, + KCU.ordinal_position, + KCU.position_in_unique_constraint + FROM `%1$s.%2$s.INFORMATION_SCHEMA.TABLE_CONSTRAINTS` TC + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` KCU + USING + (constraint_catalog, + constraint_schema, + constraint_name, + table_catalog, + table_schema, + table_name) + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE` CCU + USING + (constraint_catalog, + constraint_schema, + constraint_name) + WHERE constraint_type = 'FOREIGN KEY' + AND TC.table_name = '%6$s') FOREIGN + INNER JOIN (SELECT * + FROM `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` + WHERE position_in_unique_constraint IS NULL + AND RTRIM(table_name) = '%3$s') PRIMARY +ON + FOREIGN.PKTABLE_CAT = PRIMARY.table_catalog + AND FOREIGN.PKTABLE_SCHEM = PRIMARY.table_schema + AND FOREIGN.PKTABLE_NAME = PRIMARY.table_name + AND FOREIGN.position_in_unique_constraint = + PRIMARY.ordinal_position +ORDER BY FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME, KEY_SEQ \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetExportedKeys.sql b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetExportedKeys.sql new file mode 100644 index 0000000000..4058f6bff6 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetExportedKeys.sql @@ -0,0 +1,71 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +SELECT PKTABLE_CAT, + PKTABLE_SCHEM, + PKTABLE_NAME, + PRIMARY.column_name AS PKCOLUMN_NAME, + FOREIGN.constraint_catalog AS FKTABLE_CAT, + FOREIGN.constraint_schema AS FKTABLE_SCHEM, + FOREIGN.table_name AS FKTABLE_NAME, + FOREIGN.column_name AS FKCOLUMN_NAME, + FOREIGN.ordinal_position AS KEY_SEQ, + NULL AS UPDATE_RULE, + NULL AS DELETE_RULE, + FOREIGN.constraint_name AS FK_NAME, + PRIMARY.constraint_name AS PK_NAME, + NULL AS DEFERRABILITY +FROM (SELECT DISTINCT CCU.table_catalog AS PKTABLE_CAT, + CCU.table_schema AS PKTABLE_SCHEM, + CCU.table_name AS PKTABLE_NAME, + TC.constraint_catalog, + TC.constraint_schema, + TC.constraint_name, + TC.table_catalog, + TC.table_schema, + TC.table_name, + TC.constraint_type, + KCU.column_name, + KCU.ordinal_position, + KCU.position_in_unique_constraint + FROM `%1$s.%2$s.INFORMATION_SCHEMA.TABLE_CONSTRAINTS` TC + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` KCU + USING + (constraint_catalog, + constraint_schema, + constraint_name, + table_catalog, + table_schema, + table_name) + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE` CCU + USING + (constraint_catalog, + constraint_schema, + constraint_name) + WHERE constraint_type = 'FOREIGN KEY') FOREIGN + INNER JOIN (SELECT * + FROM `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` + WHERE position_in_unique_constraint IS NULL + AND RTRIM(table_name) = '%3$s') PRIMARY +ON + FOREIGN.PKTABLE_CAT = PRIMARY.table_catalog + AND FOREIGN.PKTABLE_SCHEM = PRIMARY.table_schema + AND FOREIGN.PKTABLE_NAME = PRIMARY.table_name + AND FOREIGN.position_in_unique_constraint = + PRIMARY.ordinal_position +ORDER BY FKTABLE_CAT, FKTABLE_SCHEM, FKTABLE_NAME, KEY_SEQ \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetImportedKeys.sql b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetImportedKeys.sql new file mode 100644 index 0000000000..3f4142eb05 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetImportedKeys.sql @@ -0,0 +1,71 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +SELECT PKTABLE_CAT, + PKTABLE_SCHEM, + PKTABLE_NAME, + PRIMARY.column_name AS PKCOLUMN_NAME, + FOREIGN.constraint_catalog AS FKTABLE_CAT, + FOREIGN.constraint_schema AS FKTABLE_SCHEM, + FOREIGN.table_name AS FKTABLE_NAME, + FOREIGN.column_name AS FKCOLUMN_NAME, + FOREIGN.ordinal_position AS KEY_SEQ, + NULL AS UPDATE_RULE, + NULL AS DELETE_RULE, + FOREIGN.constraint_name AS FK_NAME, + PRIMARY.constraint_name AS PK_NAME, + NULL AS DEFERRABILITY +FROM (SELECT DISTINCT CCU.table_catalog AS PKTABLE_CAT, + CCU.table_schema AS PKTABLE_SCHEM, + CCU.table_name AS PKTABLE_NAME, + TC.constraint_catalog, + TC.constraint_schema, + TC.constraint_name, + TC.table_catalog, + TC.table_schema, + TC.table_name, + TC.constraint_type, + KCU.column_name, + KCU.ordinal_position, + KCU.position_in_unique_constraint + FROM `%1$s.%2$s.INFORMATION_SCHEMA.TABLE_CONSTRAINTS` TC + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` KCU + USING + (constraint_catalog, + constraint_schema, + constraint_name, + table_catalog, + table_schema, + table_name) + INNER JOIN + `%1$s.%2$s.INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE` CCU + USING + (constraint_catalog, + constraint_schema, + constraint_name) + WHERE constraint_type = 'FOREIGN KEY' + AND TC.table_name = '%3$s') FOREIGN + INNER JOIN (SELECT * + FROM `%1$s.%2$s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE` + WHERE position_in_unique_constraint IS NULL) PRIMARY +ON + FOREIGN.PKTABLE_CAT = PRIMARY.table_catalog + AND FOREIGN.PKTABLE_SCHEM = PRIMARY.table_schema + AND FOREIGN.PKTABLE_NAME = PRIMARY.table_name + AND FOREIGN.position_in_unique_constraint = + PRIMARY.ordinal_position +ORDER BY PKTABLE_CAT, PKTABLE_SCHEM, PKTABLE_NAME, KEY_SEQ \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetPrimaryKeys.sql b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetPrimaryKeys.sql new file mode 100644 index 0000000000..282910fb97 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/DatabaseMetaData_GetPrimaryKeys.sql @@ -0,0 +1,30 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +SELECT table_catalog AS TABLE_CAT, + table_schema AS TABLE_SCHEM, + table_name AS TABLE_NAME, + column_name AS COLUMN_NAME, + ordinal_position AS KEY_SEQ, + constraint_name AS PK_NAME +FROM + %s.%s.INFORMATION_SCHEMA.KEY_COLUMN_USAGE +WHERE + table_name = '%s' + AND CONTAINS_SUBSTR(constraint_name + , 'pk$') +ORDER BY + COLUMN_NAME; diff --git a/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/dependencies.properties b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/dependencies.properties new file mode 100644 index 0000000000..6908cb6cc7 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/main/resources/com/google/cloud/bigquery/jdbc/dependencies.properties @@ -0,0 +1,4 @@ +# Versions of oneself +# {x-version-update-start:google-cloud-bigquery-jdbc:current} +version.jdbc=${project.version} +# {x-version-update-end} \ No newline at end of file diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/ArrowFormatTypeBigQueryCoercionUtilityTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/ArrowFormatTypeBigQueryCoercionUtilityTest.java new file mode 100644 index 0000000000..0524fc87d1 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/ArrowFormatTypeBigQueryCoercionUtilityTest.java @@ -0,0 +1,234 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryTypeCoercionUtility.INSTANCE; +import static com.google.common.truth.Truth.assertThat; +import static java.time.Month.FEBRUARY; +import static java.time.Month.JANUARY; + +import com.google.cloud.bigquery.FieldElementType; +import com.google.cloud.bigquery.Range; +import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule; +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.Duration; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.Period; +import org.apache.arrow.vector.PeriodDuration; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; +import org.apache.arrow.vector.util.Text; +import org.junit.Rule; +import org.junit.Test; + +public class ArrowFormatTypeBigQueryCoercionUtilityTest { + + @Rule public final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC"); + + private static final Range RANGE_DATE = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("DATE").build()) + .setStart("1970-01-02") + .setEnd("1970-03-04") + .build(); + + private static final Range RANGE_DATETIME = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("DATETIME").build()) + .setStart("2014-08-19 05:41:35.220000") + .setEnd("2015-09-20 06:41:35.220000") + .build(); + + private static final Range RANGE_TIMESTAMP = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .setStart("2014-08-19 12:41:35.220000+00:00") + .setEnd("2015-09-20 13:41:35.220000+01:00") + .build(); + + @Test + public void nullToString() { + assertThat(INSTANCE.coerceTo(String.class, null)).isNull(); + } + + @Test + public void JsonStringArrayListToString() { + JsonStringArrayList employeeList = new JsonStringArrayList<>(); + employeeList.add(1); + employeeList.add(2); + employeeList.add(3); + + assertThat(INSTANCE.coerceTo(String.class, employeeList)).isEqualTo("[1,2,3]"); + } + + @Test + public void localDateTimeToTimestamp() { + LocalDateTime localDatetime = LocalDateTime.of(1995, FEBRUARY, 23, 20, 15); + + assertThat(INSTANCE.coerceTo(Timestamp.class, localDatetime)) + .isEqualTo(Timestamp.valueOf(localDatetime)); + } + + @Test + public void textToString() { + Text text = new Text("Hello World!"); + + assertThat(INSTANCE.coerceTo(String.class, text)).isEqualTo("Hello World!"); + } + + @Test + public void nullToInteger() { + assertThat(INSTANCE.coerceTo(Integer.class, null)).isEqualTo(0); + } + + @Test + public void textToInteger() { + Text text = new Text("51423"); + + assertThat(INSTANCE.coerceTo(Integer.class, text)).isEqualTo(51423); + } + + @Test + public void longToInteger() { + assertThat(INSTANCE.coerceTo(Integer.class, 56L)).isEqualTo(56); + } + + @Test + public void bigDecimalToInteger() { + assertThat(INSTANCE.coerceTo(Integer.class, new BigDecimal("56"))).isEqualTo(56); + } + + @Test + public void nullToLong() { + assertThat(INSTANCE.coerceTo(Long.class, null)).isEqualTo(0L); + } + + @Test + public void bigDecimalToLong() { + assertThat(INSTANCE.coerceTo(Long.class, new BigDecimal("56"))).isEqualTo(56L); + } + + @Test + public void nullToDouble() { + assertThat(INSTANCE.coerceTo(Double.class, null)).isEqualTo(0D); + } + + @Test + public void bigDecimalToDouble() { + assertThat(INSTANCE.coerceTo(Double.class, new BigDecimal("56"))).isEqualTo(56D); + } + + @Test + public void nullToBoolean() { + 
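+    // For primitive wrapper targets, SQL NULL coerces to the Java default (false, 0, 0L, 0D);
+    // object targets such as String, Timestamp, and byte[] stay null, as the surrounding tests verify.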
assertThat(INSTANCE.coerceTo(Boolean.class, null)).isFalse(); + } + + @Test + public void nullToByteArray() { + assertThat(INSTANCE.coerceTo(byte[].class, null)).isNull(); + } + + @Test + public void nullToTimestamp() { + assertThat(INSTANCE.coerceTo(Timestamp.class, null)).isNull(); + } + + @Test + public void longToTimestamp() { + assertThat(INSTANCE.coerceTo(Timestamp.class, 1408452095220000L)) + .isEqualTo(new Timestamp(1408452095220L)); + } + + @Test + public void nullToTime() { + assertThat(INSTANCE.coerceTo(Time.class, null)).isNull(); + } + + @Test + public void longToTime() { + assertThat(INSTANCE.coerceTo(Time.class, 1408452095220000L)) + .isEqualTo(new Time(1408452095000L)); + } + + @Test + public void nullToDate() { + assertThat(INSTANCE.coerceTo(Date.class, null)).isNull(); + } + + @Test + public void integerToDate() { + LocalDate expectedDate = LocalDate.of(2022, JANUARY, 1); + assertThat(INSTANCE.coerceTo(Date.class, 18993).toLocalDate()).isEqualTo(expectedDate); + } + + @Test + public void periodDurationToString() { + Period period = Period.of(1, 3, 24); + Duration duration = Duration.ofHours(3).plusMinutes(45).plusSeconds(23).plusNanos(123456000); + PeriodDuration periodDuration = new PeriodDuration(period, duration); + assertThat(INSTANCE.coerceTo(String.class, periodDuration)).isEqualTo("1-3 24 3:45:23.123456"); + + Period period2 = Period.of(1, 6, -8); + Duration duration2 = Duration.ofHours(9).plusMinutes(43).plusSeconds(23).plusNanos(123456000); + PeriodDuration periodDuration2 = new PeriodDuration(period2, duration2); + assertThat(INSTANCE.coerceTo(String.class, periodDuration2)).isEqualTo("1-6 -8 9:43:23.123456"); + } + + // Range tests + + @Test + public void JsonStringHashMapToString() { + JsonStringHashMap employeeMap = new JsonStringHashMap<>(); + employeeMap.putIfAbsent("name1", "type1"); + employeeMap.putIfAbsent("name2", "type2"); + employeeMap.putIfAbsent("name3", "type3"); + + assertThat(INSTANCE.coerceTo(String.class, employeeMap)) + .isEqualTo("{\"name1\":\"type1\",\"name2\":\"type2\",\"name3\":\"type3\"}"); + } + + @Test + public void rangeDateToString() { + String expectedRangeDate = + String.format( + "[%s, %s)", + RANGE_DATE.getStart().getStringValue(), RANGE_DATE.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_DATE)).isEqualTo(expectedRangeDate); + } + + @Test + public void rangeDatetimeToString() { + String expectedRangeDate = + String.format( + "[%s, %s)", + RANGE_DATETIME.getStart().getStringValue(), RANGE_DATETIME.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_DATETIME)).isEqualTo(expectedRangeDate); + } + + @Test + public void rangeTimestampToString() { + String expectedRangeTimestamp = + String.format( + "[%s, %s)", + RANGE_TIMESTAMP.getStart().getStringValue(), RANGE_TIMESTAMP.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_TIMESTAMP)).isEqualTo(expectedRangeTimestamp); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfPrimitivesTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfPrimitivesTest.java new file mode 100644 index 0000000000..5b33fda788 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfPrimitivesTest.java @@ -0,0 +1,367 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.StandardSQLTypeName.BIGNUMERIC; +import static com.google.cloud.bigquery.StandardSQLTypeName.BOOL; +import static com.google.cloud.bigquery.StandardSQLTypeName.BYTES; +import static com.google.cloud.bigquery.StandardSQLTypeName.DATE; +import static com.google.cloud.bigquery.StandardSQLTypeName.DATETIME; +import static com.google.cloud.bigquery.StandardSQLTypeName.FLOAT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.GEOGRAPHY; +import static com.google.cloud.bigquery.StandardSQLTypeName.INT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.NUMERIC; +import static com.google.cloud.bigquery.StandardSQLTypeName.STRING; +import static com.google.cloud.bigquery.StandardSQLTypeName.TIME; +import static com.google.cloud.bigquery.StandardSQLTypeName.TIMESTAMP; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.INVALID_ARRAY; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arrowArraySchemaAndValue; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.nestedResultSetToColumnLists; +import static com.google.common.truth.Truth.assertThat; +import static java.lang.Boolean.FALSE; +import static java.lang.Boolean.TRUE; +import static java.time.Month.MARCH; +import static java.util.Arrays.copyOfRange; +import static java.util.Collections.emptyMap; +import static org.junit.Assert.assertThrows; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule; +import java.math.BigDecimal; +import java.sql.Array; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.stream.Stream; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.Text; +import org.junit.Before; +import org.junit.ClassRule; +import org.junit.Test; +import org.junit.function.ThrowingRunnable; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +@RunWith(Parameterized.class) +public class BigQueryArrowArrayOfPrimitivesTest { + + private final Field schema; + private final JsonStringArrayList arrayValues; + private final Object[] expected; + private final int javaSqlTypeCode; + private Array array; + private final StandardSQLTypeName currentType; + + @ClassRule public static final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC"); + + public BigQueryArrowArrayOfPrimitivesTest( + StandardSQLTypeName currentType, + Tuple> schemaAndValue, + Object[] 
expected, + int javaSqlTypeCode) { + this.currentType = currentType; + this.schema = schemaAndValue.x(); + this.arrayValues = schemaAndValue.y(); + this.expected = expected; + this.javaSqlTypeCode = javaSqlTypeCode; + } + + @Before + public void setUp() { + array = new BigQueryArrowArray(this.schema, this.arrayValues); + } + + @Parameters(name = "{index}: primitive array of {0}") + public static Collection data() { + timeZoneRule.enforce(); + LocalDateTime aTimeStamp = LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820227000); + LocalDate aDate = LocalDate.of(2023, MARCH, 30); + LocalTime aTime = LocalTime.of(11, 14, 19, 820227); + return Arrays.asList( + new Object[][] { + { + INT64, + arrowArraySchemaAndValue(INT64, 10L, 20L, 30L, 40L), + new Long[] {10L, 20L, 30L, 40L}, + Types.BIGINT + }, + { + BOOL, + arrowArraySchemaAndValue(BOOL, TRUE, FALSE, FALSE, TRUE), + new Boolean[] {true, false, false, true}, + Types.BOOLEAN + }, + { + FLOAT64, + arrowArraySchemaAndValue( + FLOAT64, + Double.valueOf("11.2"), + Double.valueOf("33.4"), + Double.valueOf("55.6"), + Double.valueOf("77.8")), + new Double[] {11.2, 33.4, 55.6, 77.8}, + Types.DOUBLE + }, + { + NUMERIC, + arrowArraySchemaAndValue( + NUMERIC, + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657")), + new BigDecimal[] { + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657") + }, + Types.NUMERIC + }, + { + BIGNUMERIC, + arrowArraySchemaAndValue( + BIGNUMERIC, + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657")), + new BigDecimal[] { + new BigDecimal("11.2657"), + new BigDecimal("33.4657"), + new BigDecimal("55.6657"), + new BigDecimal("77.8657") + }, + Types.NUMERIC + }, + { + STRING, + arrowArraySchemaAndValue( + STRING, new Text("one"), new Text("two"), new Text("three"), new Text("four")), + new String[] {"one", "two", "three", "four"}, + Types.NVARCHAR + }, + { + TIMESTAMP, + arrowArraySchemaAndValue( + TIMESTAMP, + Long.valueOf("1680174859820227"), + Long.valueOf("1680261259820227"), + Long.valueOf("1680347659820227"), + Long.valueOf("1680434059820227")), + new Timestamp[] { + Timestamp.valueOf(aTimeStamp), // 2023-03-30 16:44:19.82 + Timestamp.valueOf(aTimeStamp.plusDays(1)), + Timestamp.valueOf(aTimeStamp.plusDays(2)), + Timestamp.valueOf(aTimeStamp.plusDays(3)) + }, + Types.TIMESTAMP + }, + { + DATE, + arrowArraySchemaAndValue(DATE, 19446, 19447, 19448, 19449), + new Date[] { + Date.valueOf(aDate), + Date.valueOf(aDate.plusDays(1)), + Date.valueOf(aDate.plusDays(2)), + Date.valueOf(aDate.plusDays(3)) + }, + Types.DATE + }, + { + TIME, + arrowArraySchemaAndValue( + TIME, + Long.valueOf("40459820227"), // 11:14:19.820227 + Long.valueOf("40460820227"), + Long.valueOf("40461820227"), + Long.valueOf("40462820227")), + new Time[] { + Time.valueOf(aTime), + Time.valueOf(aTime.plusSeconds(1)), + Time.valueOf(aTime.plusSeconds(2)), + Time.valueOf(aTime.plusSeconds(3)) + }, + Types.TIME + }, + { + DATETIME, + arrowArraySchemaAndValue( + DATETIME, + LocalDateTime.parse("2023-03-30T11:14:19.820227"), + LocalDateTime.parse("2023-03-30T11:15:19.820227"), + LocalDateTime.parse("2023-03-30T11:16:19.820227"), + LocalDateTime.parse("2023-03-30T11:17:19.820227")), + new Timestamp[] { + Timestamp.valueOf(LocalDateTime.parse("2023-03-30T11:14:19.820227")), + Timestamp.valueOf(LocalDateTime.parse("2023-03-30T11:15:19.820227")), + 
Timestamp.valueOf(LocalDateTime.parse("2023-03-30T11:16:19.820227")), + Timestamp.valueOf(LocalDateTime.parse("2023-03-30T11:17:19.820227")) + }, + Types.TIMESTAMP + }, + { + GEOGRAPHY, + arrowArraySchemaAndValue( + GEOGRAPHY, + new Text("POINT(-122 47)"), + new Text("POINT(-122 48)"), + new Text("POINT(-121 47)"), + new Text("POINT(-123 48)")), + new String[] {"POINT(-122 47)", "POINT(-122 48)", "POINT(-121 47)", "POINT(-123 48)"}, + Types.OTHER + }, + { + BYTES, + arrowArraySchemaAndValue( + BYTES, + Stream.of("one", "two", "three", "four") + .map(String::getBytes) + .toArray(byte[][]::new)), // array of bytes array + new byte[][] { + "one".getBytes(), "two".getBytes(), "three".getBytes(), "four".getBytes() + }, + Types.VARBINARY + } + }); + } + + @Test + public void getArray() throws SQLException { + assertThat(array.getArray()).isEqualTo(this.expected); + } + + @Test + public void getSlicedArray() throws SQLException { + int fromIndex = 1; + int toIndexExclusive = 3; + Object[] expectedSlicedArray = + copyOfRange(this.expected, fromIndex, toIndexExclusive); // copying index(1,2) + + // the first element is at index 1 + assertThat(array.getArray(fromIndex + 1, 2)).isEqualTo(expectedSlicedArray); + } + + @Test + public void getSlicedArrayWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getArray(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getResultSet() throws SQLException { + ResultSet resultSet = this.array.getResultSet(); + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + ArrayList indexList = indexAndValues.x(); + ArrayList columnValues = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {1, 2, 3, 4}); + assertThat(columnValues.toArray()).isEqualTo(this.expected); + } + + @Test + public void getSlicedResultSet() throws SQLException { + int fromIndex = 1; + int toIndexExclusive = 3; + Object[] expectedSlicedArray = + copyOfRange(this.expected, fromIndex, toIndexExclusive); // copying index(1,2) + + // the first element is at index 1 + ResultSet resultSet = array.getResultSet(fromIndex + 1, 2); + + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + ArrayList indexList = indexAndValues.x(); + ArrayList columnValues = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {2, 3}); + assertThat(columnValues.toArray()).isEqualTo(expectedSlicedArray); + } + + @Test + public void getSlicedResultSetWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getResultSet(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getBaseTypeName() throws SQLException { + assertThat(array.getBaseTypeName()).isEqualTo(this.currentType.name()); + } + + @Test + public void getBaseType() throws SQLException { + assertThat(array.getBaseType()).isEqualTo(this.javaSqlTypeCode); + } + + @Test + public void free() throws SQLException { + this.array.free(); + + ensureArrayIsInvalid(() -> array.getArray()); + ensureArrayIsInvalid(() -> array.getArray(1, 2)); + ensureArrayIsInvalid(() -> array.getResultSet()); + ensureArrayIsInvalid(() -> array.getResultSet(1, 2)); + 
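+    // The remaining metadata accessors must fail the same way once free() has been called.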
ensureArrayIsInvalid(() -> array.getBaseTypeName()); + ensureArrayIsInvalid(() -> array.getBaseType()); + } + + @Test + public void getArrayWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(emptyMap())); + Exception exception2 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Test + public void getResultSetWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getResultSet(emptyMap())); + Exception exception2 = + assertThrows( + SQLFeatureNotSupportedException.class, () -> array.getResultSet(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + private void ensureArrayIsInvalid(ThrowingRunnable block) { + Exception exception = assertThrows(IllegalStateException.class, block); + assertThat(exception.getMessage()).isEqualTo(INVALID_ARRAY); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfStructTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfStructTest.java new file mode 100644 index 0000000000..7cb84e70ab --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowArrayOfStructTest.java @@ -0,0 +1,205 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.LegacySQLTypeName.RECORD; +import static com.google.cloud.bigquery.StandardSQLTypeName.BOOL; +import static com.google.cloud.bigquery.StandardSQLTypeName.INT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.STRING; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.INVALID_ARRAY; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arrowArrayOf; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arrowStructOf; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.nestedResultSetToColumnLists; +import static com.google.common.truth.Truth.assertThat; +import static java.lang.Boolean.FALSE; +import static java.lang.Boolean.TRUE; +import static java.util.Arrays.asList; +import static java.util.Collections.emptyMap; +import static org.junit.Assert.assertThrows; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.StandardSQLTypeName; +import java.sql.Array; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Struct; +import java.sql.Types; +import java.util.ArrayList; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; +import org.apache.arrow.vector.util.Text; +import org.junit.Before; +import org.junit.Test; +import org.junit.function.ThrowingRunnable; + +public class BigQueryArrowArrayOfStructTest { + + private Array array; + + @Before + public void setUp() { + FieldList profileSchema = + FieldList.of( + Field.newBuilder("name", LegacySQLTypeName.STRING).build(), + Field.newBuilder("age", LegacySQLTypeName.INTEGER).build(), + Field.newBuilder("adult", LegacySQLTypeName.BOOLEAN).build()); + + JsonStringHashMap record1 = + arrowStructOf( + Tuple.of(STRING, new Text("Arya")), Tuple.of(INT64, 15L), Tuple.of(BOOL, FALSE)) + .y(); + JsonStringHashMap record2 = + arrowStructOf( + Tuple.of(STRING, new Text("Khal Drogo")), + Tuple.of(INT64, 35L), + Tuple.of(BOOL, TRUE)) + .y(); + JsonStringHashMap record3 = + arrowStructOf( + Tuple.of(STRING, new Text("Ned Stark")), Tuple.of(INT64, 45L), Tuple.of(BOOL, TRUE)) + .y(); + JsonStringHashMap record4 = + arrowStructOf( + Tuple.of(STRING, new Text("Jon Snow")), Tuple.of(INT64, 25L), Tuple.of(BOOL, TRUE)) + .y(); + + Field arrayOfStructSchema = + Field.newBuilder("profiles", RECORD, profileSchema).setMode(Mode.REPEATED).build(); + + JsonStringArrayList> arrayOfStructValue = + arrowArrayOf(record1, record2, record3, record4); + array = new BigQueryArrowArray(arrayOfStructSchema, arrayOfStructValue); + } + + @Test + public void getArray() throws SQLException { + Struct[] structArray = (Struct[]) array.getArray(); + + assertThat(structArray.length).isEqualTo(4); + assertThat(structArray[0].getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray()); + assertThat(structArray[1].getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structArray[2].getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + assertThat(structArray[3].getAttributes()).isEqualTo(asList("Jon Snow", 25L, true).toArray()); + } + + @Test + public void 
getSlicedArray() throws SQLException { + Struct[] structArray = (Struct[]) array.getArray(2, 2); + + assertThat(structArray.length).isEqualTo(2); + assertThat(structArray[0].getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structArray[1].getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + } + + @Test + public void getSlicedArrayWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getArray(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getResultSet() throws SQLException { + ResultSet resultSet = array.getResultSet(); + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + + ArrayList indexList = indexAndValues.x(); + ArrayList structs = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {1, 2, 3, 4}); + assertThat(structs.get(0).getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray()); + assertThat(structs.get(1).getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structs.get(2).getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + assertThat(structs.get(3).getAttributes()).isEqualTo(asList("Jon Snow", 25L, true).toArray()); + } + + @Test + public void getSlicedResultSet() throws SQLException { + ResultSet resultSet = array.getResultSet(2, 2); + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + + ArrayList indexList = indexAndValues.x(); + ArrayList structs = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {2, 3}); + assertThat(structs.get(0).getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structs.get(1).getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + } + + @Test + public void getResultSetWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getResultSet(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getBaseTypeName() throws SQLException { + assertThat(array.getBaseTypeName()).isEqualTo(StandardSQLTypeName.STRUCT.name()); + } + + @Test + public void getBaseType() throws SQLException { + assertThat(array.getBaseType()).isEqualTo(Types.STRUCT); + } + + @Test + public void free() throws SQLException { + this.array.free(); + + ensureArrayIsInvalid(() -> array.getArray()); + ensureArrayIsInvalid(() -> array.getArray(1, 2)); + ensureArrayIsInvalid(() -> array.getBaseTypeName()); + ensureArrayIsInvalid(() -> array.getBaseType()); + } + + @Test + public void getArrayWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(emptyMap())); + Exception exception2 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Test + public void getResultSetWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> 
array.getResultSet(emptyMap())); + Exception exception2 = + assertThrows( + SQLFeatureNotSupportedException.class, () -> array.getResultSet(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + private void ensureArrayIsInvalid(ThrowingRunnable block) { + Exception exception = assertThrows(IllegalStateException.class, block); + assertThat(exception.getMessage()).isEqualTo(INVALID_ARRAY); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSetTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSetTest.java new file mode 100644 index 0000000000..efde49309d --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowResultSetTest.java @@ -0,0 +1,358 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.utils.ArrowUtilities.serializeSchema; +import static com.google.cloud.bigquery.jdbc.utils.ArrowUtilities.serializeVectorSchemaRoot; +import static com.google.common.truth.Truth.assertThat; +import static org.apache.arrow.vector.types.Types.MinorType.INT; +import static org.apache.arrow.vector.types.Types.MinorType.VARCHAR; +import static org.mockito.Mockito.mock; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.storage.v1.ArrowRecordBatch; +import com.google.cloud.bigquery.storage.v1.ArrowSchema; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.sql.Array; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Struct; +import java.util.Arrays; +import java.util.List; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingDeque; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.BitVector; +import org.apache.arrow.vector.DateMilliVector; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.Float8Vector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.TimeMilliVector; +import org.apache.arrow.vector.TimeStampMicroVector; +import org.apache.arrow.vector.VarBinaryVector; +import org.apache.arrow.vector.VarCharVector; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.complex.ListVector; +import org.apache.arrow.vector.complex.StructVector; +import org.apache.arrow.vector.complex.impl.UnionListWriter; +import org.apache.arrow.vector.types.pojo.FieldType; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.Text; 
+import org.junit.Before; +import org.junit.Test; + +public class BigQueryArrowResultSetTest { + + private static final FieldList fieldList = + FieldList.of( + Field.of("boolField", StandardSQLTypeName.BOOL), + Field.of("int64Filed", StandardSQLTypeName.INT64), + Field.of("float64Field", StandardSQLTypeName.FLOAT64), + Field.of("stringField", StandardSQLTypeName.STRING), + Field.of("timeStampField", StandardSQLTypeName.TIMESTAMP), + Field.of("bytesField", StandardSQLTypeName.BYTES), + Field.newBuilder("intArrayField", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REPEATED) + .build(), + Field.of( + "structField", + StandardSQLTypeName.STRUCT, + Field.of("name", StandardSQLTypeName.STRING), + Field.of("age", StandardSQLTypeName.INT64)), + Field.of("numericField", StandardSQLTypeName.BIGNUMERIC), + Field.of("timeField", StandardSQLTypeName.TIME), + Field.of("dateField", StandardSQLTypeName.DATE)); + + private BigQueryArrowBatchWrapper arrowBatchWrapper; + private BigQueryArrowBatchWrapper arrowBatchWrapperLast; + + private BigQueryStatement statement; + + private BlockingQueue buffer; + private BlockingQueue bufferWithTwoRows; + private static final Schema QUERY_SCHEMA = Schema.of(fieldList); + + private VectorSchemaRoot vectorSchemaRoot; + private BigQueryArrowResultSet bigQueryArrowResultSet; + private BigQueryArrowResultSet bigQueryArrowResultSetNested; + + private VectorSchemaRoot getTestVectorSchemaRoot() { + RootAllocator allocator = new RootAllocator(); + BitVector boolField = + new BitVector("boolField", allocator); // Mapped with StandardSQLTypeName.BOOL + boolField.allocateNew(2); + boolField.set(0, 0); + boolField.setValueCount(1); + IntVector int64Filed = + new IntVector("int64Filed", allocator); // Mapped with StandardSQLTypeName.INT64 + int64Filed.allocateNew(2); + int64Filed.set(0, 1); + int64Filed.setValueCount(1); + Float8Vector float64Field = + new Float8Vector("float64Field", allocator); // Mapped with StandardSQLTypeName.FLOAT64 + float64Field.allocateNew(2); + float64Field.set(0, 1.1f); + float64Field.setValueCount(1); + VarCharVector stringField = + new VarCharVector("stringField", allocator); // Mapped with StandardSQLTypeName.STRING + stringField.allocateNew(2); + stringField.set(0, new Text("text1")); + stringField.setValueCount(1); + TimeStampMicroVector timeStampField = + new TimeStampMicroVector( + "timeStampField", allocator); // Mapped with StandardSQLTypeName.TIMESTAMP + timeStampField.allocateNew(2); + timeStampField.set(0, 10000L); + timeStampField.setValueCount(1); + VarBinaryVector bytesField = + new VarBinaryVector("bytesField", allocator); // Mapped with StandardSQLTypeName.BYTES + bytesField.allocateNew(2); + bytesField.set(0, "text1".getBytes()); + bytesField.setValueCount(1); + + ListVector listVector = ListVector.empty("intArrayField", allocator); + UnionListWriter listWriter = listVector.getWriter(); + listWriter.setPosition(0); + listWriter.startList(); + listWriter.writeBigInt(10L); + listWriter.writeBigInt(20L); + listWriter.setValueCount(2); + listWriter.endList(); + listVector.setValueCount(1); + + StructVector structVector = StructVector.empty("structField", allocator); + VarCharVector nameVector = + structVector.addOrGet( + "name", FieldType.notNullable(VARCHAR.getType()), VarCharVector.class); + IntVector ageVector = + structVector.addOrGet("age", FieldType.notNullable(INT.getType()), IntVector.class); + structVector.allocateNew(); + + nameVector.set(0, new Text("Jon Doe")); + nameVector.setValueCount(1); + + ageVector.set(0, 29); + 
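+    // Each vector in this test batch holds exactly one populated row.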
ageVector.setValueCount(1); + + structVector.setValueCount(1); + structVector.setIndexDefined(0); + + IntVector numericField = + new IntVector("numericField", allocator); // Mapped with StandardSQLTypeName.BIGNUMERIC + numericField.allocateNew(1000); + numericField.set(0, 1); + numericField.setValueCount(1); + TimeMilliVector timeField = + new TimeMilliVector("timeField", allocator); // Mapped with StandardSQLTypeName.TIME + timeField.allocateNew(2); + timeField.set(0, 1234); + timeField.setValueCount(1); + DateMilliVector dateField = + new DateMilliVector("dateField", allocator); // Mapped with StandardSQLTypeName.DATE + dateField.allocateNew(2); + dateField.set(0, 5000); + dateField.setValueCount(1); + + List fieldVectors = + ImmutableList.of( + boolField, + int64Filed, + float64Field, + stringField, + timeStampField, + bytesField, + listVector, + structVector, + numericField, + timeField, + dateField); + return new VectorSchemaRoot(fieldVectors); + } + + private JsonStringArrayList getJsonStringArrayList() { + JsonStringArrayList jsonStringArrayList = new JsonStringArrayList<>(); + jsonStringArrayList.addAll(Arrays.asList(10L, 20L)); + return jsonStringArrayList; + } + + @Before + public void setUp() throws SQLException, IOException { + buffer = new LinkedBlockingDeque<>(); + bufferWithTwoRows = new LinkedBlockingDeque<>(); + vectorSchemaRoot = getTestVectorSchemaRoot(); + ArrowRecordBatch batch = + ArrowRecordBatch.newBuilder() + .setSerializedRecordBatch(serializeVectorSchemaRoot(vectorSchemaRoot)) + .build(); + arrowBatchWrapper = BigQueryArrowBatchWrapper.of(batch); + arrowBatchWrapperLast = BigQueryArrowBatchWrapper.of(null, true); // last flag + buffer.add(arrowBatchWrapper); + buffer.add(arrowBatchWrapperLast); + bufferWithTwoRows.add(arrowBatchWrapper); + bufferWithTwoRows.add(arrowBatchWrapperLast); + + statement = mock(BigQueryStatement.class); + ArrowSchema arrowSchema = + ArrowSchema.newBuilder() + .setSerializedSchema(serializeSchema(vectorSchemaRoot.getSchema())) + .build(); + Thread workerThread = new Thread(); + bigQueryArrowResultSet = + BigQueryArrowResultSet.of( + QUERY_SCHEMA, arrowSchema, 1, statement, buffer, workerThread, null); + + // nested result set data setup + JsonStringArrayList jsonStringArrayList = getJsonStringArrayList(); + Schema arraySchema = + Schema.of( + Field.newBuilder("integerArray", StandardSQLTypeName.INT64) + .setMode(Mode.REPEATED) + .build()); + bigQueryArrowResultSetNested = + BigQueryArrowResultSet.getNestedResultSet( + arraySchema, + BigQueryArrowBatchWrapper.getNestedFieldValueListWrapper(jsonStringArrayList), + 0, + jsonStringArrayList.size()); + } + + @Test + public void testVectorSchemaRoot() { + assertThat(vectorSchemaRoot).isNotNull(); + assertThat(vectorSchemaRoot.getRowCount()).isEqualTo(1); + } + + @Test + public void testBufferSize() { + assertThat(buffer).isNotNull(); + assertThat(buffer.size()).isEqualTo(2); + } + + @Test + public void testRowCount() throws SQLException, IOException { + ArrowSchema arrowSchema = + ArrowSchema.newBuilder() + .setSerializedSchema(serializeSchema(vectorSchemaRoot.getSchema())) + .build(); + Thread workerThread = new Thread(); + // ResultSet with 1 row buffer and 1 total rows. + BigQueryArrowResultSet bigQueryArrowResultSet2 = + BigQueryArrowResultSet.of( + QUERY_SCHEMA, arrowSchema, 1, statement, buffer, workerThread, null); + + assertThat(resultSetRowCount(bigQueryArrowResultSet2)).isEqualTo(1); + // ResultSet with 2 rows buffer and 1 total rows. 
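+    // The buffer again holds the single-row batch plus the end-of-stream marker, so the reported count stays 1.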
+ bigQueryArrowResultSet2 = + BigQueryArrowResultSet.of( + QUERY_SCHEMA, arrowSchema, 1, statement, bufferWithTwoRows, workerThread, null); + + assertThat(resultSetRowCount(bigQueryArrowResultSet2)).isEqualTo(1); + } + + // Tests iteration and the ResultSet's type getters. + @Test + public void testIteration() throws SQLException { + int cnt = 0; + assertThat(bigQueryArrowResultSet.isBeforeFirst()).isTrue(); + while (bigQueryArrowResultSet.next()) { + cnt++; + assertThat(bigQueryArrowResultSet.isLast()).isTrue(); // we have one test row + assertThat(bigQueryArrowResultSet.isFirst()).isTrue(); // we have one test row + + assertThat(bigQueryArrowResultSet.getString(4)).isEqualTo("text1"); + + // array + assertThat(bigQueryArrowResultSet.getArray("intArrayField").getArray()) + .isEqualTo(new Object[] {10L, 20L}); + assertThat(bigQueryArrowResultSet.getArray(7).getArray()).isEqualTo(new Object[] {10L, 20L}); + assertThat(((Array) bigQueryArrowResultSet.getObject("intArrayField")).getArray()) + .isEqualTo(new Object[] {10L, 20L}); + assertThat(((Array) bigQueryArrowResultSet.getObject(7)).getArray()) + .isEqualTo(new Object[] {10L, 20L}); + + // struct + assertThat(((Struct) bigQueryArrowResultSet.getObject("structField")).getAttributes()) + .isEqualTo(new Object[] {"Jon Doe", 29L}); + assertThat(((Struct) bigQueryArrowResultSet.getObject(8)).getAttributes()) + .isEqualTo(new Object[] {"Jon Doe", 29L}); + } + assertThat(cnt).isEqualTo(1); + assertThat(bigQueryArrowResultSet.next()).isFalse(); + assertThat(bigQueryArrowResultSet.isAfterLast()).isTrue(); + } + + @Test + public void testIsClosed() { + assertThat(bigQueryArrowResultSet.isClosed()).isFalse(); + } + + @Test + public void testResultSetHoldability() throws SQLException { + assertThat(bigQueryArrowResultSet.getHoldability()) + .isEqualTo(ResultSet.HOLD_CURSORS_OVER_COMMIT); + } + + @Test + public void testStatement() throws SQLException { + assertThat(bigQueryArrowResultSet.getStatement()).isEqualTo(statement); + assertThat(bigQueryArrowResultSetNested.getStatement()).isNull(); + } + + @Test + public void testConcurrencyTypeColumn() throws SQLException { + assertThat(bigQueryArrowResultSet.getConcurrency()).isEqualTo(ResultSet.CONCUR_READ_ONLY); + assertThat(bigQueryArrowResultSet.getType()).isEqualTo(ResultSet.TYPE_FORWARD_ONLY); + assertThat(bigQueryArrowResultSet.findColumn("boolField")).isEqualTo(1); + } + + @Test + public void testIterationNested() throws SQLException { + int cnt = 0; + assertThat(bigQueryArrowResultSetNested.isBeforeFirst()).isTrue(); + while (bigQueryArrowResultSetNested.next()) { + cnt++; + if (cnt == 1) { + assertThat(bigQueryArrowResultSetNested.isFirst()).isTrue(); + + } else { // 2nd row is the last row + assertThat(bigQueryArrowResultSetNested.isLast()).isTrue(); + } + assertThat(bigQueryArrowResultSetNested.getInt(1)) + .isEqualTo(cnt); // column 1 holds the 1-based element index + assertThat(bigQueryArrowResultSetNested.getInt(2)) + .isEqualTo(cnt * 10); // column 2 holds the element values 10 and 20 + } + assertThat(cnt).isEqualTo(2); + assertThat(bigQueryArrowResultSetNested.next()).isFalse(); + assertThat(bigQueryArrowResultSetNested.isAfterLast()).isTrue(); + } + + private int resultSetRowCount(BigQueryArrowResultSet resultSet) throws SQLException { + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + } + return rowCount; + } + + // TODO: Unit Test for iteration and getters +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStructTest.java
b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStructTest.java new file mode 100644 index 0000000000..2c3bedcc4c --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryArrowStructTest.java @@ -0,0 +1,245 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.StandardSQLTypeName.BIGNUMERIC; +import static com.google.cloud.bigquery.StandardSQLTypeName.BOOL; +import static com.google.cloud.bigquery.StandardSQLTypeName.BYTES; +import static com.google.cloud.bigquery.StandardSQLTypeName.DATE; +import static com.google.cloud.bigquery.StandardSQLTypeName.DATETIME; +import static com.google.cloud.bigquery.StandardSQLTypeName.FLOAT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.GEOGRAPHY; +import static com.google.cloud.bigquery.StandardSQLTypeName.INT64; +import static com.google.cloud.bigquery.StandardSQLTypeName.NUMERIC; +import static com.google.cloud.bigquery.StandardSQLTypeName.STRING; +import static com.google.cloud.bigquery.StandardSQLTypeName.TIME; +import static com.google.cloud.bigquery.StandardSQLTypeName.TIMESTAMP; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arrowArraySchemaAndValue; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arrowStructOf; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.toArrowStruct; +import static com.google.common.truth.Truth.assertThat; +import static java.lang.Boolean.FALSE; +import static java.lang.Boolean.TRUE; +import static java.time.Month.MARCH; +import static java.util.Arrays.asList; +import static java.util.Collections.emptyMap; +import static org.junit.Assert.assertThrows; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule; +import java.math.BigDecimal; +import java.sql.Array; +import java.sql.Date; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; +import org.apache.arrow.vector.util.Text; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; + +public class BigQueryArrowStructTest { + + @Rule public final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC"); + + private Struct structWithPrimitiveValues; + + @Before + public void setUp() { + 
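// arrowStructOf (TestUtilities helper) pairs each primitive StandardSQLTypeName with a raw Arrow value, returning the struct's schema (x) and its backing value map (y). +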
Tuple<FieldList, JsonStringHashMap<String, Object>> schemaAndValues = + arrowStructOf( + Tuple.of(INT64, Long.valueOf("10")), + Tuple.of(BOOL, TRUE), + Tuple.of(FLOAT64, Double.valueOf("11.2")), + Tuple.of(NUMERIC, new BigDecimal("11.2657")), + Tuple.of(BIGNUMERIC, new BigDecimal("11.2657")), + Tuple.of(STRING, new Text("one")), + Tuple.of(TIMESTAMP, Long.valueOf("1680174859820227")), // 2023-03-30 11:14:19.820227 UTC + Tuple.of(DATE, 19446), // 2023-03-30 + Tuple.of(TIME, Long.valueOf("40459820227")), + Tuple.of(DATETIME, LocalDateTime.parse("2023-03-30T11:14:19.820227")), + Tuple.of(GEOGRAPHY, new Text("POINT(-122 47)")), + Tuple.of(BYTES, "one".getBytes())); + + structWithPrimitiveValues = new BigQueryArrowStruct(schemaAndValues.x(), schemaAndValues.y()); + } + + @Test + public void structOfPrimitives() throws SQLException { + assertThat(structWithPrimitiveValues.getAttributes()) + .isEqualTo( + asList( + 10L, + true, + 11.2, + new BigDecimal("11.2657"), + new BigDecimal("11.2657"), + "one", + Timestamp.valueOf(LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820227000)), + Date.valueOf(LocalDate.of(2023, MARCH, 30)), + Time.valueOf(LocalTime.of(11, 14, 19, 820227)), + Timestamp.valueOf("2023-03-30 11:14:19.820227"), + "POINT(-122 47)", + "one".getBytes()) + .toArray()); + } + + @Test + public void structOfArrays() throws SQLException { + LocalDateTime aTimeStamp = LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820227000); + LocalDate aDate = LocalDate.of(2023, MARCH, 30); + LocalTime aTime = LocalTime.of(11, 14, 19, 820227); + List<Tuple<Field, List<Object>>> schemaAndValues = + Arrays.asList( + arrowArraySchemaAndValue(INT64, 10L, 20L), + arrowArraySchemaAndValue(BOOL, Boolean.TRUE, FALSE), + arrowArraySchemaAndValue(FLOAT64, Double.valueOf("11.2"), Double.valueOf("33.4")), + arrowArraySchemaAndValue(NUMERIC, new BigDecimal("11.2657"), new BigDecimal("33.4657")), + arrowArraySchemaAndValue( + BIGNUMERIC, new BigDecimal("11.2657"), new BigDecimal("33.4657")), + arrowArraySchemaAndValue(STRING, new Text("one"), new Text("two")), + arrowArraySchemaAndValue( + TIMESTAMP, Long.valueOf("1680174859820227"), Long.valueOf("1680261259820227")), + arrowArraySchemaAndValue(DATE, 19446, 19447), + arrowArraySchemaAndValue( + TIME, Long.valueOf("40459820227"), Long.valueOf("40460820227")), + arrowArraySchemaAndValue( + DATETIME, + LocalDateTime.parse("2023-03-30T11:14:19.820227"), + LocalDateTime.parse("2023-03-30T11:15:19.820227")), + arrowArraySchemaAndValue( + GEOGRAPHY, new Text("POINT(-122 47)"), new Text("POINT(-122 48)")), + arrowArraySchemaAndValue( + BYTES, Stream.of("one", "two").map(String::getBytes).toArray(byte[][]::new))); + + List<Field> orderedSchemas = + schemaAndValues.stream().map(Tuple::x).collect(Collectors.toList()); + JsonStringHashMap<String, Object> jsonStringHashMap = toArrowStruct(schemaAndValues); + + Struct struct = new BigQueryArrowStruct(FieldList.of(orderedSchemas), jsonStringHashMap); + + Object[] attributes = struct.getAttributes(); + assertThat(((Array) attributes[0]).getArray()).isEqualTo(new Long[] {10L, 20L}); + assertThat(((Array) attributes[1]).getArray()).isEqualTo(new Boolean[] {true, false}); + assertThat(((Array) attributes[2]).getArray()).isEqualTo(new Double[] {11.2, 33.4}); + assertThat(((Array) attributes[3]).getArray()) + .isEqualTo(new BigDecimal[] {new BigDecimal("11.2657"), new BigDecimal("33.4657")}); + assertThat(((Array) attributes[4]).getArray()) + .isEqualTo(new BigDecimal[] {new BigDecimal("11.2657"), new BigDecimal("33.4657")}); + assertThat(((Array) attributes[5]).getArray()).isEqualTo(new String[] {"one", "two"}); + assertThat(((Array) attributes[6]).getArray()) + .isEqualTo( + new Timestamp[] { + Timestamp.valueOf(aTimeStamp), // 2023-03-30 11:14:19.820227 UTC + Timestamp.valueOf(aTimeStamp.plusDays(1)) + }); + assertThat(((Array) attributes[7]).getArray()) + .isEqualTo(new Date[] {Date.valueOf(aDate), Date.valueOf(aDate.plusDays(1))}); + assertThat(((Array) attributes[8]).getArray()) + .isEqualTo(new Time[] {Time.valueOf(aTime), Time.valueOf(aTime.plusSeconds(1))}); + assertThat(((Array) attributes[9]).getArray()) // DATETIME + .isEqualTo( + new Timestamp[] { + Timestamp.valueOf("2023-03-30 11:14:19.820227"), + Timestamp.valueOf("2023-03-30 11:15:19.820227") + }); + assertThat(((Array) attributes[10]).getArray()) + .isEqualTo(new String[] {"POINT(-122 47)", "POINT(-122 48)"}); + assertThat(((Array) attributes[11]).getArray()) + .isEqualTo(new byte[][] {"one".getBytes(), "two".getBytes()}); + } + + @Test + public void structOfStructs() throws SQLException { + FieldList profileSchema = + FieldList.of( + Field.of("name", LegacySQLTypeName.STRING), + Field.of("age", LegacySQLTypeName.INTEGER), + Field.of("adult", LegacySQLTypeName.BOOLEAN)); + FieldList addressSchema = + FieldList.of( + Field.of("state", LegacySQLTypeName.STRING), + Field.of("zip", LegacySQLTypeName.INTEGER)); + FieldList rootStructSchema = + FieldList.of( + Field.of("profile", LegacySQLTypeName.RECORD, profileSchema), + Field.of("address", LegacySQLTypeName.RECORD, addressSchema)); + + JsonStringHashMap<String, Object> profileValue = + new JsonStringHashMap<String, Object>() { + { + put("name", new Text("Arya")); + put("age", 15L); + put("adult", FALSE); + } + }; + JsonStringHashMap<String, Object> addressValue = + new JsonStringHashMap<String, Object>() { + { + put("state", new Text("Michigan")); + put("zip", 49086L); + } + }; + JsonStringHashMap<String, Object> rootStructValue = + new JsonStringHashMap<String, Object>() { + { + put("profile", profileValue); + put("address", addressValue); + } + }; + + Struct struct = new BigQueryArrowStruct(rootStructSchema, rootStructValue); + Object[] attributes = struct.getAttributes(); + Struct profileStruct = (Struct) attributes[0]; + Struct addressStruct = (Struct) attributes[1]; + + assertThat(profileStruct.getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray()); + assertThat(addressStruct.getAttributes()).isEqualTo(asList("Michigan", 49086L).toArray()); + } + + @Test + public void getSQLTypeNameIsNotSupported() { + Exception exception = + assertThrows( + SQLFeatureNotSupportedException.class, structWithPrimitiveValues::getSQLTypeName); + assertThat(exception.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Test + public void getAttributesWithCustomTypeMappingsIsNotSupported() { + Exception exception = + assertThrows( + SQLFeatureNotSupportedException.class, + () -> structWithPrimitiveValues.getAttributes(emptyMap())); + assertThat(exception.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSetTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSetTest.java new file mode 100644 index 0000000000..90dad9935a --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBaseResultSetTest.java @@ -0,0 +1,104 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.CALLS_REAL_METHODS; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobId; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import java.lang.reflect.Field; +import org.junit.Before; +import org.junit.Test; + +public class BigQueryBaseResultSetTest { + private BigQuery bigQuery; + private BigQueryBaseResultSet resultSet; + private Job job; + private QueryStatistics statistics; + + @Before + public void setUp() { + // QueryStatistics is mocked because its builder is not accessible from + // outside the package. + bigQuery = mock(BigQuery.class); + job = mock(Job.class); + doReturn(job).when(bigQuery).getJob(any(JobId.class)); + + statistics = mock(QueryStatistics.class); + doReturn(statistics).when(job).getStatistics(); + + resultSet = mock(BigQueryBaseResultSet.class, CALLS_REAL_METHODS); + try { + Field field = BigQueryBaseResultSet.class.getDeclaredField("bigQuery"); + field.setAccessible(true); + field.set(resultSet, bigQuery); + } catch (Exception e) { + fail("Could not inject the BigQuery client: " + e.getMessage()); + } + } + + @Test + public void testGetQueryId() { + resultSet.setQueryId("queryId"); + assertThat(resultSet.getQueryId()).isEqualTo("queryId"); + } + + @Test + public void testGetJobId() { + resultSet.setJobId(JobId.of("jobId")); + assertThat(resultSet.getJobId()).isEqualTo(JobId.of("jobId")); + } + + @Test + public void testGetQueryStatistics() { + resultSet.setJobId(JobId.of("jobId")); + assertThat(resultSet.getQueryStatistics()).isInstanceOf(QueryStatistics.class); + } + + @Test + public void testGetQueryStatisticsCaching() { + resultSet.setJobId(JobId.of("jobId")); + assertThat(resultSet.getQueryStatistics()).isInstanceOf(QueryStatistics.class); + // Change return value to null to ensure lazy init saved the state + doReturn(null).when(job).getStatistics(); + assertThat(resultSet.getQueryStatistics()).isInstanceOf(QueryStatistics.class); + } + + @Test + public void testGetQueryStatistics_no_client() { + resultSet = mock(BigQueryBaseResultSet.class, CALLS_REAL_METHODS); + assertThat(resultSet.getQueryStatistics()).isNull(); + } + + @Test + public void testGetQueryStatistics_no_job_id() { + assertThat(resultSet.getQueryStatistics()).isNull(); + } + + @Test + public void testGetQueryStatistics_no_job() { + doReturn(null).when(bigQuery).getJob(any(JobId.class)); + resultSet.setJobId(JobId.of("jobId")); + assertThat(resultSet.getQueryStatistics()).isNull(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBigQueryTypeCoercerBuilderTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBigQueryTypeCoercerBuilderTest.java new file mode 100644 index 0000000000..4af1632456 --- /dev/null +++
b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryBigQueryTypeCoercerBuilderTest.java @@ -0,0 +1,43 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; + +import com.google.cloud.bigquery.jdbc.TestType.Text; +import org.junit.Test; + +public class BigQueryBigQueryTypeCoercerBuilderTest { + + @Test + public void shouldBeAbleToConvertCustomTypes() { + byte[] bytesArray = {72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33}; + Text text = new Text(bytesArray); + + BigQueryTypeCoercer bigQueryTypeCoercer = + new BigQueryTypeCoercerBuilder().registerTypeCoercion(new TextToStringCoercion()).build(); + + assertThat(bigQueryTypeCoercer.coerceTo(String.class, text)).isEqualTo("Hello World!"); + } + + private static class TextToStringCoercion implements BigQueryCoercion<Text, String> { + @Override + public String coerce(Text value) { + return new String(value.getBytes()); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatementTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatementTest.java new file mode 100644 index 0000000000..f9729bf21e --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryCallableStatementTest.java @@ -0,0 +1,1118 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; + +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.BigQueryParameterHandler.BigQueryStatementParameterType; +import java.io.IOException; +import java.io.Reader; +import java.io.StringReader; +import java.math.BigDecimal; +import java.sql.*; +import java.util.Calendar; +import java.util.HashMap; +import java.util.Map; +import org.junit.Before; +import org.junit.Test; + +public class BigQueryCallableStatementTest { + + private BigQueryConnection bigQueryConnection; + private static final String GET_PARAM_KEY = "ParamKey"; + private static final String PARAM_KEY = GET_PARAM_KEY; + + @Before + public void setUp() throws IOException, SQLException { + bigQueryConnection = mock(BigQueryConnection.class); + } + + @Test + public void testCreateCallableStatement() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc"); + assertNotNull(statement); + + assertEquals("call testProc", statement.getCallableStatementSql()); + } + + @Test + public void testRegisterOutParamIndexVarchar() throws SQLException { + registerOutParamIndexHelper(1, Types.VARCHAR, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamIndexNVarchar() throws SQLException { + registerOutParamIndexHelper(1, Types.NVARCHAR, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamIndexBigInt() throws SQLException { + registerOutParamIndexHelper(1, Types.BIGINT, Long.class, StandardSQLTypeName.INT64, -1); + } + + @Test + public void testRegisterOutParamIndexInteger() throws SQLException { + registerOutParamIndexHelper(1, Types.INTEGER, Integer.class, StandardSQLTypeName.INT64, -1); + } + + @Test + public void testRegisterOutParamIndexBoolean() throws SQLException { + registerOutParamIndexHelper(1, Types.BOOLEAN, Boolean.class, StandardSQLTypeName.BOOL, -1); + } + + @Test + public void testRegisterOutParamIndexDouble() throws SQLException { + registerOutParamIndexHelper(1, Types.DOUBLE, Double.class, StandardSQLTypeName.FLOAT64, -1); + } + + @Test + public void testRegisterOutParamIndexFloat() throws SQLException { + registerOutParamIndexHelper(1, Types.FLOAT, Float.class, StandardSQLTypeName.FLOAT64, -1); + } + + @Test + public void testRegisterOutParamIndexNumeric() throws SQLException { + registerOutParamIndexHelper(1, Types.NUMERIC, BigDecimal.class, StandardSQLTypeName.NUMERIC, 2); + } + + @Test + public void testRegisterOutParamIndexTimestamp() throws SQLException { + registerOutParamIndexHelper( + 1, Types.TIMESTAMP, Timestamp.class, StandardSQLTypeName.TIMESTAMP, -1); + } + + @Test + public void testRegisterOutParamIndexDate() throws SQLException { + registerOutParamIndexHelper(1, Types.DATE, Date.class, StandardSQLTypeName.DATE, -1); + } + + @Test + public void testRegisterOutParamIndexTime() throws SQLException { + registerOutParamIndexHelper(1, Types.TIME, Time.class, StandardSQLTypeName.TIME, -1); + } + + @Test + public void testRegisterOutParamIndexOther() throws SQLException { + registerOutParamIndexHelper(1, Types.OTHER, String.class, StandardSQLTypeName.STRING, -1); + } + + 
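// BINARY and VARBINARY both map to BigQuery BYTES, so each registers as byte[]. +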
@Test + public void testRegisterOutParamIndexBinary() throws SQLException { + registerOutParamIndexHelper(1, Types.BINARY, byte[].class, StandardSQLTypeName.BYTES, -1); + } + + @Test + public void testRegisterOutParamIndexVarBinary() throws SQLException { + registerOutParamIndexHelper(1, Types.VARBINARY, byte[].class, StandardSQLTypeName.BYTES, -1); + } + + @Test + public void testRegisterOutParamIndexStruct() throws SQLException { + registerOutParamIndexHelper(1, Types.STRUCT, Struct.class, StandardSQLTypeName.STRUCT, -1); + } + + @Test + public void testRegisterOutParamIndexArray() throws SQLException { + registerOutParamIndexHelper(1, Types.ARRAY, Array.class, StandardSQLTypeName.ARRAY, -1); + } + + @Test + public void testRegisterOutParamIndexBit() throws SQLException { + registerOutParamIndexHelper(1, Types.BIT, Boolean.class, StandardSQLTypeName.BOOL, -1); + } + + @Test + public void testRegisterOutParamNameVarchar() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.VARCHAR, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamNameNVarchar() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.NVARCHAR, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamNameBigInt() throws SQLException { + registerOutParamNameHelper("ParamKey", Types.BIGINT, Long.class, StandardSQLTypeName.INT64, -1); + } + + @Test + public void testRegisterOutParamNameInteger() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.INTEGER, Integer.class, StandardSQLTypeName.INT64, -1); + } + + @Test + public void testRegisterOutParamNameBoolean() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.BOOLEAN, Boolean.class, StandardSQLTypeName.BOOL, -1); + } + + @Test + public void testRegisterOutParamNameDouble() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.DOUBLE, Double.class, StandardSQLTypeName.FLOAT64, -1); + } + + @Test + public void testRegisterOutParamNameFloat() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.FLOAT, Float.class, StandardSQLTypeName.FLOAT64, -1); + } + + @Test + public void testRegisterOutParamNameNumeric() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.NUMERIC, BigDecimal.class, StandardSQLTypeName.NUMERIC, 2); + } + + @Test + public void testRegisterOutParamNameTimestamp() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.TIMESTAMP, Timestamp.class, StandardSQLTypeName.TIMESTAMP, -1); + } + + @Test + public void testRegisterOutParamNameDate() throws SQLException { + registerOutParamNameHelper("ParamKey", Types.DATE, Date.class, StandardSQLTypeName.DATE, -1); + } + + @Test + public void testRegisterOutParamNameTime() throws SQLException { + registerOutParamNameHelper("ParamKey", Types.TIME, Time.class, StandardSQLTypeName.TIME, -1); + } + + @Test + public void testRegisterOutParamNameOther() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.OTHER, String.class, StandardSQLTypeName.STRING, -1); + } + + @Test + public void testRegisterOutParamNameBinary() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.BINARY, byte[].class, StandardSQLTypeName.BYTES, -1); + } + + @Test + public void testRegisterOutParamNameVarBinary() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.VARBINARY, byte[].class, StandardSQLTypeName.BYTES, -1); + } + + @Test + public void 
testRegisterOutParamNameStruct() throws SQLException { + registerOutParamNameHelper( + "ParamKey", Types.STRUCT, Struct.class, StandardSQLTypeName.STRUCT, -1); + } + + @Test + public void testRegisterOutParamNameArray() throws SQLException { + registerOutParamNameHelper("ParamKey", Types.ARRAY, Array.class, StandardSQLTypeName.ARRAY, -1); + } + + @Test + public void testRegisterOutParamNameBit() throws SQLException { + registerOutParamNameHelper("ParamKey", Types.BIT, Boolean.class, StandardSQLTypeName.BOOL, -1); + } + + @Test + public void testRegisterOutParamIndexScaleFail() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + assertThrows( + IllegalArgumentException.class, () -> statement.registerOutParameter(1, Types.VARCHAR, 3)); + } + + @Test + public void testRegisterOutNameIndexScaleFail() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + assertThrows( + IllegalArgumentException.class, + () -> statement.registerOutParameter("ParamKey", Types.VARCHAR, 3)); + } + + @Test + public void testGetArrayParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Array expected = mock(Array.class); + + statement.getParameterHandler().setParameter(1, expected, Array.class); + Array actual = statement.getArray(1); + assertEquals(expected, actual); + } + + @Test + public void testGetArrayParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Array expected = mock(Array.class); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Array.class, BigQueryStatementParameterType.IN, 0); + Array actual = statement.getArray(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetBigDecimalParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + BigDecimal expected = mock(BigDecimal.class); + + statement.getParameterHandler().setParameter(1, expected, BigDecimal.class); + BigDecimal actual = statement.getBigDecimal(1); + assertEquals(expected, actual); + } + + @Test + public void testGetBigDecimalParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + BigDecimal expected = mock(BigDecimal.class); + + statement + .getParameterHandler() + .setParameter( + GET_PARAM_KEY, expected, BigDecimal.class, BigQueryStatementParameterType.IN, 0); + BigDecimal actual = statement.getBigDecimal(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetBooleanParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Boolean expected = true; + + statement.getParameterHandler().setParameter(1, expected, Boolean.class); + Boolean actual = statement.getBoolean(1); + assertEquals(expected, actual); + } + + @Test + public void testGetBooleanParamByName() throws SQLException { + BigQueryCallableStatement statement = +
new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Boolean expected = true; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Boolean.class, BigQueryStatementParameterType.IN, 0); + Boolean actual = statement.getBoolean(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetByteParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Byte expected = "hello".getBytes()[0]; + + statement.getParameterHandler().setParameter(1, expected, Byte.class); + Byte actual = statement.getByte(1); + assertEquals(expected, actual); + } + + @Test + public void testGetByteParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Byte expected = "hello".getBytes()[0]; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Byte.class, BigQueryStatementParameterType.IN, 0); + Byte actual = statement.getByte(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetBytesParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + byte[] expected = "hello".getBytes(); + + statement.getParameterHandler().setParameter(1, expected, byte[].class); + byte[] actual = statement.getBytes(1); + assertEquals(expected, actual); + } + + @Test + public void testGetBytesParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + byte[] expected = "hello".getBytes(); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, byte[].class, BigQueryStatementParameterType.IN, 0); + byte[] actual = statement.getBytes(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetCharacterStreamParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + Reader actual = statement.getCharacterStream(1); + assertNotNull(actual); + assertTrue(actual instanceof StringReader); + } + + @Test + public void testGetCharacterStreamParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + Reader actual = statement.getCharacterStream(GET_PARAM_KEY); + assertNotNull(actual); + assertTrue(actual instanceof StringReader); + } + + @Test + public void testGetDateParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Date expected = mock(Date.class); + + statement.getParameterHandler().setParameter(1, expected, Date.class); + Date actual = statement.getDate(1); + assertEquals(expected, actual); + } + + @Test + public void 
testGetDateParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Date expected = mock(Date.class); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Date.class, BigQueryStatementParameterType.IN, 0); + Date actual = statement.getDate(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetDateParamWithCalByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Date expected = new Date(cal.getTimeInMillis()); + + statement.getParameterHandler().setParameter(1, expected, Date.class); + Date actual = statement.getDate(1, cal); + assertEquals(expected, actual); + } + + @Test + public void testGetDateParamWithCalByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Date expected = new Date(cal.getTimeInMillis()); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Date.class, BigQueryStatementParameterType.IN, 0); + Date actual = statement.getDate(GET_PARAM_KEY, cal); + assertEquals(expected, actual); + } + + @Test + public void testGetDoubleParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Double expected = 10.123; + + statement.getParameterHandler().setParameter(1, expected, Double.class); + Double actual = statement.getDouble(1); + assertEquals(expected, actual); + } + + @Test + public void testGetDoubleParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Double expected = 10.123; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Double.class, BigQueryStatementParameterType.IN, 0); + Double actual = statement.getDouble(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetFloatParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Float expected = 10.123F; + + statement.getParameterHandler().setParameter(1, expected, Float.class); + Float actual = statement.getFloat(1); + assertEquals(expected, actual); + } + + @Test + public void testGetFloatParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Float expected = 10.123F; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Float.class, BigQueryStatementParameterType.IN, 0); + Float actual = statement.getFloat(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetIntegerParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Integer expected = 10; + + statement.getParameterHandler().setParameter(1, expected, Integer.class); + Integer actual = statement.getInt(1); + 
assertEquals(expected, actual); + } + + @Test + public void testGetIntegerParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Integer expected = 10; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Integer.class, BigQueryStatementParameterType.IN, 0); + Integer actual = statement.getInt(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetLongParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 10L; + + statement.getParameterHandler().setParameter(1, expected, Long.class); + Long actual = statement.getLong(1); + assertEquals(expected, actual); + } + + @Test + public void testGetLongParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 10L; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Long.class, BigQueryStatementParameterType.IN, 0); + Long actual = statement.getLong(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetNCharacterStreamParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + Reader actual = statement.getNCharacterStream(1); + assertNotNull(actual); + assertTrue(actual instanceof StringReader); + } + + @Test + public void testGetNCharacterStreamParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + Reader actual = statement.getNCharacterStream(GET_PARAM_KEY); + assertNotNull(actual); + assertTrue(actual instanceof StringReader); + } + + @Test + public void testGetNStringParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + String actual = statement.getNString(1); + assertEquals(expected, actual); + } + + @Test + public void testGetNStringByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + String actual = statement.getNString(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetObjectParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + Object actual = 
statement.getObject(1); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetObjectParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + Object actual = statement.getObject(GET_PARAM_KEY); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetObjectParamWithMapByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Map<String, Class<?>> map = new HashMap<>(); + map.putIfAbsent(StandardSQLTypeName.STRING.name(), String.class); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + Object actual = statement.getObject(1, map); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetObjectParamWithMapByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Map<String, Class<?>> map = new HashMap<>(); + map.putIfAbsent(StandardSQLTypeName.STRING.name(), String.class); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + Object actual = statement.getObject(GET_PARAM_KEY, map); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetObjectParamWithClassByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + Object actual = statement.getObject(1, String.class); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetObjectParamWithClassByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + Object actual = statement.getObject(GET_PARAM_KEY, String.class); + assertEquals(expected, actual.toString()); + } + + @Test + public void testGetStringParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "test"; + + statement.getParameterHandler().setParameter(1, expected, String.class); + String actual = statement.getString(1); + assertEquals(expected, actual); + } + + @Test + public void testGetStringParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "test"; + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, String.class, BigQueryStatementParameterType.IN, 0); + String actual = statement.getString(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetTimeParamByIndex() throws SQLException { +
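// The Time and Timestamp getters mirror the Date tests above: the value stored on the parameter handler is returned unchanged, whether looked up by position or by name. +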
BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Time expected = mock(Time.class); + + statement.getParameterHandler().setParameter(1, expected, Time.class); + Time actual = statement.getTime(1); + assertEquals(expected, actual); + } + + @Test + public void testGetTimeParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Time expected = mock(Time.class); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Time.class, BigQueryStatementParameterType.IN, 0); + Time actual = statement.getTime(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetTimeParamWithCalByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Time expected = new Time(cal.getTimeInMillis()); + + statement.getParameterHandler().setParameter(1, expected, Time.class); + Time actual = statement.getTime(1, cal); + assertEquals(expected, actual); + } + + @Test + public void testGetTimeParamWithCalByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Time expected = new Time(cal.getTimeInMillis()); + + statement + .getParameterHandler() + .setParameter(GET_PARAM_KEY, expected, Time.class, BigQueryStatementParameterType.IN, 0); + Time actual = statement.getTime(GET_PARAM_KEY, cal); + assertEquals(expected, actual); + } + + @Test + public void testGetTimestampParamByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Timestamp expected = mock(Timestamp.class); + + statement.getParameterHandler().setParameter(1, expected, Timestamp.class); + Timestamp actual = statement.getTimestamp(1); + assertEquals(expected, actual); + } + + @Test + public void testGetTimestampParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Timestamp expected = mock(Timestamp.class); + + statement + .getParameterHandler() + .setParameter( + GET_PARAM_KEY, expected, Timestamp.class, BigQueryStatementParameterType.IN, 0); + Timestamp actual = statement.getTimestamp(GET_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testGetTimestampParamWithCalByIndex() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Timestamp expected = new Timestamp(cal.getTimeInMillis()); + + statement.getParameterHandler().setParameter(1, expected, Timestamp.class); + Timestamp actual = statement.getTimestamp(1, cal); + assertEquals(expected, actual); + } + + @Test + public void testGetTimestampParamWithCalByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Calendar cal = Calendar.getInstance(); + Timestamp expected = new 
Timestamp(cal.getTimeInMillis()); + + statement + .getParameterHandler() + .setParameter( + GET_PARAM_KEY, expected, Timestamp.class, BigQueryStatementParameterType.IN, 0); + Timestamp actual = statement.getTimestamp(GET_PARAM_KEY, cal); + assertEquals(expected, actual); + } + + @Test + public void testSetBigDecimalParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + BigDecimal expected = mock(BigDecimal.class); + + statement.setBigDecimal(PARAM_KEY, expected); + BigDecimal actual = statement.getBigDecimal(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetBooleanParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Boolean expected = true; + + statement.setBoolean(PARAM_KEY, expected); + Boolean actual = statement.getBoolean(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetByteParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Byte expected = "hello".getBytes()[0]; + + statement.setByte(PARAM_KEY, expected); + Byte actual = statement.getByte(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetByteArrayParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + byte[] expected = "hello".getBytes(); + + statement.setBytes(PARAM_KEY, expected); + byte[] actual = statement.getBytes(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetDateParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Date expected = mock(Date.class); + statement.setDate(PARAM_KEY, expected); + Date actual = statement.getDate(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetDateCalParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Date expectedDate = mock(Date.class); + Calendar expectedCal = mock(Calendar.class); + + doReturn(1L).when(expectedDate).getTime(); + doReturn(1L).when(expectedCal).getTimeInMillis(); + statement.setDate(PARAM_KEY, expectedDate, expectedCal); + Date actual = statement.getDate(PARAM_KEY); + assertEquals(new Date(1L), actual); + actual = statement.getDate(PARAM_KEY, expectedCal); + assertEquals(new Date(1L), actual); + } + + @Test + public void testSetDoubleParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Double expected = 123.123; + statement.setDouble(PARAM_KEY, expected); + Double actual = statement.getDouble(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetFloatParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Float expected = 123.123F; +
statement.setFloat(PARAM_KEY, expected); + Float actual = statement.getFloat(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetIntParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Integer expected = 1; + statement.setInt(PARAM_KEY, expected); + Integer actual = statement.getInt(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetLongParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 1L; + statement.setLong(PARAM_KEY, expected); + Long actual = statement.getLong(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetObjectParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 1L; + statement.setObject(PARAM_KEY, expected); + Long actual = statement.getObject(PARAM_KEY, Long.class); + assertEquals(expected, actual); + } + + @Test + public void testSetObjectSqlTypeParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 1L; + statement.setObject(PARAM_KEY, expected, java.sql.Types.BIGINT); + Long actual = statement.getObject(PARAM_KEY, Long.class); + assertEquals(expected, actual); + } + + @Test + public void testSetObjectSqlTypeScaleParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Long expected = 1L; + statement.setObject(PARAM_KEY, expected, java.sql.Types.BIGINT, 5); + Long actual = statement.getObject(PARAM_KEY, Long.class); + assertEquals(expected, actual); + int scale = statement.getParameterHandler().getParameterScale(PARAM_KEY); + assertEquals(5, scale); + } + + @Test + public void testSetStringParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + String expected = "hello"; + statement.setString(PARAM_KEY, expected); + String actual = statement.getString(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetTimeParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Time expected = mock(Time.class); + statement.setTime(PARAM_KEY, expected); + Time actual = statement.getTime(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetTimeCalParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Time expectedTime = mock(Time.class); + Calendar expectedCal = mock(Calendar.class); + + doReturn(1L).when(expectedTime).getTime(); + doReturn(1L).when(expectedCal).getTimeInMillis(); + statement.setTime(PARAM_KEY, expectedTime, expectedCal); + Time actual = statement.getTime(PARAM_KEY); + assertEquals(new Time(1L), actual); + actual = statement.getTime(PARAM_KEY,
expectedCal); + assertEquals(new Time(1L), actual); + } + + @Test + public void testSetTimestampParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Timestamp expected = mock(Timestamp.class); + statement.setTimestamp(PARAM_KEY, expected); + Timestamp actual = statement.getTimestamp(PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetTimestampCalParamByName() throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + Timestamp expectedTimestamp = mock(Timestamp.class); + Calendar expectedCal = mock(Calendar.class); + + doReturn(1L).when(expectedTimestamp).getTime(); + doReturn(1L).when(expectedCal).getTimeInMillis(); + statement.setTimestamp(PARAM_KEY, expectedTimestamp, expectedCal); + Timestamp actual = statement.getTimestamp(PARAM_KEY); + assertEquals(new Timestamp(1L), actual); + actual = statement.getTimestamp(PARAM_KEY, expectedCal); + assertEquals(new Timestamp(1L), actual); + } + + ////////// Private helper methods //////////////////////// + private void registerOutParamIndexHelper( + int paramIndex, int javaSqlType, Class<?> javaType, StandardSQLTypeName sqlType, int scale) + throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + + if (scale >= 0) { + statement.registerOutParameter(paramIndex, javaSqlType, scale); + } else { + statement.registerOutParameter(paramIndex, javaSqlType); + } + BigQueryParameterHandler paramHandler = statement.getParameterHandler(); + assertNotNull(paramHandler); + + assertNull(paramHandler.getParameter(paramIndex)); + assertEquals(BigQueryStatementParameterType.OUT, paramHandler.getParameterType(paramIndex)); + assertEquals(scale, paramHandler.getParameterScale(paramIndex)); + assertEquals(javaType, paramHandler.getType(paramIndex)); + assertEquals(sqlType, paramHandler.getSqlType(paramIndex)); + } + + private void registerOutParamNameHelper( + String paramName, int javaSqlType, Class<?> javaType, StandardSQLTypeName sqlType, int scale) + throws SQLException { + BigQueryCallableStatement statement = + new BigQueryCallableStatement(bigQueryConnection, "call testProc('?')"); + assertNotNull(statement); + + if (scale >= 0) { + statement.registerOutParameter(paramName, javaSqlType, scale); + } else { + statement.registerOutParameter(paramName, javaSqlType); + } + BigQueryParameterHandler paramHandler = statement.getParameterHandler(); + assertNotNull(paramHandler); + + assertNull(paramHandler.getParameter(paramName)); + assertEquals(BigQueryStatementParameterType.OUT, paramHandler.getParameterType(paramName)); + assertEquals(scale, paramHandler.getParameterScale(paramName)); + assertEquals(javaType, paramHandler.getType(paramName)); + assertEquals(sqlType, paramHandler.getSqlType(paramName)); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionTest.java new file mode 100644 index 0000000000..0927271add --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryConnectionTest.java @@ -0,0 +1,416 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed
under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.*; + +import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; +import com.google.api.gax.rpc.HeaderProvider; +import com.google.api.gax.rpc.TransportChannelProvider; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.storage.v1.BigQueryReadClient; +import com.google.cloud.bigquery.storage.v1.BigQueryWriteClient; +import java.io.IOException; +import java.io.InputStream; +import java.sql.SQLException; +import java.util.Properties; +import org.junit.Before; +import org.junit.Test; + +public class BigQueryConnectionTest { + + private static final String DEFAULT_VERSION = "0.0.0"; + private static final String DEFAULT_JDBC_TOKEN_VALUE = "Google-BigQuery-JDBC-Driver"; + private static final String BASE_URL = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;OAuthAccessToken=redacted;ProjectId=project;"; + private String expectedVersion; + + @Before + public void setUp() throws IOException { + // Read the expected version from the dependencies.properties file once. 
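+ // If the bundled dependencies.properties resource or its version.jdbc entry + // cannot be read, getExpectedVersion() below falls back to DEFAULT_VERSION ("0.0.0").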
+ expectedVersion = getExpectedVersion(); + } + + private String getExpectedVersion() { + Properties props = new Properties(); + try (InputStream in = + getClass().getResourceAsStream("/com/google/cloud/bigquery/jdbc/dependencies.properties")) { + if (in != null) { + props.load(in); + String version = props.getProperty("version.jdbc"); + if (version != null) { + return version; + } + } + } catch (IOException e) { + System.err.println("Error reading dependencies.properties: " + e.getMessage()); + } + return DEFAULT_VERSION; + } + + @Test + public void testGetLibraryVersion() throws IOException, SQLException { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;"; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + String result = connection.getLibraryVersion(BigQueryConnection.class); + assertEquals(expectedVersion, result); + } + } + + @Test + public void testHeaderProvider() throws IOException, SQLException { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;"; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + HeaderProvider headerProvider = connection.createHeaderProvider(); + String agent = headerProvider.getHeaders().get("user-agent"); + assertTrue(agent.startsWith(DEFAULT_JDBC_TOKEN_VALUE + "/" + expectedVersion)); + assertFalse(agent.contains("(GPN:")); + } + } + + @Test + public void testHeaderProviderWithPartnerToken() throws IOException, SQLException { + String partnerTokenString = "(GPN:MyPartner; staging)"; + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;PartnerToken=" + + partnerTokenString; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + HeaderProvider headerProvider = connection.createHeaderProvider(); + String agent = headerProvider.getHeaders().get("user-agent"); + assertTrue( + agent.startsWith( + DEFAULT_JDBC_TOKEN_VALUE + "/" + expectedVersion + " " + partnerTokenString)); + assertTrue(agent.contains("(GPN:")); + assertTrue(agent.contains("MyPartner;")); + assertTrue(agent.contains("staging)")); + } + } + + @Test + public void testHeaderProviderWithEmptyPartnerToken() throws IOException, SQLException { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;PartnerToken="; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + HeaderProvider headerProvider = connection.createHeaderProvider(); + String agent = headerProvider.getHeaders().get("user-agent"); + assertTrue(agent.startsWith(DEFAULT_JDBC_TOKEN_VALUE + "/" + expectedVersion)); + assertFalse(agent.contains("(GPN:")); + } + } + + @Test + public void testHeaderProviderWithPartnerTokenNoEnv() throws IOException, SQLException { + String partnerTokenString = "(GPN:MyPartner)"; + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + 
"OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;PartnerToken=" + + partnerTokenString; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + HeaderProvider headerProvider = connection.createHeaderProvider(); + String agent = headerProvider.getHeaders().get("user-agent"); + assertTrue( + agent.startsWith( + DEFAULT_JDBC_TOKEN_VALUE + "/" + expectedVersion + " " + partnerTokenString)); + assertTrue(agent.contains("GPN:")); + assertTrue(agent.contains("MyPartner")); + } + } + + @Test + public void testHeaderProviderWithInvalidPartner() throws IOException, SQLException { + String partnerTokenString = "(MyPartner; staging)"; + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;PartnerToken=" + + partnerTokenString; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + HeaderProvider headerProvider = connection.createHeaderProvider(); + String agent = headerProvider.getHeaders().get("user-agent"); + assertTrue(agent.startsWith(DEFAULT_JDBC_TOKEN_VALUE + "/" + expectedVersion)); + assertFalse(agent.contains("(MyPartner;")); + assertFalse(agent.contains("(GPN:")); + } + } + + @Test + public void testHeaderProviderWithRequestReason() throws IOException, SQLException { + String requestReason = "Ticket123"; + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;RequestReason=" + + requestReason; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + HeaderProvider headerProvider = connection.createHeaderProvider(); + java.util.Map headers = headerProvider.getHeaders(); + assertTrue(headers.containsKey("x-goog-request-reason")); + assertEquals(requestReason, headers.get("x-goog-request-reason")); + } + } + + @Test + public void testWriteAPIConnectionProperties() throws SQLException { + // Test without connection properties. Defaults to default values. 
+ String connectionUriDefault = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;"; + try (BigQueryConnection connectionDefault = new BigQueryConnection(connectionUriDefault)) { + + assertFalse(connectionDefault.enableWriteAPI); + assertEquals(3, connectionDefault.writeAPIActivationRowCount); + assertEquals(1000, connectionDefault.writeAPIAppendRowCount); + } catch (IOException | SQLException e) { + throw new BigQueryJdbcException(e); + } + + // Test with connection properties + String connectionUri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;" + + "EnableWriteAPI=1;SWA_ActivationRowCount=6;SWA_AppendRowCount=500"; + try (BigQueryConnection connection = new BigQueryConnection(connectionUri)) { + assertTrue(connection.enableWriteAPI); + assertEquals(6, connection.writeAPIActivationRowCount); + assertEquals(500, connection.writeAPIAppendRowCount); + } catch (IOException | SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testGetWriteClient() throws SQLException { + // Test without connection properties; the write client should be created lazily with default settings. + String connectionUriDefault = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;"; + try (BigQueryConnection connectionDefault = new BigQueryConnection(connectionUriDefault)) { + assertNull(connectionDefault.bigQueryWriteClient); + // Lazy initialization + BigQueryWriteClient writeClient = connectionDefault.getBigQueryWriteClient(); + assertNotNull(writeClient); + assertFalse(writeClient.isShutdown()); + } catch (SQLException | IOException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testAdditionalProjects() throws IOException, BigQueryJdbcException { + String url1 = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;" + + "AdditionalProjects=projA,projB"; + try (BigQueryConnection conn1 = new BigQueryConnection(url1)) { + String additionalProjects1 = conn1.getAdditionalProjects(); + assertNotNull(additionalProjects1); + assertEquals("projA,projB", additionalProjects1); + } catch (SQLException | IOException e) { + throw new BigQueryJdbcException(e); + } + String url2 = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;" + + "AdditionalProjects=projX"; + try (BigQueryConnection conn2 = new BigQueryConnection(url2)) { + String additionalProjects2 = conn2.getAdditionalProjects(); + assertNotNull(additionalProjects2); + assertEquals("projX", additionalProjects2); + } catch (SQLException | IOException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testFilterTablesOnDefaultDatasetProperty() throws SQLException, IOException { + // Test default value + String urlDefault = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + +
"OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;"; + try (BigQueryConnection connectionDefault = new BigQueryConnection(urlDefault)) { + assertFalse( + "Default value for FilterTablesOnDefaultDataset should be false", + connectionDefault.isFilterTablesOnDefaultDataset()); + } catch (SQLException | IOException e) { + throw new BigQueryJdbcException(e); + } + + // Test explicitly setting to true + String urlTrue = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;" + + "FilterTablesOnDefaultDataset=1;"; + try (BigQueryConnection connectionTrue = new BigQueryConnection(urlTrue)) { + assertTrue( + "FilterTablesOnDefaultDataset should be true when set to 1", + connectionTrue.isFilterTablesOnDefaultDataset()); + } catch (SQLException | IOException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testRequestGoogleDriveScopeProperty() throws IOException, SQLException { + // Test enabled + String urlEnabled = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;" + + "RequestGoogleDriveScope=1;"; + try (BigQueryConnection connectionEnabled = new BigQueryConnection(urlEnabled)) { + assertEquals( + "RequestGoogleDriveScope should be enabled when set to 1", + 1, + connectionEnabled.isRequestGoogleDriveScope()); + } catch (SQLException | IOException e) { + throw new BigQueryJdbcException(e); + } + + // Test disabled + String urlDisabled = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;" + + "RequestGoogleDriveScope=0;"; + try (BigQueryConnection connectionDisabled = new BigQueryConnection(urlDisabled)) { + assertEquals( + "RequestGoogleDriveScope should be disabled when set to 0", + 0, + connectionDisabled.isRequestGoogleDriveScope()); + } catch (SQLException | IOException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testMetaDataFetchThreadCountProperty() throws SQLException, IOException { + // Test Case 1: Should use the default value when the property is not specified. + String urlDefault = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;"; + try (BigQueryConnection connectionDefault = new BigQueryConnection(urlDefault)) { + assertEquals( + "Should use the default value when the property is not set", + BigQueryJdbcUrlUtility.DEFAULT_METADATA_FETCH_THREAD_COUNT_VALUE, + connectionDefault.getMetadataFetchThreadCount()); + } + + // Test Case 2: Should use the custom value when a valid integer is provided. 
+ String urlCustom = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;" + + "MetaDataFetchThreadCount=16;"; + try (BigQueryConnection connectionCustom = new BigQueryConnection(urlCustom)) { + assertEquals( + "Should use the custom value when a valid integer is provided", + 16, + connectionCustom.getMetadataFetchThreadCount()); + } + } + + @Test + public void testBigQueryReadClientKeepAliveSettings() throws SQLException, IOException { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;"; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + BigQueryReadClient readClient = connection.getBigQueryReadClient(); + assertNotNull(readClient); + + TransportChannelProvider provider = readClient.getSettings().getTransportChannelProvider(); + assertTrue(provider instanceof InstantiatingGrpcChannelProvider); + + InstantiatingGrpcChannelProvider grpcProvider = (InstantiatingGrpcChannelProvider) provider; + assertEquals(java.time.Duration.ofSeconds(10), grpcProvider.getKeepAliveTimeDuration()); + assertEquals(java.time.Duration.ofSeconds(5), grpcProvider.getKeepAliveTimeoutDuration()); + assertTrue(grpcProvider.getKeepAliveWithoutCalls()); + } + } + + @Test + public void testBigQueryJobCreationMode_required() throws Exception { + String url = BASE_URL + "JobCreationMode=1;"; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + BigQuery bq = connection.getBigQuery(); + assertEquals( + JobCreationMode.JOB_CREATION_REQUIRED, bq.getOptions().getDefaultJobCreationMode()); + } + } + + @Test + public void testBigQueryJobCreationMode_optional() throws Exception { + String url = BASE_URL + "JobCreationMode=2;"; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + BigQuery bq = connection.getBigQuery(); + assertEquals( + JobCreationMode.JOB_CREATION_OPTIONAL, bq.getOptions().getDefaultJobCreationMode()); + } + } + + @Test + public void testBigQueryJobCreationMode_default() throws Exception { + String url = BASE_URL; + try (BigQueryConnection connection = new BigQueryConnection(url)) { + BigQuery bq = connection.getBigQuery(); + assertEquals( + JobCreationMode.JOB_CREATION_OPTIONAL, bq.getOptions().getDefaultJobCreationMode()); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTaskTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTaskTest.java new file mode 100644 index 0000000000..b99ff4bec5 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDaemonPollingTaskTest.java @@ -0,0 +1,63 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + + package com.google.cloud.bigquery.jdbc; + + import static com.google.common.truth.Truth.assertThat; + + import com.google.cloud.bigquery.jdbc.BigQueryResultSetFinalizers.ArrowResultSetFinalizer; + import java.lang.ref.ReferenceQueue; + import java.util.ArrayList; + import java.util.List; + import org.junit.Before; + import org.junit.Test; + + public class BigQueryDaemonPollingTaskTest { + + static ReferenceQueue referenceQueueArrowRs; + static ReferenceQueue referenceQueueJsonRs; + static List arrowResultSetFinalizers; + static List jsonResultSetFinalizers; + + @Before + public void setUp() { + referenceQueueArrowRs = new ReferenceQueue<>(); + referenceQueueJsonRs = new ReferenceQueue<>(); + arrowResultSetFinalizers = new ArrayList<>(); + jsonResultSetFinalizers = new ArrayList<>(); + } + + @Test + public void testStartGcDaemonTask() { + + // Start the daemon first, then make sure it cannot be started again. + BigQueryDaemonPollingTask.startGcDaemonTask( + referenceQueueArrowRs, + referenceQueueJsonRs, + arrowResultSetFinalizers, + jsonResultSetFinalizers); // The daemon thread may already have been started while JUnit ran + // BigQueryStatementTest, so this first return value is ignored; the assertion + // below verifies that a second start attempt returns false. + + assertThat( + BigQueryDaemonPollingTask.startGcDaemonTask( + referenceQueueArrowRs, + referenceQueueJsonRs, + arrowResultSetFinalizers, + jsonResultSetFinalizers)) + .isFalse(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaDataTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaDataTest.java new file mode 100644 index 0000000000..536aae15bf --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDatabaseMetaDataTest.java @@ -0,0 +1,3209 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertSame; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.*; + +import com.google.api.gax.paging.Page; +import com.google.cloud.bigquery.*; +import com.google.cloud.bigquery.BigQuery.RoutineListOption; +import java.io.IOException; +import java.io.InputStream; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Types; +import java.util.*; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Future; +import java.util.regex.Pattern; +import org.junit.Before; +import org.junit.Test; + +public class BigQueryDatabaseMetaDataTest { + + private BigQueryConnection bigQueryConnection; + private BigQueryDatabaseMetaData dbMetadata; + private BigQuery bigqueryClient; + + @Before + public void setUp() throws SQLException { + bigQueryConnection = mock(BigQueryConnection.class); + bigqueryClient = mock(BigQuery.class); + Statement mockStatement = mock(Statement.class); + + when(bigQueryConnection.getConnectionUrl()).thenReturn("jdbc:bigquery://test-project"); + when(bigQueryConnection.getBigQuery()).thenReturn(bigqueryClient); + when(bigQueryConnection.createStatement()).thenReturn(mockStatement); + + dbMetadata = new BigQueryDatabaseMetaData(bigQueryConnection); + } + + private Table mockBigQueryTable( + String project, String dataset, String table, TableDefinition.Type type, String description) { + Table mockTable = mock(Table.class); + TableId mockTableId = TableId.of(project, dataset, table); + TableDefinition mockDefinition = mock(TableDefinition.class); + + when(mockTable.getTableId()).thenReturn(mockTableId); + when(mockTable.getDefinition()).thenReturn(mockDefinition); + when(mockDefinition.getType()).thenReturn(type); + when(mockTable.getDescription()).thenReturn(description); + + return mockTable; + } + + private StandardSQLDataType mockStandardSQLDataType(StandardSQLTypeName typeKind) { + StandardSQLDataType mockDataType = mock(StandardSQLDataType.class); + when(mockDataType.getTypeKind()).thenReturn(typeKind.name()); + return mockDataType; + } + + private RoutineArgument mockRoutineArgument(String name, StandardSQLTypeName type, String mode) { + RoutineArgument mockArg = mock(RoutineArgument.class); + when(mockArg.getName()).thenReturn(name); + StandardSQLDataType mockDataType = mockStandardSQLDataType(type); + when(mockArg.getDataType()).thenReturn(mockDataType); + when(mockArg.getMode()).thenReturn(mode); // "IN", "OUT", "INOUT", or null + return mockArg; + } + + private Routine mockBigQueryRoutineWithArgs( + String project, + String dataset, + String routineName, + String routineType, + String description, + List arguments) { + Routine mockRoutine = mock(Routine.class); + RoutineId mockRoutineId = RoutineId.of(project, dataset, routineName); + when(mockRoutine.getRoutineId()).thenReturn(mockRoutineId); + when(mockRoutine.getRoutineType()).thenReturn(routineType); + when(mockRoutine.getDescription()).thenReturn(description); + if 
(arguments != null) { + when(mockRoutine.getArguments()).thenReturn(arguments); + } else { + when(mockRoutine.getArguments()).thenReturn(Collections.emptyList()); + } + return mockRoutine; + } + + private StandardSQLTableType mockStandardSQLTableType(List<StandardSQLField> columns) { + StandardSQLTableType mockTableType = mock(StandardSQLTableType.class); + when(mockTableType.getColumns()).thenReturn(columns); + return mockTableType; + } + + private StandardSQLField mockStandardSQLField(String name, StandardSQLTypeName type) { + StandardSQLField mockField = mock(StandardSQLField.class); + StandardSQLDataType mockedDataType = mockStandardSQLDataType(type); + when(mockField.getName()).thenReturn(name); + when(mockField.getDataType()).thenReturn(mockedDataType); + return mockField; + } + + @Test + public void testBigqueryDatabaseMetaDataGetters() throws SQLException { + BigQueryDatabaseMetaData dbMetadata = new BigQueryDatabaseMetaData(bigQueryConnection); + assertEquals("GoogleJDBCDriverForGoogleBigQuery", dbMetadata.getDriverName()); + assertEquals("Google BigQuery", dbMetadata.getDatabaseProductName()); + assertEquals("2.0", dbMetadata.getDatabaseProductVersion()); + assertEquals("Dataset", dbMetadata.getSchemaTerm()); + assertEquals("Procedure", dbMetadata.getProcedureTerm()); + assertEquals("Project", dbMetadata.getCatalogTerm()); + } + + @Test + public void testReadSqlFromFile() throws SQLException { + BigQueryDatabaseMetaData dbMetadata = new BigQueryDatabaseMetaData(bigQueryConnection); + + String primaryKeysQuery = + BigQueryDatabaseMetaData.readSqlFromFile("DatabaseMetaData_GetPrimaryKeys.sql"); + assertTrue(primaryKeysQuery.contains("pk$")); + + try { + when(bigQueryConnection.prepareStatement(primaryKeysQuery)).thenCallRealMethod(); + String sql = + dbMetadata.replaceSqlParameters( + primaryKeysQuery, "project_name", "dataset_name", "table_name"); + assertTrue(sql.contains("project_name.dataset_name.INFORMATION_SCHEMA.KEY_COLUMN_USAGE")); + } catch (SQLException e) { + throw new RuntimeException(e); + } + } + + @Test + public void testNeedsListing() { + assertTrue("Null pattern should require listing", dbMetadata.needsListing(null)); + assertTrue("Pattern with % should require listing", dbMetadata.needsListing("abc%def")); + assertTrue("Pattern with _ should require listing", dbMetadata.needsListing("abc_def")); + assertTrue("Pattern with both wildcards", dbMetadata.needsListing("a%c_d%f")); + assertFalse("Empty pattern should not require listing", dbMetadata.needsListing("")); + assertFalse("Pattern without wildcards", dbMetadata.needsListing("exactName")); + } + + @Test + public void testCompileSqlLikePattern() { + // Null input -> Null pattern + assertNull(dbMetadata.compileSqlLikePattern(null)); + + // Empty input -> Pattern matching nothing ("(?!)") + Pattern emptyPattern = dbMetadata.compileSqlLikePattern(""); + assertNotNull(emptyPattern); + assertFalse(emptyPattern.matcher("").matches()); + assertFalse(emptyPattern.matcher("a").matches()); + assertEquals("(?!)", emptyPattern.pattern()); + + // Exact match + Pattern exactPattern = dbMetadata.compileSqlLikePattern("tableName"); + assertNotNull(exactPattern); + assertTrue(exactPattern.matcher("tableName").matches()); + assertTrue(exactPattern.matcher("TABLENAME").matches()); + assertFalse(exactPattern.matcher("tableNameX").matches()); + assertFalse(exactPattern.matcher("XtableName").matches()); + + // Percent wildcard (%) -> .* + Pattern percentPattern = dbMetadata.compileSqlLikePattern("table%"); + assertNotNull(percentPattern);
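+ // The remaining assertions exercise the LIKE-to-regex translation: "%" becomes ".*", + // "_" becomes ".", matching is case-insensitive, and regex metacharacters such as + // "." and "[" are escaped so they only match literally. +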
assertTrue(percentPattern.matcher("table").matches()); + assertTrue(percentPattern.matcher("tableName").matches()); + assertTrue(percentPattern.matcher("TABLE_123").matches()); + assertFalse(percentPattern.matcher("myTable").matches()); + + Pattern percentPattern2 = dbMetadata.compileSqlLikePattern("%Name"); + assertNotNull(percentPattern2); + assertTrue(percentPattern2.matcher("Name").matches()); + assertTrue(percentPattern2.matcher("tableName").matches()); + assertTrue(percentPattern2.matcher("VIEW_NAME").matches()); + assertFalse(percentPattern2.matcher("NameSuffix").matches()); + + Pattern percentPattern3 = dbMetadata.compileSqlLikePattern("ta%le"); + assertNotNull(percentPattern3); + assertTrue(percentPattern3.matcher("table").matches()); + assertTrue(percentPattern3.matcher("TALLE").matches()); + assertTrue(percentPattern3.matcher("tale").matches()); + assertFalse(percentPattern3.matcher("table123").matches()); + + // Underscore wildcard (_) -> . + Pattern underscorePattern = dbMetadata.compileSqlLikePattern("t_ble"); + assertNotNull(underscorePattern); + assertTrue(underscorePattern.matcher("table").matches()); + assertTrue(underscorePattern.matcher("tAble").matches()); + assertTrue(underscorePattern.matcher("tXble").matches()); + assertFalse(underscorePattern.matcher("tble").matches()); + assertFalse(underscorePattern.matcher("taable").matches()); + + // Mixed wildcards + Pattern mixedPattern = dbMetadata.compileSqlLikePattern("data_%_set%"); + assertNotNull(mixedPattern); + assertTrue(mixedPattern.matcher("data_1_set").matches()); + assertTrue(mixedPattern.matcher("data_foo_set_bar").matches()); + assertTrue(mixedPattern.matcher("DATA_X_SET").matches()); + assertFalse(mixedPattern.matcher("dataset").matches()); + assertFalse(mixedPattern.matcher("data_set").matches()); + + // Escaping regex metacharacters + Pattern dotPattern = dbMetadata.compileSqlLikePattern("version_1.0"); + assertNotNull(dotPattern); + assertTrue(dotPattern.matcher("version_1.0").matches()); + assertFalse(dotPattern.matcher("version_1X0").matches()); + + Pattern bracketPattern = dbMetadata.compileSqlLikePattern("array[0]"); + assertNotNull(bracketPattern); + assertTrue(bracketPattern.matcher("array[0]").matches()); + assertFalse(bracketPattern.matcher("array_0_").matches()); + } + + @Test + public void testMapBigQueryTypeToJdbc_ScalarTypes() { + // INT64 -> BIGINT + Field fieldInt64 = + Field.newBuilder("test_int", StandardSQLTypeName.INT64) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoInt64 = + dbMetadata.mapBigQueryTypeToJdbc(fieldInt64); + assertEquals(Types.BIGINT, infoInt64.jdbcType); + assertEquals("BIGINT", infoInt64.typeName); + assertEquals(Integer.valueOf(19), infoInt64.columnSize); + assertEquals(Integer.valueOf(0), infoInt64.decimalDigits); + assertEquals(Integer.valueOf(10), infoInt64.numPrecRadix); + + // STRING -> NVARCHAR + Field fieldString = + Field.newBuilder("test_string", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoString = + dbMetadata.mapBigQueryTypeToJdbc(fieldString); + assertEquals(Types.NVARCHAR, infoString.jdbcType); + assertEquals("NVARCHAR", infoString.typeName); + assertNull(infoString.columnSize); + assertNull(infoString.decimalDigits); + assertNull(infoString.numPrecRadix); + + // BOOL -> BOOLEAN + Field fieldBool = + Field.newBuilder("test_bool", StandardSQLTypeName.BOOL) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo 
infoBool = dbMetadata.mapBigQueryTypeToJdbc(fieldBool); + assertEquals(Types.BOOLEAN, infoBool.jdbcType); + assertEquals("BOOLEAN", infoBool.typeName); + assertEquals(Integer.valueOf(1), infoBool.columnSize); + + // BYTES -> VARBINARY + Field fieldBytes = + Field.newBuilder("test_bytes", StandardSQLTypeName.BYTES) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoBytes = + dbMetadata.mapBigQueryTypeToJdbc(fieldBytes); + assertEquals(Types.VARBINARY, infoBytes.jdbcType); + assertEquals("VARBINARY", infoBytes.typeName); + assertNull(infoBytes.columnSize); + + // TIMESTAMP -> TIMESTAMP + Field fieldTimestamp = + Field.newBuilder("test_ts", StandardSQLTypeName.TIMESTAMP) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoTimestamp = + dbMetadata.mapBigQueryTypeToJdbc(fieldTimestamp); + assertEquals(Types.TIMESTAMP, infoTimestamp.jdbcType); + assertEquals("TIMESTAMP", infoTimestamp.typeName); + assertEquals(Integer.valueOf(29), infoTimestamp.columnSize); + assertNull(infoTimestamp.decimalDigits); + assertNull(infoTimestamp.numPrecRadix); + + // DATETIME -> TIMESTAMP + Field fieldDateTime = + Field.newBuilder("test_dt", StandardSQLTypeName.DATETIME) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoDateTime = + dbMetadata.mapBigQueryTypeToJdbc(fieldDateTime); + assertEquals(Types.TIMESTAMP, infoDateTime.jdbcType); + assertEquals("TIMESTAMP", infoDateTime.typeName); + assertEquals(Integer.valueOf(29), infoDateTime.columnSize); + assertNull(infoDateTime.decimalDigits); + assertNull(infoDateTime.numPrecRadix); + + // NUMERIC -> NUMERIC + Field fieldNumeric = + Field.newBuilder("test_num", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoNumeric = + dbMetadata.mapBigQueryTypeToJdbc(fieldNumeric); + assertEquals(Types.NUMERIC, infoNumeric.jdbcType); + assertEquals("NUMERIC", infoNumeric.typeName); + assertEquals(Integer.valueOf(38), infoNumeric.columnSize); + assertEquals(Integer.valueOf(9), infoNumeric.decimalDigits); + assertEquals(Integer.valueOf(10), infoNumeric.numPrecRadix); + + // BIGNUMERIC -> NUMERIC + Field fieldBigNumeric = + Field.newBuilder("test_bignum", StandardSQLTypeName.BIGNUMERIC) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoBigNumeric = + dbMetadata.mapBigQueryTypeToJdbc(fieldBigNumeric); + assertEquals(Types.NUMERIC, infoBigNumeric.jdbcType); + assertEquals("NUMERIC", infoBigNumeric.typeName); + assertEquals(Integer.valueOf(77), infoBigNumeric.columnSize); + assertEquals(Integer.valueOf(38), infoBigNumeric.decimalDigits); + assertEquals(Integer.valueOf(10), infoBigNumeric.numPrecRadix); + + // GEOGRAPHY -> VARCHAR + Field fieldGeo = + Field.newBuilder("test_geo", StandardSQLTypeName.GEOGRAPHY) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoGeo = dbMetadata.mapBigQueryTypeToJdbc(fieldGeo); + assertEquals(Types.VARCHAR, infoGeo.jdbcType); + assertEquals("VARCHAR", infoGeo.typeName); + assertNull(infoGeo.columnSize); + + // DATE + Field fieldDate = + Field.newBuilder("test_date", StandardSQLTypeName.DATE) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoDate = dbMetadata.mapBigQueryTypeToJdbc(fieldDate); + assertEquals(Types.DATE, infoDate.jdbcType); + assertEquals("DATE", infoDate.typeName); + + // TIME + Field fieldTime = + Field.newBuilder("test_time", 
StandardSQLTypeName.TIME) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoTime = dbMetadata.mapBigQueryTypeToJdbc(fieldTime); + assertEquals(Types.TIME, infoTime.jdbcType); + assertEquals("TIME", infoTime.typeName); + + // STRUCT + Field fieldStruct = + Field.newBuilder( + "test_struct", + StandardSQLTypeName.STRUCT, + Field.of("sub_field", StandardSQLTypeName.STRING)) + .setMode(Field.Mode.NULLABLE) + .build(); + BigQueryDatabaseMetaData.ColumnTypeInfo infoStruct = + dbMetadata.mapBigQueryTypeToJdbc(fieldStruct); + assertEquals(Types.STRUCT, infoStruct.jdbcType); + assertEquals("STRUCT", infoStruct.typeName); + } + + @Test + public void testMapBigQueryTypeToJdbc_ArrayType() { + Field fieldArray = + Field.newBuilder("test_array", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REPEATED) + .build(); + + BigQueryDatabaseMetaData.ColumnTypeInfo infoArray = + dbMetadata.mapBigQueryTypeToJdbc(fieldArray); + assertEquals(Types.ARRAY, infoArray.jdbcType); + assertEquals("ARRAY", infoArray.typeName); + assertNull(infoArray.columnSize); + assertNull(infoArray.decimalDigits); + assertNull(infoArray.numPrecRadix); + } + + @Test + public void testCreateColumnRow() { + Field realField = + Field.newBuilder("user_name", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("Name of the user") + .build(); + + String catalog = "test-project"; + String schema = "test_dataset"; + String table = "users"; + int ordinalPos = 3; + + List<FieldValue> row = + dbMetadata.createColumnRow(catalog, schema, table, realField, ordinalPos); + + assertNotNull(row); + assertEquals(24, row.size()); + + assertEquals(catalog, row.get(0).getStringValue()); // 1. TABLE_CAT + assertEquals(schema, row.get(1).getStringValue()); // 2. TABLE_SCHEM + assertEquals(table, row.get(2).getStringValue()); // 3. TABLE_NAME + assertEquals("user_name", row.get(3).getStringValue()); // 4. COLUMN_NAME + assertEquals(String.valueOf(Types.NVARCHAR), row.get(4).getStringValue()); // 5. DATA_TYPE + assertEquals("NVARCHAR", row.get(5).getStringValue()); // 6. TYPE_NAME + assertTrue(row.get(6).isNull()); // 7. COLUMN_SIZE (was null for STRING) + assertTrue(row.get(7).isNull()); // 8. BUFFER_LENGTH (always null) + assertTrue(row.get(8).isNull()); // 9. DECIMAL_DIGITS (null for STRING) + assertTrue(row.get(9).isNull()); // 10. NUM_PREC_RADIX (null for STRING) + assertEquals( + String.valueOf(DatabaseMetaData.columnNullable), + row.get(10).getStringValue()); // 11. NULLABLE + assertEquals("Name of the user", row.get(11).getStringValue()); // 12. REMARKS + assertTrue(row.get(12).isNull()); // 13. COLUMN_DEF (null) + // 14, 15 unused + assertTrue(row.get(15).isNull()); // 16. CHAR_OCTET_LENGTH (was null) + assertEquals(String.valueOf(ordinalPos), row.get(16).getStringValue()); // 17. ORDINAL_POSITION + assertEquals("YES", row.get(17).getStringValue()); // 18. IS_NULLABLE (YES for NULLABLE mode) + // 19-22 SCOPE/SOURCE (null) + assertEquals("NO", row.get(22).getStringValue()); // 23. IS_AUTOINCREMENT (NO) + assertEquals("NO", row.get(23).getStringValue()); // 24. IS_GENERATEDCOLUMN (NO) + } + + @Test + public void testCreateColumnRow_RequiredInt() { + Field realField = + Field.newBuilder("user_id", StandardSQLTypeName.INT64).setMode(Field.Mode.REQUIRED).build(); + + String catalog = "p"; + String schema = "d"; + String table = "t"; + int ordinalPos = 1; + + List<FieldValue> row = + dbMetadata.createColumnRow(catalog, schema, table, realField, ordinalPos); + + assertEquals(24, row.size()); + assertEquals("user_id", row.get(3).getStringValue()); // COLUMN_NAME + assertEquals(String.valueOf(Types.BIGINT), row.get(4).getStringValue()); // DATA_TYPE + assertEquals("BIGINT", row.get(5).getStringValue()); // TYPE_NAME + assertEquals("19", row.get(6).getStringValue()); // COLUMN_SIZE + assertEquals("0", row.get(8).getStringValue()); // DECIMAL_DIGITS + assertEquals("10", row.get(9).getStringValue()); // NUM_PREC_RADIX + assertEquals( + String.valueOf(DatabaseMetaData.columnNoNulls), row.get(10).getStringValue()); // NULLABLE + assertTrue(row.get(11).isNull()); // REMARKS (null description) + assertEquals(String.valueOf(ordinalPos), row.get(16).getStringValue()); // ORDINAL_POSITION + assertEquals("NO", row.get(17).getStringValue()); // IS_NULLABLE (NO for REQUIRED mode) + } + + @Test + public void testDefineGetTablesSchema() { + Schema schema = dbMetadata.defineGetTablesSchema(); + assertNotNull(schema); + FieldList fields = schema.getFields(); + assertEquals(10, fields.size()); + + Field tableCat = fields.get("TABLE_CAT"); + assertEquals("TABLE_CAT", tableCat.getName()); + assertEquals(StandardSQLTypeName.STRING, tableCat.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, tableCat.getMode()); + + Field tableName = fields.get("TABLE_NAME"); + assertEquals("TABLE_NAME", tableName.getName()); + assertEquals(StandardSQLTypeName.STRING, tableName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, tableName.getMode()); + + Field tableType = fields.get("TABLE_TYPE"); + assertEquals("TABLE_TYPE", tableType.getName()); + assertEquals(StandardSQLTypeName.STRING, tableType.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, tableType.getMode()); + + Field remarks = fields.get("REMARKS"); + assertEquals("REMARKS", remarks.getName()); + assertEquals(StandardSQLTypeName.STRING, remarks.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, remarks.getMode()); + + Field refGeneration = fields.get("REF_GENERATION"); + assertEquals("REF_GENERATION", refGeneration.getName()); + assertEquals(StandardSQLTypeName.STRING, refGeneration.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, refGeneration.getMode()); + } + + @Test + public void testProcessTableInfo_Basic() { + Schema resultSchema = dbMetadata.defineGetTablesSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>()); + + String catalog = "proj"; + String schema = "ds"; + String name = "my_table"; + TableDefinition.Type type = TableDefinition.Type.TABLE; + String description = "My test table"; + + Table table = mockBigQueryTable(catalog, schema, name, type, description); + + dbMetadata.processTableInfo(table, null, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + FieldValueList row = collectedResults.get(0); + assertNotNull(row); + assertEquals(10, row.size()); + assertEquals(catalog, row.get("TABLE_CAT").getStringValue()); + assertEquals(schema, row.get("TABLE_SCHEM").getStringValue()); + assertEquals(name,
row.get("TABLE_NAME").getStringValue()); + assertEquals(type.toString(), row.get("TABLE_TYPE").getStringValue()); + assertEquals(description, row.get("REMARKS").getStringValue()); + assertTrue(row.get("TYPE_CAT").isNull()); + assertTrue(row.get("TYPE_SCHEM").isNull()); + assertTrue(row.get("TYPE_NAME").isNull()); + assertTrue(row.get("SELF_REFERENCING_COL_NAME").isNull()); + assertTrue(row.get("REF_GENERATION").isNull()); + } + + @Test + public void testProcessTableInfo_TypeFilterMatch() { + Schema resultSchema = dbMetadata.defineGetTablesSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + Set requestedTypes = new HashSet<>(Arrays.asList("VIEW", "TABLE")); + + Table table = mockBigQueryTable("p", "d", "t", TableDefinition.Type.TABLE, "Desc"); + + dbMetadata.processTableInfo(table, requestedTypes, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + assertEquals("TABLE", collectedResults.get(0).get("TABLE_TYPE").getStringValue()); + } + + @Test + public void testProcessTableInfo_TypeFilterMismatch() { + Schema resultSchema = dbMetadata.defineGetTablesSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + Set requestedTypes = new HashSet<>(Collections.singletonList("VIEW")); + + Table table = mockBigQueryTable("p", "d", "t", TableDefinition.Type.TABLE, "Desc"); + + dbMetadata.processTableInfo(table, requestedTypes, collectedResults, resultSchemaFields); + + assertEquals(0, collectedResults.size()); + } + + @Test + public void testProcessTableInfo_NullDescription() { + Schema resultSchema = dbMetadata.defineGetTablesSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + + Table table = mockBigQueryTable("p", "d", "t_no_desc", TableDefinition.Type.TABLE, null); + + dbMetadata.processTableInfo(table, null, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + FieldValueList row = collectedResults.get(0); + assertTrue(row.get("REMARKS").isNull()); + } + + private FieldValueList createTableRow( + String cat, String schem, String name, String type, FieldList schemaFields) { + List values = new ArrayList<>(); + values.add(dbMetadata.createStringFieldValue(cat)); // TABLE_CAT + values.add(dbMetadata.createStringFieldValue(schem)); // TABLE_SCHEM + values.add(dbMetadata.createStringFieldValue(name)); // TABLE_NAME + values.add(dbMetadata.createStringFieldValue(type)); // TABLE_TYPE + values.add(dbMetadata.createStringFieldValue("Remark for " + name)); // REMARKS + values.add(dbMetadata.createNullFieldValue()); // TYPE_CAT + values.add(dbMetadata.createNullFieldValue()); // TYPE_SCHEM + values.add(dbMetadata.createNullFieldValue()); // TYPE_NAME + values.add(dbMetadata.createNullFieldValue()); // SELF_REFERENCING_COL_NAME + values.add(dbMetadata.createNullFieldValue()); // REF_GENERATION + return FieldValueList.of(values, schemaFields); + } + + @Test + public void testSortResults_Tables() { + Schema resultSchema = dbMetadata.defineGetTablesSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + // Add rows in unsorted order (based on TYPE, CAT, SCHEM, NAME) + results.add(createTableRow("cat_a", "sch_z", "table_1", "TABLE", schemaFields)); + results.add(createTableRow("cat_b", "sch_a", "view_1", "VIEW", schemaFields)); 
+ results.add(createTableRow("cat_a", "sch_c", "table_2", "TABLE", schemaFields)); + results.add(createTableRow(null, "sch_b", "table_0", "TABLE", schemaFields)); + results.add(createTableRow("cat_a", "sch_z", "table_0", "TABLE", schemaFields)); + results.add(createTableRow("cat_a", null, "view_0", "VIEW", schemaFields)); + + Comparator comparator = dbMetadata.defineGetTablesComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getTables", dbMetadata.LOG); + + // Expected order: TABLEs first, then VIEWs. Within type, sort by CAT, SCHEM, NAME + assertEquals(6, results.size()); + + // 1. Null cat, sch_b, table_0, TABLE + assertTrue("Row 0 TABLE_CAT should be null", results.get(0).get("TABLE_CAT").isNull()); + assertEquals("sch_b", results.get(0).get("TABLE_SCHEM").getStringValue()); + assertEquals("table_0", results.get(0).get("TABLE_NAME").getStringValue()); + assertEquals("TABLE", results.get(0).get("TABLE_TYPE").getStringValue()); + + // 2. cat_a, sch_c, table_2, TABLE + assertEquals("cat_a", results.get(1).get("TABLE_CAT").getStringValue()); + assertEquals("sch_c", results.get(1).get("TABLE_SCHEM").getStringValue()); + assertEquals("table_2", results.get(1).get("TABLE_NAME").getStringValue()); + assertEquals("TABLE", results.get(1).get("TABLE_TYPE").getStringValue()); + + // 3. cat_a, sch_z, table_0, TABLE + assertEquals("cat_a", results.get(2).get("TABLE_CAT").getStringValue()); + assertEquals("sch_z", results.get(2).get("TABLE_SCHEM").getStringValue()); + assertEquals("table_0", results.get(2).get("TABLE_NAME").getStringValue()); + assertEquals("TABLE", results.get(2).get("TABLE_TYPE").getStringValue()); + + // 4. cat_a, sch_z, table_1, TABLE + assertEquals("cat_a", results.get(3).get("TABLE_CAT").getStringValue()); + assertEquals("sch_z", results.get(3).get("TABLE_SCHEM").getStringValue()); + assertEquals("table_1", results.get(3).get("TABLE_NAME").getStringValue()); + assertEquals("TABLE", results.get(3).get("TABLE_TYPE").getStringValue()); + + // 5. cat_a, null, view_0, VIEW + assertEquals("cat_a", results.get(4).get("TABLE_CAT").getStringValue()); + assertTrue("Row 4 TABLE_SCHEM should be null", results.get(4).get("TABLE_SCHEM").isNull()); + assertEquals("view_0", results.get(4).get("TABLE_NAME").getStringValue()); + assertEquals("VIEW", results.get(4).get("TABLE_TYPE").getStringValue()); + + // 6. 
cat_b, sch_a, view_1, VIEW + assertEquals("cat_b", results.get(5).get("TABLE_CAT").getStringValue()); + assertEquals("sch_a", results.get(5).get("TABLE_SCHEM").getStringValue()); + assertEquals("view_1", results.get(5).get("TABLE_NAME").getStringValue()); + assertEquals("VIEW", results.get(5).get("TABLE_TYPE").getStringValue()); + } + + @Test + public void testSortResults_Tables_EmptyList() { + Schema resultSchema = dbMetadata.defineGetTablesSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + Comparator comparator = dbMetadata.defineGetTablesComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getTables", dbMetadata.LOG); + assertTrue(results.isEmpty()); + } + + @Test + public void testDefineGetSchemasSchema() { + Schema schema = dbMetadata.defineGetSchemasSchema(); + assertNotNull(schema); + FieldList fields = schema.getFields(); + assertEquals(2, fields.size()); + + Field tableSchem = fields.get("TABLE_SCHEM"); + assertEquals("TABLE_SCHEM", tableSchem.getName()); + assertEquals(StandardSQLTypeName.STRING, tableSchem.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, tableSchem.getMode()); + + Field tableCatalog = fields.get("TABLE_CATALOG"); + assertEquals("TABLE_CATALOG", tableCatalog.getName()); + assertEquals(StandardSQLTypeName.STRING, tableCatalog.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, tableCatalog.getMode()); + } + + private Dataset mockBigQueryDataset(String project, String datasetName) { + Dataset mockDataset = mock(Dataset.class); + DatasetId mockDatasetId = DatasetId.of(project, datasetName); + when(mockDataset.getDatasetId()).thenReturn(mockDatasetId); + return mockDataset; + } + + @Test + public void testProcessSchemaInfo() { + Schema resultSchema = dbMetadata.defineGetSchemasSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + + String catalog = "project-alpha"; + String schemaName = "dataset_beta"; + Dataset dataset = mockBigQueryDataset(catalog, schemaName); + + dbMetadata.processSchemaInfo(dataset, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + FieldValueList row = collectedResults.get(0); + assertNotNull(row); + assertEquals(2, row.size()); + assertEquals(schemaName, row.get("TABLE_SCHEM").getStringValue()); + assertEquals(catalog, row.get("TABLE_CATALOG").getStringValue()); + } + + private FieldValueList createSchemaRow(String cat, String schem, FieldList schemaFields) { + List values = new ArrayList<>(); + values.add(dbMetadata.createStringFieldValue(schem)); // TABLE_SCHEM + values.add(dbMetadata.createStringFieldValue(cat)); // TABLE_CATALOG + return FieldValueList.of(values, schemaFields); + } + + @Test + public void testSortResults_Schemas() { + Schema resultSchema = dbMetadata.defineGetSchemasSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + results.add(createSchemaRow("proj_b", "schema_z", schemaFields)); + results.add(createSchemaRow("proj_a", "schema_y", schemaFields)); + results.add(createSchemaRow(null, "schema_x", schemaFields)); + results.add(createSchemaRow("proj_b", "schema_a", schemaFields)); + results.add(createSchemaRow("proj_a", "schema_c", schemaFields)); + + Comparator comparator = dbMetadata.defineGetSchemasComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getSchemas", dbMetadata.LOG); + + // Expected order: Sort by TABLE_CATALOG 
(nulls first), then TABLE_SCHEM + assertEquals(5, results.size()); + + // 1. Null catalog, schema_x + assertTrue("Row 0 TABLE_CATALOG should be null", results.get(0).get("TABLE_CATALOG").isNull()); + assertEquals("schema_x", results.get(0).get("TABLE_SCHEM").getStringValue()); + + // 2. proj_a, schema_c + assertEquals("proj_a", results.get(1).get("TABLE_CATALOG").getStringValue()); + assertEquals("schema_c", results.get(1).get("TABLE_SCHEM").getStringValue()); + + // 3. proj_a, schema_y + assertEquals("proj_a", results.get(2).get("TABLE_CATALOG").getStringValue()); + assertEquals("schema_y", results.get(2).get("TABLE_SCHEM").getStringValue()); + + // 4. proj_b, schema_a + assertEquals("proj_b", results.get(3).get("TABLE_CATALOG").getStringValue()); + assertEquals("schema_a", results.get(3).get("TABLE_SCHEM").getStringValue()); + + // 5. proj_b, schema_z + assertEquals("proj_b", results.get(4).get("TABLE_CATALOG").getStringValue()); + assertEquals("schema_z", results.get(4).get("TABLE_SCHEM").getStringValue()); + } + + @Test + public void testSortResults_Schemas_EmptyList() { + Schema resultSchema = dbMetadata.defineGetSchemasSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + Comparator comparator = dbMetadata.defineGetSchemasComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getSchemas", dbMetadata.LOG); + assertTrue(results.isEmpty()); + } + + private Routine mockBigQueryRoutine( + String project, String dataset, String routineName, String routineType, String description) { + Routine mockRoutine = mock(Routine.class); + RoutineId mockRoutineId = RoutineId.of(project, dataset, routineName); + when(mockRoutine.getRoutineId()).thenReturn(mockRoutineId); + when(mockRoutine.getRoutineType()).thenReturn(routineType); + when(mockRoutine.getDescription()).thenReturn(description); + return mockRoutine; + } + + @Test + public void testDefineGetProceduresSchema() { + Schema schema = dbMetadata.defineGetProceduresSchema(); + assertNotNull(schema); + FieldList fields = schema.getFields(); + assertEquals(9, fields.size()); + + Field procCat = fields.get("PROCEDURE_CAT"); + assertEquals("PROCEDURE_CAT", procCat.getName()); + assertEquals(StandardSQLTypeName.STRING, procCat.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, procCat.getMode()); + + Field procName = fields.get("PROCEDURE_NAME"); + assertEquals("PROCEDURE_NAME", procName.getName()); + assertEquals(StandardSQLTypeName.STRING, procName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, procName.getMode()); + + Field remarks = fields.get("REMARKS"); + assertEquals("REMARKS", remarks.getName()); + assertEquals(StandardSQLTypeName.STRING, remarks.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, remarks.getMode()); + + Field procType = fields.get("PROCEDURE_TYPE"); + assertEquals("PROCEDURE_TYPE", procType.getName()); + assertEquals(StandardSQLTypeName.INT64, procType.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, procType.getMode()); + + Field specificName = fields.get("SPECIFIC_NAME"); + assertEquals("SPECIFIC_NAME", specificName.getName()); + assertEquals(StandardSQLTypeName.STRING, specificName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, specificName.getMode()); + } + + @Test + public void testProcessProcedureInfo_BasicProcedure() { + Schema resultSchema = dbMetadata.defineGetProceduresSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = 
Collections.synchronizedList(new ArrayList<>()); + + String catalog = "proj-1"; + String schema = "dataset_a"; + String name = "my_proc"; + String description = "A test procedure"; + + Routine routine = mockBigQueryRoutine(catalog, schema, name, "PROCEDURE", description); + + dbMetadata.processProcedureInfo(routine, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + FieldValueList row = collectedResults.get(0); + assertNotNull(row); + assertEquals(9, row.size()); + assertEquals(catalog, row.get("PROCEDURE_CAT").getStringValue()); + assertEquals(schema, row.get("PROCEDURE_SCHEM").getStringValue()); + assertEquals(name, row.get("PROCEDURE_NAME").getStringValue()); + assertTrue(row.get("reserved1").isNull()); + assertTrue(row.get("reserved2").isNull()); + assertTrue(row.get("reserved3").isNull()); + assertEquals(description, row.get("REMARKS").getStringValue()); + assertEquals( + String.valueOf(DatabaseMetaData.procedureResultUnknown), + row.get("PROCEDURE_TYPE").getStringValue()); + assertEquals(name, row.get("SPECIFIC_NAME").getStringValue()); + } + + @Test + public void testProcessProcedureInfo_NullDescription() { + Schema resultSchema = dbMetadata.defineGetProceduresSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + + String catalog = "proj-1"; + String schema = "dataset_a"; + String name = "proc_no_desc"; + + Routine routine = mockBigQueryRoutine(catalog, schema, name, "PROCEDURE", null); + + dbMetadata.processProcedureInfo(routine, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + FieldValueList row = collectedResults.get(0); + assertTrue(row.get("REMARKS").isNull()); + } + + @Test + public void testProcessProcedureInfo_SkipsNonProcedure() { + Schema resultSchema = dbMetadata.defineGetProceduresSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List collectedResults = Collections.synchronizedList(new ArrayList<>()); + + Routine routine = mockBigQueryRoutine("p", "d", "my_func", "FUNCTION", "A function"); + + dbMetadata.processProcedureInfo(routine, collectedResults, resultSchemaFields); + + assertTrue(collectedResults.isEmpty()); + } + + private FieldValueList createProcedureRow( + String cat, String schem, String name, String specName, FieldList schemaFields) { + List values = new ArrayList<>(); + values.add(dbMetadata.createStringFieldValue(cat)); // PROCEDURE_CAT + values.add(dbMetadata.createStringFieldValue(schem)); // PROCEDURE_SCHEM + values.add(dbMetadata.createStringFieldValue(name)); // PROCEDURE_NAME + values.add(dbMetadata.createNullFieldValue()); // reserved1 + values.add(dbMetadata.createNullFieldValue()); // reserved2 + values.add(dbMetadata.createNullFieldValue()); // reserved3 + values.add(dbMetadata.createStringFieldValue("Remark for " + name)); // REMARKS + values.add( + dbMetadata.createLongFieldValue( + (long) DatabaseMetaData.procedureResultUnknown)); // PROCEDURE_TYPE + values.add(dbMetadata.createStringFieldValue(specName)); // SPECIFIC_NAME + return FieldValueList.of(values, schemaFields); + } + + @Test + public void testSortResults_Procedures() { + Schema resultSchema = dbMetadata.defineGetProceduresSchema(); + FieldList schemaFields = resultSchema.getFields(); + List results = new ArrayList<>(); + + // Add rows in unsorted order (Sort by CAT, SCHEM, NAME, SPECIFIC_NAME) + results.add(createProcedureRow("cat_b", "sch_c", "proc_1", "proc_1_spec", schemaFields)); + results.add( 
+ createProcedureRow("cat_a", "sch_z", "proc_alpha", "proc_alpha_spec", schemaFields)); + results.add(createProcedureRow("cat_a", "sch_z", "proc_beta", "proc_beta_spec", schemaFields)); + results.add(createProcedureRow(null, "sch_y", "proc_gamma", "proc_gamma_spec", schemaFields)); + results.add(createProcedureRow("cat_a", null, "proc_delta", "proc_delta_spec", schemaFields)); + results.add( + createProcedureRow("cat_a", "sch_z", "proc_alpha", "proc_alpha_spec_older", schemaFields)); + + Comparator comparator = dbMetadata.defineGetProceduresComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getProcedures", dbMetadata.LOG); + + // Expected Order: Null Cat, then Cat A (Null Schem, then sch_z), then Cat B. Within that, Name, + // then Spec Name. + assertEquals(6, results.size()); + + // 1. Null cat, sch_y, proc_gamma, proc_gamma_spec + assertTrue("Row 0 PROC_CAT should be null", results.get(0).get("PROCEDURE_CAT").isNull()); + assertEquals("sch_y", results.get(0).get("PROCEDURE_SCHEM").getStringValue()); + assertEquals("proc_gamma", results.get(0).get("PROCEDURE_NAME").getStringValue()); + assertEquals("proc_gamma_spec", results.get(0).get("SPECIFIC_NAME").getStringValue()); + + // 2. cat_a, Null schem, proc_delta, proc_delta_spec + assertEquals("cat_a", results.get(1).get("PROCEDURE_CAT").getStringValue()); + assertTrue("Row 1 PROC_SCHEM should be null", results.get(1).get("PROCEDURE_SCHEM").isNull()); + assertEquals("proc_delta", results.get(1).get("PROCEDURE_NAME").getStringValue()); + assertEquals("proc_delta_spec", results.get(1).get("SPECIFIC_NAME").getStringValue()); + + // 3. cat_a, sch_z, proc_alpha, "proc_alpha_spec" (comes first lexicographically) <<< CORRECTED + // EXPECTATION + assertEquals("cat_a", results.get(2).get("PROCEDURE_CAT").getStringValue()); + assertEquals("sch_z", results.get(2).get("PROCEDURE_SCHEM").getStringValue()); + assertEquals("proc_alpha", results.get(2).get("PROCEDURE_NAME").getStringValue()); + assertEquals( + "proc_alpha_spec", results.get(2).get("SPECIFIC_NAME").getStringValue()); // <<< CORRECTED + + // 4. cat_a, sch_z, proc_alpha, "proc_alpha_spec_older" (comes second lexicographically) <<< + // CORRECTED EXPECTATION + assertEquals("cat_a", results.get(3).get("PROCEDURE_CAT").getStringValue()); + assertEquals("sch_z", results.get(3).get("PROCEDURE_SCHEM").getStringValue()); + assertEquals("proc_alpha", results.get(3).get("PROCEDURE_NAME").getStringValue()); + assertEquals( + "proc_alpha_spec_older", + results.get(3).get("SPECIFIC_NAME").getStringValue()); // <<< CORRECTED + + // 5. cat_a, sch_z, proc_beta, proc_beta_spec + assertEquals("cat_a", results.get(4).get("PROCEDURE_CAT").getStringValue()); + assertEquals("sch_z", results.get(4).get("PROCEDURE_SCHEM").getStringValue()); + assertEquals("proc_beta", results.get(4).get("PROCEDURE_NAME").getStringValue()); + assertEquals("proc_beta_spec", results.get(4).get("SPECIFIC_NAME").getStringValue()); + + // 6. 
+
+  @Test
+  public void testSortResults_Procedures_EmptyList() {
+    Schema resultSchema = dbMetadata.defineGetProceduresSchema();
+    FieldList schemaFields = resultSchema.getFields();
+    List<FieldValueList> results = new ArrayList<>();
+
+    Comparator<FieldValueList> comparator = dbMetadata.defineGetProceduresComparator(schemaFields);
+    dbMetadata.sortResults(results, comparator, "getProcedures", dbMetadata.LOG);
+    assertTrue(results.isEmpty());
+  }
+
+  @Test
+  public void testFindMatchingBigQueryObjects_Routines_ListWithPattern() {
+    String catalog = "p-cat";
+    String schema = "d-sch";
+    String pattern = "proc_%";
+    DatasetId datasetId = DatasetId.of(catalog, schema);
+
+    Routine proc1 = mockBigQueryRoutine(catalog, schema, "proc_abc", "PROCEDURE", "p1");
+    Routine proc2 = mockBigQueryRoutine(catalog, schema, "proc_xyz", "PROCEDURE", "p2");
+    Routine func1 = mockBigQueryRoutine(catalog, schema, "func_123", "FUNCTION", "f1");
+    Routine otherProc = mockBigQueryRoutine(catalog, schema, "another_proc", "PROCEDURE", "p3");
+
+    Page<Routine> page = mock(Page.class);
+    when(page.iterateAll()).thenReturn(Arrays.asList(proc1, func1, proc2, otherProc));
+    when(bigqueryClient.listRoutines(eq(datasetId), any(BigQuery.RoutineListOption.class)))
+        .thenReturn(page);
+
+    Pattern regex = dbMetadata.compileSqlLikePattern(pattern);
+    assertNotNull(regex);
+
+    List<Routine> results =
+        dbMetadata.findMatchingBigQueryObjects(
+            "Routine",
+            () -> bigqueryClient.listRoutines(datasetId, BigQuery.RoutineListOption.pageSize(500)),
+            (name) ->
+                bigqueryClient.getRoutine(
+                    RoutineId.of(datasetId.getProject(), datasetId.getDataset(), name)),
+            (rt) -> rt.getRoutineId().getRoutine(),
+            pattern,
+            regex,
+            dbMetadata.LOG);
+
+    verify(bigqueryClient, times(1))
+        .listRoutines(eq(datasetId), any(BigQuery.RoutineListOption.class));
+    verify(bigqueryClient, never()).getRoutine(any(RoutineId.class));
+
+    assertNotNull(results);
+    List<Routine> resultList = new ArrayList<>(results);
+
+    assertEquals("Should contain only matching routines", 2, resultList.size());
+    assertTrue("Should contain proc_abc", resultList.contains(proc1));
+    assertTrue("Should contain proc_xyz", resultList.contains(proc2));
+    assertFalse("Should not contain func_123", resultList.contains(func1));
+    assertFalse("Should not contain another_proc", resultList.contains(otherProc));
+  }
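+
+  // compileSqlLikePattern is assumed to translate JDBC LIKE wildcards into a java.util.regex
+  // Pattern ("%" -> ".*", "_" -> ".", everything else escaped), so "proc_%" accepts proc_abc
+  // and proc_xyz but not another_proc; a null pattern compiles to a null regex meaning
+  // "match everything", which the next test exercises.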
+
+  @Test
+  public void testFindMatchingBigQueryObjects_Routines_ListNoPattern() {
+    String catalog = "p-cat";
+    String schema = "d-sch";
+    String pattern = null;
+    DatasetId datasetId = DatasetId.of(catalog, schema);
+
+    Routine proc1 = mockBigQueryRoutine(catalog, schema, "proc_abc", "PROCEDURE", "p1");
+    Routine func1 = mockBigQueryRoutine(catalog, schema, "func_123", "FUNCTION", "f1");
+
+    Page<Routine> page = mock(Page.class);
+    when(page.iterateAll()).thenReturn(Arrays.asList(proc1, func1));
+    when(bigqueryClient.listRoutines(eq(datasetId), any(BigQuery.RoutineListOption.class)))
+        .thenReturn(page);
+
+    Pattern regex = dbMetadata.compileSqlLikePattern(pattern);
+    List<Routine> results =
+        dbMetadata.findMatchingBigQueryObjects(
+            "Routine",
+            () -> bigqueryClient.listRoutines(datasetId, BigQuery.RoutineListOption.pageSize(500)),
+            (name) ->
+                bigqueryClient.getRoutine(
+                    RoutineId.of(datasetId.getProject(), datasetId.getDataset(), name)),
+            (rt) -> rt.getRoutineId().getRoutine(),
+            pattern,
+            regex,
+            dbMetadata.LOG);
+
+    verify(bigqueryClient, times(1))
+        .listRoutines(eq(datasetId), any(BigQuery.RoutineListOption.class));
+
+    assertNotNull(results);
+    List<Routine> resultList = new ArrayList<>(results);
+
+    assertEquals("Should contain all routines when pattern is null", 2, resultList.size());
+    assertTrue("Should contain proc_abc", resultList.contains(proc1));
+    assertTrue("Should contain func_123", resultList.contains(func1));
+  }
+
+  @Test
+  public void testFindMatchingBigQueryObjects_Routines_GetSpecific() {
+    String catalog = "p-cat";
+    String schema = "d-sch";
+    String procNameExact = "exactprocname";
+    DatasetId datasetId = DatasetId.of(catalog, schema);
+    RoutineId routineId = RoutineId.of(catalog, schema, procNameExact);
+    Routine mockRoutine = mockBigQueryRoutine(catalog, schema, procNameExact, "PROCEDURE", "desc");
+
+    when(bigqueryClient.getRoutine(eq(routineId))).thenReturn(mockRoutine);
+    Pattern regex = dbMetadata.compileSqlLikePattern(procNameExact);
+
+    List<Routine> results =
+        dbMetadata.findMatchingBigQueryObjects(
+            "Routine",
+            () -> bigqueryClient.listRoutines(datasetId, BigQuery.RoutineListOption.pageSize(500)),
+            (name) ->
+                bigqueryClient.getRoutine(
+                    RoutineId.of(datasetId.getProject(), datasetId.getDataset(), name)),
+            (rt) -> rt.getRoutineId().getRoutine(),
+            procNameExact,
+            regex,
+            dbMetadata.LOG);
+
+    verify(bigqueryClient, times(1)).getRoutine(eq(routineId));
+    verify(bigqueryClient, never())
+        .listRoutines(any(DatasetId.class), any(BigQuery.RoutineListOption.class));
+
+    assertNotNull(results);
+    List<Routine> resultList = new ArrayList<>(results);
+    assertEquals(1, resultList.size());
+    assertSame(mockRoutine, resultList.get(0));
+  }
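+
+  // Taken together, the three tests above pin down the dispatch findMatchingBigQueryObjects
+  // is expected to make. A sketch of the assumed decision (illustrative, not the driver's
+  // exact code):
+  //   boolean exact = pattern != null && !pattern.contains("%") && !pattern.contains("_");
+  //   return exact ? fetchByName.apply(pattern) : filter(listAll.get(), regex);
+  // i.e. wildcard or null patterns go through the list-and-filter supplier, while an exact
+  // name short-circuits to a single point lookup.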
+
+  @Test
+  public void testDefineGetProcedureColumnsSchema() {
+    Schema schema = dbMetadata.defineGetProcedureColumnsSchema();
+    assertNotNull(schema);
+    FieldList fields = schema.getFields();
+    assertEquals(20, fields.size());
+
+    Field procCat = fields.get("PROCEDURE_CAT");
+    assertEquals("PROCEDURE_CAT", procCat.getName());
+    assertEquals(StandardSQLTypeName.STRING, procCat.getType().getStandardType());
+    assertEquals(Field.Mode.NULLABLE, procCat.getMode());
+
+    Field colName = fields.get("COLUMN_NAME");
+    assertEquals("COLUMN_NAME", colName.getName());
+    assertEquals(StandardSQLTypeName.STRING, colName.getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, colName.getMode());
+
+    Field colType = fields.get("COLUMN_TYPE");
+    assertEquals("COLUMN_TYPE", colType.getName());
+    assertEquals(StandardSQLTypeName.INT64, colType.getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, colType.getMode());
+
+    Field dataType = fields.get("DATA_TYPE");
+    assertEquals("DATA_TYPE", dataType.getName());
+    assertEquals(StandardSQLTypeName.INT64, dataType.getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, dataType.getMode());
+
+    Field typeName = fields.get("TYPE_NAME");
+    assertEquals("TYPE_NAME", typeName.getName());
+    assertEquals(StandardSQLTypeName.STRING, typeName.getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, typeName.getMode());
+
+    Field ordinalPos = fields.get("ORDINAL_POSITION");
+    assertEquals("ORDINAL_POSITION", ordinalPos.getName());
+    assertEquals(StandardSQLTypeName.INT64, ordinalPos.getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, ordinalPos.getMode());
+
+    Field isNullable = fields.get("IS_NULLABLE");
+    assertEquals("IS_NULLABLE", isNullable.getName());
+    assertEquals(StandardSQLTypeName.STRING, isNullable.getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, isNullable.getMode());
+
+    Field specificName = fields.get("SPECIFIC_NAME");
+    assertEquals("SPECIFIC_NAME", specificName.getName());
+    assertEquals(StandardSQLTypeName.STRING, specificName.getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, specificName.getMode());
+  }
+
+  @Test
+  public void testDetermineTypeInfoFromDataType() {
+    // INT64
+    StandardSQLDataType sqlInt64 = mockStandardSQLDataType(StandardSQLTypeName.INT64);
+    BigQueryDatabaseMetaData.ColumnTypeInfo infoInt64 =
+        dbMetadata.determineTypeInfoFromDataType(sqlInt64, "p", "c", 1);
+    assertEquals(Types.BIGINT, infoInt64.jdbcType);
+    assertEquals("BIGINT", infoInt64.typeName);
+
+    // STRING
+    StandardSQLDataType sqlString = mockStandardSQLDataType(StandardSQLTypeName.STRING);
+    BigQueryDatabaseMetaData.ColumnTypeInfo infoString =
+        dbMetadata.determineTypeInfoFromDataType(sqlString, "p", "c", 1);
+    assertEquals(Types.NVARCHAR, infoString.jdbcType);
+    assertEquals("NVARCHAR", infoString.typeName);
+
+    // BOOL
+    StandardSQLDataType sqlBool = mockStandardSQLDataType(StandardSQLTypeName.BOOL);
+    BigQueryDatabaseMetaData.ColumnTypeInfo infoBool =
+        dbMetadata.determineTypeInfoFromDataType(sqlBool, "p", "c", 1);
+    assertEquals(Types.BOOLEAN, infoBool.jdbcType);
+    assertEquals("BOOLEAN", infoBool.typeName);
+
+    // STRUCT
+    StandardSQLDataType sqlStruct = mockStandardSQLDataType(StandardSQLTypeName.STRUCT);
+    BigQueryDatabaseMetaData.ColumnTypeInfo infoStruct =
+        dbMetadata.determineTypeInfoFromDataType(sqlStruct, "p", "c", 1);
+    assertEquals(Types.STRUCT, infoStruct.jdbcType);
+    assertEquals("STRUCT", infoStruct.typeName);
+
+    // Case: null typeKind from StandardSQLDataType (should default to VARCHAR)
+    StandardSQLDataType sqlNullKind = mock(StandardSQLDataType.class);
+    when(sqlNullKind.getTypeKind()).thenReturn(null);
+    BigQueryDatabaseMetaData.ColumnTypeInfo infoNullKind =
+        dbMetadata.determineTypeInfoFromDataType(sqlNullKind, "p", "c", 1);
+    assertEquals(Types.VARCHAR, infoNullKind.jdbcType);
+    assertEquals("VARCHAR", infoNullKind.typeName);
+
+    // Case: unknown typeKind from StandardSQLDataType (should default to VARCHAR)
+    StandardSQLDataType sqlUnknownKind = mock(StandardSQLDataType.class);
+    when(sqlUnknownKind.getTypeKind()).thenReturn("SUPER_DOOPER_TYPE");
+    BigQueryDatabaseMetaData.ColumnTypeInfo infoUnknownKind =
+        dbMetadata.determineTypeInfoFromDataType(sqlUnknownKind, "p", "c", 1);
+    assertEquals(Types.VARCHAR, infoUnknownKind.jdbcType);
+    assertEquals("VARCHAR", infoUnknownKind.typeName);
+  }
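+
+  // The mappings asserted above, collected in one place (BigQuery standard SQL type kind ->
+  // java.sql.Types), with VARCHAR as the fallback for a null or unrecognized kind:
+  //   INT64 -> BIGINT, STRING -> NVARCHAR, BOOL -> BOOLEAN, STRUCT -> STRUCT,
+  //   null / unknown -> VARCHAR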
+
+  @Test
+  public void testCreateProcedureColumnRow_BasicInParam() {
+    String catalog = "proj_x";
+    String schema = "data_y";
+    String procName = "proc_z";
+    String specificName = "proc_z_specific";
+
+    RoutineArgument arg = mockRoutineArgument("param_in", StandardSQLTypeName.STRING, "IN");
+    int ordinalPos = 1;
+
+    List<FieldValue> row =
+        dbMetadata.createProcedureColumnRow(
+            catalog, schema, procName, specificName, arg, ordinalPos, "param_in");
+
+    assertNotNull(row);
+    assertEquals(20, row.size());
+
+    assertEquals(catalog, row.get(0).getStringValue()); // 1. PROCEDURE_CAT
+    assertEquals(schema, row.get(1).getStringValue()); // 2. PROCEDURE_SCHEM
+    assertEquals(procName, row.get(2).getStringValue()); // 3. PROCEDURE_NAME
+    assertEquals("param_in", row.get(3).getStringValue()); // 4. COLUMN_NAME
+    assertEquals(
+        String.valueOf(DatabaseMetaData.procedureColumnIn),
+        row.get(4).getStringValue()); // 5. COLUMN_TYPE
+    assertEquals(String.valueOf(Types.NVARCHAR), row.get(5).getStringValue()); // 6. DATA_TYPE
+    assertEquals("NVARCHAR", row.get(6).getStringValue()); // 7. TYPE_NAME
+    assertTrue(row.get(7).isNull()); // 8. PRECISION
+    assertTrue(row.get(8).isNull()); // 9. LENGTH
+    assertTrue(row.get(9).isNull()); // 10. SCALE
+    assertTrue(row.get(10).isNull()); // 11. RADIX
+    assertEquals(
+        String.valueOf(DatabaseMetaData.procedureNullable),
+        row.get(11).getStringValue()); // 12. NULLABLE
+    assertTrue(row.get(12).isNull()); // 13. REMARKS
+    assertTrue(row.get(13).isNull()); // 14. COLUMN_DEF
+    assertTrue(row.get(14).isNull()); // 15. SQL_DATA_TYPE
+    assertTrue(row.get(15).isNull()); // 16. SQL_DATETIME_SUB
+    assertTrue(row.get(16).isNull()); // 17. CHAR_OCTET_LENGTH
+    assertEquals(String.valueOf(ordinalPos), row.get(17).getStringValue()); // 18. ORDINAL_POSITION
+    assertEquals("YES", row.get(18).getStringValue()); // 19. IS_NULLABLE
+    assertEquals(specificName, row.get(19).getStringValue()); // 20. SPECIFIC_NAME
+  }
+
+  @Test
+  public void testCreateProcedureColumnRow_NumericOutParam() {
+    RoutineArgument arg = mockRoutineArgument("param_out_num", StandardSQLTypeName.NUMERIC, "OUT");
+    int ordinalPos = 2;
+
+    List<FieldValue> row =
+        dbMetadata.createProcedureColumnRow(
+            "p", "d", "proc", "proc_spec", arg, ordinalPos, "param_out_num");
+
+    assertEquals(
+        String.valueOf(DatabaseMetaData.procedureColumnOut),
+        row.get(4).getStringValue()); // COLUMN_TYPE
+    assertEquals(String.valueOf(Types.NUMERIC), row.get(5).getStringValue()); // DATA_TYPE
+    assertEquals("NUMERIC", row.get(6).getStringValue()); // TYPE_NAME
+    assertEquals("38", row.get(7).getStringValue()); // PRECISION
+    assertEquals("9", row.get(9).getStringValue()); // SCALE
+    assertEquals("10", row.get(10).getStringValue()); // RADIX
+  }
+
+  @Test
+  public void testCreateProcedureColumnRow_InOutTimestampParam() {
+    RoutineArgument arg =
+        mockRoutineArgument("param_inout_ts", StandardSQLTypeName.TIMESTAMP, "INOUT");
+    List<FieldValue> row =
+        dbMetadata.createProcedureColumnRow(
+            "p", "d", "proc", "proc_spec", arg, 3, "param_inout_ts");
+
+    assertEquals(
+        String.valueOf(DatabaseMetaData.procedureColumnInOut), row.get(4).getStringValue());
+    assertEquals(String.valueOf(Types.TIMESTAMP), row.get(5).getStringValue());
+    assertEquals("TIMESTAMP", row.get(6).getStringValue());
+    assertEquals("29", row.get(7).getStringValue()); // PRECISION for TIMESTAMP
+  }
+
+  @Test
+  public void testCreateProcedureColumnRow_UnknownModeDefaultsToUnknownType() {
+    RoutineArgument arg =
+        mockRoutineArgument("param_unknown_mode", StandardSQLTypeName.BOOL, "UNKNOWN_MODE");
+    List<FieldValue> row =
+        dbMetadata.createProcedureColumnRow(
+            "p", "d", "proc", "proc_spec", arg, 1, "param_unknown_mode");
+    assertEquals(
+        String.valueOf(DatabaseMetaData.procedureColumnUnknown), row.get(4).getStringValue());
+  }
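+
+  // Background for the literals above: BigQuery NUMERIC is a fixed-point DECIMAL(38, 9), hence
+  // PRECISION "38", SCALE "9" and RADIX "10"; the TIMESTAMP precision of "29" is assumed to be
+  // the rendered-literal width, yyyy-MM-dd HH:mm:ss plus a dot and nine fractional digits.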
+
+  @Test
+  public void testCreateProcedureColumnRow_NullArgumentObject() {
+    List<FieldValue> row =
+        dbMetadata.createProcedureColumnRow(
+            "cat", "schem", "proc", "spec", null, 1, "fallback_arg_name");
+
+    assertNotNull(row);
+    assertEquals(20, row.size());
+    assertEquals("fallback_arg_name", row.get(3).getStringValue()); // COLUMN_NAME
+    assertEquals(String.valueOf(Types.VARCHAR), row.get(5).getStringValue()); // DATA_TYPE
+    assertEquals("VARCHAR", row.get(6).getStringValue()); // TYPE_NAME
+    assertEquals(
+        String.valueOf(DatabaseMetaData.procedureColumnUnknown), row.get(4).getStringValue());
+  }
+
+  @Test
+  public void testCreateProcedureColumnRow_NullArgumentDataType() {
+    RoutineArgument argWithNullDataType = mock(RoutineArgument.class);
+    when(argWithNullDataType.getName()).thenReturn("arg_null_type");
+    when(argWithNullDataType.getDataType()).thenReturn(null);
+    when(argWithNullDataType.getMode()).thenReturn("IN");
+
+    List<FieldValue> row =
+        dbMetadata.createProcedureColumnRow(
+            "cat", "schem", "proc", "spec", argWithNullDataType, 1, "arg_null_type");
+
+    assertNotNull(row);
+    assertEquals("arg_null_type", row.get(3).getStringValue());
+    assertEquals(
+        String.valueOf(Types.VARCHAR), row.get(5).getStringValue()); // DATA_TYPE should default
+    assertEquals("VARCHAR", row.get(6).getStringValue()); // TYPE_NAME should default
+  }
+
+  @Test
+  public void testProcessProcedureArguments_NoArguments() {
+    Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema();
+    FieldList resultFields = resultSchema.getFields();
+    List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+    Pattern noColumnNamePattern = null; // Match all columns
+
+    Routine routine =
+        mockBigQueryRoutineWithArgs(
+            "p", "d", "proc_no_args", "PROCEDURE", "desc", Collections.emptyList()); // No arguments
+
+    dbMetadata.processProcedureArguments(
+        routine, noColumnNamePattern, collectedResults, resultFields);
+    assertTrue(collectedResults.isEmpty());
+  }
+
+  @Test
+  public void testProcessProcedureArguments_WithArgumentsNoFilter() {
+    Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema();
+    FieldList resultFields = resultSchema.getFields();
+    List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+    Pattern noColumnNamePattern = null;
+
+    RoutineArgument arg1 = mockRoutineArgument("param1", StandardSQLTypeName.INT64, "IN");
+    RoutineArgument arg2 = mockRoutineArgument("param2", StandardSQLTypeName.STRING, "OUT");
+    Routine routine =
+        mockBigQueryRoutineWithArgs(
+            "p", "d", "proc_two_args", "PROCEDURE", "desc", Arrays.asList(arg1, arg2));
+
+    dbMetadata.processProcedureArguments(
+        routine, noColumnNamePattern, collectedResults, resultFields);
+    assertEquals(2, collectedResults.size());
+    assertEquals("param1", collectedResults.get(0).get("COLUMN_NAME").getStringValue());
+    assertEquals("param2", collectedResults.get(1).get("COLUMN_NAME").getStringValue());
+    assertEquals(
+        String.valueOf(1), collectedResults.get(0).get("ORDINAL_POSITION").getStringValue());
+    assertEquals(
+        String.valueOf(2), collectedResults.get(1).get("ORDINAL_POSITION").getStringValue());
+  }
+
+  @Test
+  public void testProcessProcedureArguments_WithColumnNameFilter() {
+    Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema();
+    FieldList resultFields = resultSchema.getFields();
+    List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+    Pattern columnNamePattern = Pattern.compile(".*_id"); // Match columns ending with _id
+
+    RoutineArgument arg1 = mockRoutineArgument("user_id", StandardSQLTypeName.INT64, "IN");
+    RoutineArgument arg2 = mockRoutineArgument("user_name", StandardSQLTypeName.STRING, "IN");
+    RoutineArgument arg3 = mockRoutineArgument("session_id", StandardSQLTypeName.STRING, "INOUT");
+
+    Routine routine =
+        mockBigQueryRoutineWithArgs(
+            "p", "d", "proc_filtered_args", "PROCEDURE", "desc", Arrays.asList(arg1, arg2, arg3));
+
+    dbMetadata.processProcedureArguments(
+        routine, columnNamePattern, collectedResults, resultFields);
+    assertEquals(2, collectedResults.size());
+    assertEquals("user_id", collectedResults.get(0).get("COLUMN_NAME").getStringValue());
+    assertEquals("session_id", collectedResults.get(1).get("COLUMN_NAME").getStringValue());
+  }
+
+  @Test
+  public void testProcessProcedureArguments_HandlesNullArgumentInList() {
+    Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema();
+    FieldList resultFields = resultSchema.getFields();
+    List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+    Pattern noColumnNamePattern = null;
+
+    RoutineArgument arg1 = mockRoutineArgument("valid_arg", StandardSQLTypeName.INT64, "IN");
+    List<RoutineArgument> argsWithNull = new ArrayList<>();
+    argsWithNull.add(arg1);
+    argsWithNull.add(null); // Add a null argument
+    RoutineArgument arg3 =
+        mockRoutineArgument("another_valid_arg", StandardSQLTypeName.STRING, "OUT");
+    argsWithNull.add(arg3);
+
+    Routine routine = mock(Routine.class);
+    RoutineId routineId = RoutineId.of("p", "d", "proc_with_null_arg_in_list");
+    when(routine.getRoutineId()).thenReturn(routineId);
+    when(routine.getRoutineType()).thenReturn("PROCEDURE");
+    when(routine.getArguments()).thenReturn(argsWithNull);
+
+    dbMetadata.processProcedureArguments(
+        routine, noColumnNamePattern, collectedResults, resultFields);
+
+    assertEquals(3, collectedResults.size());
+    assertEquals("valid_arg", collectedResults.get(0).get("COLUMN_NAME").getStringValue());
+    // The fallback name generated by processProcedureArguments when an argument in the list is null
+    assertEquals(
+        "arg_retrieval_err_2", collectedResults.get(1).get("COLUMN_NAME").getStringValue());
+    assertEquals(
+        String.valueOf(Types.VARCHAR),
+        collectedResults.get(1).get("DATA_TYPE").getStringValue()); // Default type for null arg
+    assertEquals("another_valid_arg", collectedResults.get(2).get("COLUMN_NAME").getStringValue());
+  }
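+
+  // As the test above pins down, a null entry in getArguments() is not dropped: it produces a
+  // placeholder row whose COLUMN_NAME is assumed to be "arg_retrieval_err_" plus the 1-based
+  // ordinal (position 2 here), typed with the VARCHAR default used for unknown data types.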
+
+  private FieldValueList createProcedureColumnRowForSortTest(
+      String cat,
+      String schem,
+      String procName,
+      String specName,
+      String colName,
+      int ordinal,
+      FieldList schemaFields) {
+    List<FieldValue> values = new ArrayList<>(20);
+    values.add(dbMetadata.createStringFieldValue(cat)); // 1. PROC_CAT
+    values.add(dbMetadata.createStringFieldValue(schem)); // 2. PROC_SCHEM
+    values.add(dbMetadata.createStringFieldValue(procName)); // 3. PROC_NAME
+    values.add(dbMetadata.createStringFieldValue(colName)); // 4. COLUMN_NAME
+    values.add(
+        dbMetadata.createLongFieldValue(
+            (long) DatabaseMetaData.procedureColumnIn)); // 5. COLUMN_TYPE
+    values.add(dbMetadata.createLongFieldValue((long) Types.VARCHAR)); // 6. DATA_TYPE
+    values.add(dbMetadata.createStringFieldValue("VARCHAR")); // 7. TYPE_NAME
+    values.add(dbMetadata.createNullFieldValue()); // 8. PRECISION
+    values.add(dbMetadata.createNullFieldValue()); // 9. LENGTH
+    values.add(dbMetadata.createNullFieldValue()); // 10. SCALE
+    values.add(dbMetadata.createNullFieldValue()); // 11. RADIX
+    values.add(
+        dbMetadata.createLongFieldValue((long) DatabaseMetaData.procedureNullable)); // 12. NULLABLE
+    values.add(dbMetadata.createStringFieldValue("Remark for " + colName)); // 13. REMARKS
+    values.add(dbMetadata.createNullFieldValue()); // 14. COLUMN_DEF
+    values.add(dbMetadata.createNullFieldValue()); // 15. SQL_DATA_TYPE
+    values.add(dbMetadata.createNullFieldValue()); // 16. SQL_DATETIME_SUB
+    values.add(dbMetadata.createNullFieldValue()); // 17. CHAR_OCTET_LENGTH
+    values.add(dbMetadata.createLongFieldValue((long) ordinal)); // 18. ORDINAL_POSITION
+    values.add(dbMetadata.createStringFieldValue("YES")); // 19. IS_NULLABLE
+    values.add(dbMetadata.createStringFieldValue(specName)); // 20. SPECIFIC_NAME
+    return FieldValueList.of(values, schemaFields);
+  }
+
+  @Test
+  public void testDefineGetProcedureColumnsComparator() {
+    Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema();
+    FieldList schemaFields = resultSchema.getFields();
+    List<FieldValueList> results = new ArrayList<>();
+
+    // Order: PROC_CAT, PROC_SCHEM, PROC_NAME, SPECIFIC_NAME, COLUMN_NAME
+    results.add(
+        createProcedureColumnRowForSortTest(
+            "cat_b", "sch_y", "proc_1", "proc_1_spec", "param_a", 1, schemaFields));
+    results.add(
+        createProcedureColumnRowForSortTest(
+            "cat_a", "sch_z", "proc_alpha", "proc_alpha_spec", "arg_z", 2, schemaFields));
+    results.add(
+        createProcedureColumnRowForSortTest(
+            "cat_a",
+            "sch_z",
+            "proc_alpha",
+            "proc_alpha_spec",
+            "arg_m",
+            1,
+            schemaFields)); // Same proc, different col
+    results.add(
+        createProcedureColumnRowForSortTest(
+            null, "sch_x", "proc_beta", "proc_beta_spec", "col_first", 1, schemaFields));
+    results.add(
+        createProcedureColumnRowForSortTest(
+            "cat_a", null, "proc_gamma", "proc_gamma_spec", "input1", 1, schemaFields));
+
+    Comparator<FieldValueList> comparator =
+        dbMetadata.defineGetProcedureColumnsComparator(schemaFields);
+    assertNotNull(comparator);
+    dbMetadata.sortResults(results, comparator, "getProcedureColumns", dbMetadata.LOG);
+
+    assertEquals(5, results.size());
+
+    // 1. Null cat, sch_x, proc_beta, proc_beta_spec, col_first
+    assertTrue(results.get(0).get("PROCEDURE_CAT").isNull());
+    assertEquals("sch_x", results.get(0).get("PROCEDURE_SCHEM").getStringValue());
+    assertEquals("proc_beta", results.get(0).get("PROCEDURE_NAME").getStringValue());
+    assertEquals("proc_beta_spec", results.get(0).get("SPECIFIC_NAME").getStringValue());
+    assertEquals("col_first", results.get(0).get("COLUMN_NAME").getStringValue());
+
+    // 2. cat_a, Null schem, proc_gamma, proc_gamma_spec, input1
+    assertEquals("cat_a", results.get(1).get("PROCEDURE_CAT").getStringValue());
+    assertTrue(results.get(1).get("PROCEDURE_SCHEM").isNull());
+    assertEquals("proc_gamma", results.get(1).get("PROCEDURE_NAME").getStringValue());
+    assertEquals("proc_gamma_spec", results.get(1).get("SPECIFIC_NAME").getStringValue());
+    assertEquals("input1", results.get(1).get("COLUMN_NAME").getStringValue());
+
+    // 3. cat_a, sch_z, proc_alpha, proc_alpha_spec, arg_m (m before z)
+    assertEquals("cat_a", results.get(2).get("PROCEDURE_CAT").getStringValue());
+    assertEquals("sch_z", results.get(2).get("PROCEDURE_SCHEM").getStringValue());
+    assertEquals("proc_alpha", results.get(2).get("PROCEDURE_NAME").getStringValue());
+    assertEquals("proc_alpha_spec", results.get(2).get("SPECIFIC_NAME").getStringValue());
+    assertEquals("arg_m", results.get(2).get("COLUMN_NAME").getStringValue());
+
+    // 4. cat_a, sch_z, proc_alpha, proc_alpha_spec, arg_z
+    assertEquals("cat_a", results.get(3).get("PROCEDURE_CAT").getStringValue());
+    assertEquals("sch_z", results.get(3).get("PROCEDURE_SCHEM").getStringValue());
+    assertEquals("proc_alpha", results.get(3).get("PROCEDURE_NAME").getStringValue());
+    assertEquals("proc_alpha_spec", results.get(3).get("SPECIFIC_NAME").getStringValue());
+    assertEquals("arg_z", results.get(3).get("COLUMN_NAME").getStringValue());
+
+    // 5. cat_b, sch_y, proc_1, proc_1_spec, param_a
+    assertEquals("cat_b", results.get(4).get("PROCEDURE_CAT").getStringValue());
+    assertEquals("sch_y", results.get(4).get("PROCEDURE_SCHEM").getStringValue());
+    assertEquals("proc_1", results.get(4).get("PROCEDURE_NAME").getStringValue());
+    assertEquals("proc_1_spec", results.get(4).get("SPECIFIC_NAME").getStringValue());
+    assertEquals("param_a", results.get(4).get("COLUMN_NAME").getStringValue());
+  }
+
+  @Test
+  public void testListMatchingProcedureIdsFromDatasets() throws Exception {
+    String catalog = "test-proj";
+    String schema1Name = "dataset1";
+    String schema2Name = "dataset2";
+    Dataset dataset1 = mockBigQueryDataset(catalog, schema1Name);
+    Dataset dataset2 = mockBigQueryDataset(catalog, schema2Name);
+    List<Dataset> datasetsToScan = Arrays.asList(dataset1, dataset2);
+
+    Routine proc1_ds1 = mockBigQueryRoutine(catalog, schema1Name, "proc_a", "PROCEDURE", "desc a");
+    Routine func1_ds1 = mockBigQueryRoutine(catalog, schema1Name, "func_b", "FUNCTION", "desc b");
+    Routine proc2_ds2 = mockBigQueryRoutine(catalog, schema2Name, "proc_c", "PROCEDURE", "desc c");
+
+    Page<Routine> page1 = mock(Page.class);
+    when(page1.iterateAll()).thenReturn(Arrays.asList(proc1_ds1, func1_ds1));
+    when(bigqueryClient.listRoutines(eq(dataset1.getDatasetId()), any(RoutineListOption.class)))
+        .thenReturn(page1);
+
+    Page<Routine> page2 = mock(Page.class);
+    when(page2.iterateAll()).thenReturn(Collections.singletonList(proc2_ds2));
+    when(bigqueryClient.listRoutines(eq(dataset2.getDatasetId()), any(RoutineListOption.class)))
+        .thenReturn(page2);
+
+    ExecutorService mockExecutor = mock(ExecutorService.class);
+    doAnswer(
+            invocation -> {
+              Callable<?> callable = invocation.getArgument(0);
+              @SuppressWarnings("unchecked") // mock(Future.class) returns a raw Future
+              Future<Object> mockedFuture = mock(Future.class);
+
+              try {
+                Object result = callable.call();
+                doReturn(result).when(mockedFuture).get();
+              } catch (InterruptedException interruptedException) {
+                doThrow(interruptedException).when(mockedFuture).get();
+              } catch (Exception e) {
+                doThrow(new ExecutionException(e)).when(mockedFuture).get();
+              }
+              return mockedFuture;
+            })
+        .when(mockExecutor)
+        .submit(any(Callable.class));
+
+    List<RoutineId> resultIds =
+        dbMetadata.listMatchingProcedureIdsFromDatasets(
+            datasetsToScan, null, null, mockExecutor, catalog, dbMetadata.LOG);
+
+    assertEquals(2, resultIds.size());
+    assertTrue(resultIds.contains(proc1_ds1.getRoutineId()));
+    assertTrue(resultIds.contains(proc2_ds2.getRoutineId()));
+    assertFalse(resultIds.contains(func1_ds1.getRoutineId())); // Should not contain functions
+
+    verify(mockExecutor, times(2)).submit(any(Callable.class));
+  }
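+
+  // The doAnswer stub above runs each submitted Callable inline and hands back a pre-resolved
+  // Future, so the scan is deterministic. An equivalent, shorter setup (assuming Guava is on
+  // the test classpath) would be a same-thread executor, which needs no stubbing:
+  //   ExecutorService direct = MoreExecutors.newDirectExecutorService();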
+
+  @Test
+  public void testSubmitProcedureArgumentProcessingJobs_Basic() throws InterruptedException {
+    String catalog = "p";
+    String schemaName = "d";
+    RoutineArgument arg1 = mockRoutineArgument("arg1_name", StandardSQLTypeName.STRING, "IN");
+    Routine proc1 =
+        mockBigQueryRoutineWithArgs(
+            catalog, schemaName, "proc1", "PROCEDURE", "desc1", Collections.singletonList(arg1));
+    Routine func1 =
+        mockBigQueryRoutineWithArgs(
+            catalog,
+            schemaName,
+            "func1",
+            "FUNCTION",
+            "desc_func",
+            Collections.emptyList()); // Should be skipped
+    Routine proc2 =
+        mockBigQueryRoutineWithArgs(
+            catalog, schemaName, "proc2", "PROCEDURE", "desc2", Collections.emptyList());
+
+    List<Routine> fullRoutines = Arrays.asList(proc1, func1, proc2);
+    Pattern columnNameRegex = null;
+    List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>());
+    Schema resultSchema = dbMetadata.defineGetProcedureColumnsSchema();
+    FieldList resultSchemaFields = resultSchema.getFields();
+
+    ExecutorService mockExecutor = mock(ExecutorService.class);
+    List<Future<?>> processingTaskFutures = new ArrayList<>();
+
+    // Capture the runnables submitted to the executor
+    List<Runnable> submittedRunnables = new ArrayList<>();
+    doAnswer(
+            invocation -> {
+              Runnable runnable = invocation.getArgument(0);
+              submittedRunnables.add(runnable);
+              Future<?> future = mock(Future.class);
+              return future;
+            })
+        .when(mockExecutor)
+        .submit(any(Runnable.class));
+
+    dbMetadata.submitProcedureArgumentProcessingJobs(
+        fullRoutines,
+        columnNameRegex,
+        collectedResults,
+        resultSchemaFields,
+        mockExecutor,
+        processingTaskFutures,
+        dbMetadata.LOG);
+
+    verify(mockExecutor, times(2)).submit(any(Runnable.class));
+    assertEquals(2, processingTaskFutures.size());
+  }
+
+  @Test
+  public void testDefineGetTableTypesSchema() {
+    Schema schema = BigQueryDatabaseMetaData.defineGetTableTypesSchema();
+
+    assertNotNull("Schema should not be null", schema);
+    FieldList fields = schema.getFields();
+    assertEquals("Should have one column", 1, fields.size());
+
+    Field tableTypeField = fields.get("TABLE_TYPE");
+    assertNotNull("TABLE_TYPE field should exist", tableTypeField);
+    assertEquals("Field name should be TABLE_TYPE", "TABLE_TYPE", tableTypeField.getName());
+    assertEquals(
+        "Field type should be STRING",
+        StandardSQLTypeName.STRING,
+        tableTypeField.getType().getStandardType());
+    assertEquals("Field mode should be REQUIRED", Field.Mode.REQUIRED, tableTypeField.getMode());
+  }
+
+  @Test
+  public void testPrepareGetTableTypesRows() {
+    Schema schema = BigQueryDatabaseMetaData.defineGetTableTypesSchema();
+    List<FieldValueList> rows = BigQueryDatabaseMetaData.prepareGetTableTypesRows(schema);
+
+    assertNotNull("Rows list should not be null", rows);
+    String[] expectedTableTypes = {"EXTERNAL", "MATERIALIZED VIEW", "SNAPSHOT", "TABLE", "VIEW"};
+    assertEquals(
+        "Should have " + expectedTableTypes.length + " rows",
+        expectedTableTypes.length,
+        rows.size());
+
+    Set<String> foundTypes = new HashSet<>();
+    for (int i = 0; i < rows.size(); i++) {
+      FieldValueList row = rows.get(i);
+      assertEquals("Row " + i + " should have 1 field value", 1, row.size());
+      assertFalse("FieldValue in row " + i + " should not be SQL NULL", row.get(0).isNull());
+
+      String tableType = row.get(0).getStringValue();
+      foundTypes.add(tableType);
+    }
+
+    assertEquals(
+        "All expected table types should be present and correctly mapped",
+        new HashSet<>(Arrays.asList(expectedTableTypes)),
+        foundTypes);
+  }
+
+  @Test
+  public void testGetTableTypes() throws SQLException {
+    try (ResultSet rs = dbMetadata.getTableTypes()) {
+      assertNotNull("ResultSet from getTableTypes() should not be null", rs);
+
+      ResultSetMetaData rsmd = rs.getMetaData();
+      assertNotNull("ResultSetMetaData should not be null", rsmd);
+      assertEquals("Should have one column", 1, rsmd.getColumnCount());
+      assertEquals("Column name should be TABLE_TYPE", "TABLE_TYPE", rsmd.getColumnName(1));
+      assertEquals("Column type should be NVARCHAR", Types.NVARCHAR, rsmd.getColumnType(1));
+
+      List<String> actualTableTypes = new ArrayList<>();
+      while (rs.next()) {
+        actualTableTypes.add(rs.getString("TABLE_TYPE"));
+      }
+
+      String[] expectedTableTypes = {"EXTERNAL", "MATERIALIZED VIEW", "SNAPSHOT", "TABLE", "VIEW"};
+      assertEquals(
+          "Number of table types should match", expectedTableTypes.length, actualTableTypes.size());
+
+      Set<String> expectedSet = new HashSet<>(Arrays.asList(expectedTableTypes));
+      Set<String> actualSet = new HashSet<>(actualTableTypes);
+      assertEquals(
+          "All expected table types should be present in the ResultSet", expectedSet, actualSet);
+    }
+  }
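+
+  // The metadata calls covered below (getSuperTables, getSuperTypes, getAttributes, ...) are
+  // all asserted against the same shape: a schema-complete but row-less ResultSet. A minimal
+  // sketch of that contract, with createEmptyResultSet as a hypothetical helper name:
+  //   ResultSet rs = createEmptyResultSet(defineGetSuperTablesSchema());
+  //   assert !rs.next() && rs.getMetaData().getColumnCount() == 4;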
+
+  @Test
+  public void testDefineGetSuperTablesSchema() {
+    Schema schema = dbMetadata.defineGetSuperTablesSchema();
+    assertNotNull("Schema should not be null", schema);
+    FieldList fields = schema.getFields();
+    assertEquals("Schema should have 4 fields", 4, fields.size());
+
+    Field tableCat = fields.get("TABLE_CAT");
+    assertNotNull(tableCat);
+    assertEquals("TABLE_CAT", tableCat.getName());
+    assertEquals(StandardSQLTypeName.STRING, tableCat.getType().getStandardType());
+    assertEquals(Field.Mode.NULLABLE, tableCat.getMode());
+
+    Field tableSchem = fields.get("TABLE_SCHEM");
+    assertNotNull(tableSchem);
+    assertEquals("TABLE_SCHEM", tableSchem.getName());
+    assertEquals(StandardSQLTypeName.STRING, tableSchem.getType().getStandardType());
+    assertEquals(Field.Mode.NULLABLE, tableSchem.getMode());
+
+    Field tableName = fields.get("TABLE_NAME");
+    assertNotNull(tableName);
+    assertEquals("TABLE_NAME", tableName.getName());
+    assertEquals(StandardSQLTypeName.STRING, tableName.getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, tableName.getMode());
+
+    Field superTableName = fields.get("SUPERTABLE_NAME");
+    assertNotNull(superTableName);
+    assertEquals("SUPERTABLE_NAME", superTableName.getName());
+    assertEquals(StandardSQLTypeName.STRING, superTableName.getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, superTableName.getMode());
+  }
+
+  @Test
+  public void testGetSuperTables_ReturnsEmptyResultSetWithCorrectMetadata() throws SQLException {
+    try (ResultSet rs =
+        dbMetadata.getSuperTables("testCatalog", "testSchemaPattern", "testTableNamePattern")) {
+      assertNotNull("ResultSet should not be null", rs);
+      assertFalse("ResultSet should be empty (next() should return false)", rs.next());
+
+      ResultSetMetaData metaData = rs.getMetaData();
+      assertNotNull("ResultSetMetaData should not be null", metaData);
+      assertEquals("ResultSetMetaData should have 4 columns", 4, metaData.getColumnCount());
+
+      // Column 1: TABLE_CAT
+      assertEquals("TABLE_CAT", metaData.getColumnName(1));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(1)); // STRING maps to NVARCHAR
+      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(1));
+
+      // Column 2: TABLE_SCHEM
+      assertEquals("TABLE_SCHEM", metaData.getColumnName(2));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(2));
+      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(2));
+
+      // Column 3: TABLE_NAME
+      assertEquals("TABLE_NAME", metaData.getColumnName(3));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(3));
+      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3)); // REQUIRED
+
+      // Column 4: SUPERTABLE_NAME
+      assertEquals("SUPERTABLE_NAME", metaData.getColumnName(4));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(4));
+      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(4)); // REQUIRED
+    }
+  }
+
+  @Test
+  public void testDefineGetSuperTypesSchema() {
+    Schema schema = dbMetadata.defineGetSuperTypesSchema();
+    assertNotNull("Schema should not be null", schema);
+    FieldList fields = schema.getFields();
+    assertEquals("Schema should have 6 fields", 6, fields.size());
+
+    Field typeCat = fields.get("TYPE_CAT");
+    assertNotNull(typeCat);
+    assertEquals("TYPE_CAT", typeCat.getName());
+    assertEquals(StandardSQLTypeName.STRING, typeCat.getType().getStandardType());
+    assertEquals(Field.Mode.NULLABLE, typeCat.getMode());
+
+    Field typeSchem = fields.get("TYPE_SCHEM");
+    assertNotNull(typeSchem);
+    assertEquals("TYPE_SCHEM", typeSchem.getName());
+    assertEquals(StandardSQLTypeName.STRING, typeSchem.getType().getStandardType());
+    assertEquals(Field.Mode.NULLABLE, typeSchem.getMode());
+
+    Field typeName = fields.get("TYPE_NAME");
+    assertNotNull(typeName);
+    assertEquals("TYPE_NAME", typeName.getName());
+    assertEquals(StandardSQLTypeName.STRING, typeName.getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, typeName.getMode());
+
+    Field superTypeCat = fields.get("SUPERTYPE_CAT");
+    assertNotNull(superTypeCat);
+    assertEquals("SUPERTYPE_CAT", superTypeCat.getName());
+    assertEquals(StandardSQLTypeName.STRING, superTypeCat.getType().getStandardType());
+    assertEquals(Field.Mode.NULLABLE, superTypeCat.getMode());
+
+    Field superTypeSchem = fields.get("SUPERTYPE_SCHEM");
+    assertNotNull(superTypeSchem);
+    assertEquals("SUPERTYPE_SCHEM", superTypeSchem.getName());
+    assertEquals(StandardSQLTypeName.STRING, superTypeSchem.getType().getStandardType());
+    assertEquals(Field.Mode.NULLABLE, superTypeSchem.getMode());
+
+    Field superTypeName = fields.get("SUPERTYPE_NAME");
+    assertNotNull(superTypeName);
+    assertEquals("SUPERTYPE_NAME", superTypeName.getName());
+    assertEquals(StandardSQLTypeName.STRING, superTypeName.getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, superTypeName.getMode());
+  }
+
+  @Test
+  public void testGetSuperTypes_ReturnsEmptyResultSetWithCorrectMetadata() throws SQLException {
+    try (ResultSet rs =
+        dbMetadata.getSuperTypes("testCatalog", "testSchemaPattern", "testTypeNamePattern")) {
+      assertNotNull("ResultSet should not be null", rs);
+      assertFalse("ResultSet should be empty (next() should return false)", rs.next());
+
+      ResultSetMetaData metaData = rs.getMetaData();
+      assertNotNull("ResultSetMetaData should not be null", metaData);
+      assertEquals("ResultSetMetaData should have 6 columns", 6, metaData.getColumnCount());
+
+      // Column 1: TYPE_CAT
+      assertEquals("TYPE_CAT", metaData.getColumnName(1));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(1));
+      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(1));
+
+      // Column 2: TYPE_SCHEM
+      assertEquals("TYPE_SCHEM", metaData.getColumnName(2));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(2));
+      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(2));
+
+      // Column 3: TYPE_NAME
+      assertEquals("TYPE_NAME", metaData.getColumnName(3));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(3));
+      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3));
+
+      // Column 4: SUPERTYPE_CAT
+      assertEquals("SUPERTYPE_CAT", metaData.getColumnName(4));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(4));
+      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(4));
+
+      // Column 5: SUPERTYPE_SCHEM
+      assertEquals("SUPERTYPE_SCHEM", metaData.getColumnName(5));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(5));
+      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(5));
+
+      // Column 6: SUPERTYPE_NAME
+      assertEquals("SUPERTYPE_NAME", metaData.getColumnName(6));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(6));
+      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(6));
+    }
+  }
assertEquals("Schema should have 21 fields", 21, fields.size()); + + assertEquals("TYPE_CAT", fields.get(0).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(0).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(0).getMode()); + + assertEquals("ATTR_NAME", fields.get(3).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(3).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(3).getMode()); + + assertEquals("DATA_TYPE", fields.get(4).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(4).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(4).getMode()); + + assertEquals("ORDINAL_POSITION", fields.get(15).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(15).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(15).getMode()); + + assertEquals("IS_NULLABLE", fields.get(16).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(16).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(16).getMode()); + + assertEquals("SOURCE_DATA_TYPE", fields.get(20).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(20).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(20).getMode()); + } + + @Test + public void testGetAttributes_ReturnsEmptyResultSet() throws SQLException { + try (ResultSet rs = + dbMetadata.getAttributes("testCat", "testSchema", "testType", "testAttr%")) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertEquals("ResultSetMetaData should have 21 columns", 21, metaData.getColumnCount()); + + assertEquals("TYPE_CAT", metaData.getColumnName(1)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(1)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(1)); + + assertEquals("ATTR_NAME", metaData.getColumnName(4)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(4)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(4)); + + assertEquals("DATA_TYPE", metaData.getColumnName(5)); + assertEquals(Types.BIGINT, metaData.getColumnType(5)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(5)); + + assertEquals("ORDINAL_POSITION", metaData.getColumnName(16)); + assertEquals(Types.BIGINT, metaData.getColumnType(16)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(16)); + + assertEquals("IS_NULLABLE", metaData.getColumnName(17)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(17)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(17)); + + assertEquals("SOURCE_DATA_TYPE", metaData.getColumnName(21)); + assertEquals(Types.BIGINT, metaData.getColumnType(21)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(21)); + } + } + + @Test + public void testDefineGetBestRowIdentifierSchema() { + Schema schema = dbMetadata.defineGetBestRowIdentifierSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 8 fields", 8, fields.size()); + + assertEquals("SCOPE", fields.get(0).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(0).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(0).getMode()); + + assertEquals("COLUMN_NAME", fields.get(1).getName()); + assertEquals(StandardSQLTypeName.STRING, 
+
+  @Test
+  public void testDefineGetBestRowIdentifierSchema() {
+    Schema schema = dbMetadata.defineGetBestRowIdentifierSchema();
+    assertNotNull("Schema should not be null", schema);
+    FieldList fields = schema.getFields();
+    assertEquals("Schema should have 8 fields", 8, fields.size());
+
+    assertEquals("SCOPE", fields.get(0).getName());
+    assertEquals(StandardSQLTypeName.INT64, fields.get(0).getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, fields.get(0).getMode());
+
+    assertEquals("COLUMN_NAME", fields.get(1).getName());
+    assertEquals(StandardSQLTypeName.STRING, fields.get(1).getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, fields.get(1).getMode());
+
+    assertEquals("DATA_TYPE", fields.get(2).getName());
+    assertEquals(StandardSQLTypeName.INT64, fields.get(2).getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, fields.get(2).getMode());
+
+    assertEquals("BUFFER_LENGTH", fields.get(5).getName());
+    assertEquals(StandardSQLTypeName.INT64, fields.get(5).getType().getStandardType());
+    assertEquals(Field.Mode.NULLABLE, fields.get(5).getMode());
+
+    assertEquals("DECIMAL_DIGITS", fields.get(6).getName());
+    assertEquals(StandardSQLTypeName.INT64, fields.get(6).getType().getStandardType());
+    assertEquals(Field.Mode.NULLABLE, fields.get(6).getMode());
+
+    assertEquals("PSEUDO_COLUMN", fields.get(7).getName());
+    assertEquals(StandardSQLTypeName.INT64, fields.get(7).getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, fields.get(7).getMode());
+  }
+
+  @Test
+  public void testGetBestRowIdentifier_ReturnsEmptyResultSetWithCorrectMetadata()
+      throws SQLException {
+    int testScope = DatabaseMetaData.bestRowSession;
+    boolean testNullable = true;
+
+    try (ResultSet rs =
+        dbMetadata.getBestRowIdentifier(
+            "testCat", "testSchema", "testTable", testScope, testNullable)) {
+      assertNotNull("ResultSet should not be null", rs);
+      assertFalse("ResultSet should be empty", rs.next());
+
+      ResultSetMetaData metaData = rs.getMetaData();
+      assertEquals("ResultSetMetaData should have 8 columns", 8, metaData.getColumnCount());
+
+      assertEquals("SCOPE", metaData.getColumnName(1));
+      assertEquals(Types.BIGINT, metaData.getColumnType(1));
+      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(1));
+
+      assertEquals("COLUMN_NAME", metaData.getColumnName(2));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(2));
+      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(2));
+
+      assertEquals("DATA_TYPE", metaData.getColumnName(3));
+      assertEquals(Types.BIGINT, metaData.getColumnType(3));
+      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3));
+
+      assertEquals("BUFFER_LENGTH", metaData.getColumnName(6));
+      assertEquals(Types.BIGINT, metaData.getColumnType(6));
+      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(6));
+
+      assertEquals("PSEUDO_COLUMN", metaData.getColumnName(8));
+      assertEquals(Types.BIGINT, metaData.getColumnType(8));
+      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(8));
+    }
+  }
fields.get("BASE_TYPE").getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get("BASE_TYPE").getMode()); + } + + @Test + public void testGetUDTs_ReturnsEmptyResultSet() throws SQLException { + int[] types = {Types.STRUCT, Types.DISTINCT}; + try (ResultSet rs = dbMetadata.getUDTs("testCat", "testSchema%", "testType%", types)) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertEquals("ResultSetMetaData should have 7 columns", 7, metaData.getColumnCount()); + + assertEquals("TYPE_NAME", metaData.getColumnName(3)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(3)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3)); + + assertEquals("DATA_TYPE", metaData.getColumnName(5)); + assertEquals(Types.BIGINT, metaData.getColumnType(5)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(5)); + + assertEquals("BASE_TYPE", metaData.getColumnName(7)); + assertEquals(Types.BIGINT, metaData.getColumnType(7)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(7)); + } + } + + @Test + public void testDefineGetIndexInfoSchema() { + Schema schema = dbMetadata.defineGetIndexInfoSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 13 fields", 13, fields.size()); + + assertEquals("TABLE_NAME", fields.get(2).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(2).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(2).getMode()); + + assertEquals("NON_UNIQUE", fields.get(3).getName()); + assertEquals(StandardSQLTypeName.BOOL, fields.get(3).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(3).getMode()); + + assertEquals("TYPE", fields.get(6).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(6).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(6).getMode()); + + assertEquals("CARDINALITY", fields.get(10).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(10).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(10).getMode()); + } + + @Test + public void testGetIndexInfo_ReturnsEmptyResultSetWithCorrectMetadata() throws SQLException { + try (ResultSet rs = dbMetadata.getIndexInfo("testCat", "testSchema", "testTable", true, true)) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertEquals("ResultSetMetaData should have 13 columns", 13, metaData.getColumnCount()); + + assertEquals("TABLE_NAME", metaData.getColumnName(3)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(3)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3)); + + assertEquals("NON_UNIQUE", metaData.getColumnName(4)); + assertEquals(Types.BOOLEAN, metaData.getColumnType(4)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(4)); + + assertEquals("TYPE", metaData.getColumnName(7)); + assertEquals(Types.BIGINT, metaData.getColumnType(7)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(7)); + + assertEquals("CARDINALITY", metaData.getColumnName(11)); + assertEquals(Types.BIGINT, metaData.getColumnType(11)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(11)); + } + } + + @Test + public void testDefineGetTablePrivilegesSchema() { 
+
+  @Test
+  public void testDefineGetTablePrivilegesSchema() {
+    Schema schema = dbMetadata.defineGetTablePrivilegesSchema();
+    assertNotNull("Schema should not be null", schema);
+    FieldList fields = schema.getFields();
+    assertEquals("Schema should have 7 fields", 7, fields.size());
+
+    assertEquals("TABLE_CAT", fields.get(0).getName());
+    assertEquals(StandardSQLTypeName.STRING, fields.get(0).getType().getStandardType());
+    assertEquals(Field.Mode.NULLABLE, fields.get(0).getMode());
+
+    assertEquals("TABLE_NAME", fields.get(2).getName());
+    assertEquals(StandardSQLTypeName.STRING, fields.get(2).getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, fields.get(2).getMode());
+
+    assertEquals("GRANTEE", fields.get(4).getName());
+    assertEquals(StandardSQLTypeName.STRING, fields.get(4).getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, fields.get(4).getMode());
+
+    assertEquals("PRIVILEGE", fields.get(5).getName());
+    assertEquals(StandardSQLTypeName.STRING, fields.get(5).getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, fields.get(5).getMode());
+
+    assertEquals("IS_GRANTABLE", fields.get(6).getName());
+    assertEquals(StandardSQLTypeName.STRING, fields.get(6).getType().getStandardType());
+    assertEquals(Field.Mode.NULLABLE, fields.get(6).getMode());
+  }
+
+  @Test
+  public void testGetTablePrivileges_ReturnsEmptyResultSetWithCorrectMetadata()
+      throws SQLException {
+    try (ResultSet rs = dbMetadata.getTablePrivileges("testCat", "testSchema%", "testTable%")) {
+      assertNotNull("ResultSet should not be null", rs);
+      assertFalse("ResultSet should be empty", rs.next());
+
+      ResultSetMetaData metaData = rs.getMetaData();
+      assertEquals("ResultSetMetaData should have 7 columns", 7, metaData.getColumnCount());
+
+      assertEquals("TABLE_CAT", metaData.getColumnName(1));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(1));
+      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(1));
+
+      assertEquals("TABLE_NAME", metaData.getColumnName(3));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(3));
+      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3));
+
+      assertEquals("GRANTEE", metaData.getColumnName(5));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(5));
+      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(5));
+
+      assertEquals("PRIVILEGE", metaData.getColumnName(6));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(6));
+      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(6));
+
+      assertEquals("IS_GRANTABLE", metaData.getColumnName(7));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(7));
+      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(7));
+    }
+  }
+
+  @Test
+  public void testDefineGetColumnPrivilegesSchema() {
+    Schema schema = dbMetadata.defineGetColumnPrivilegesSchema();
+    assertNotNull("Schema should not be null", schema);
+    FieldList fields = schema.getFields();
+    assertEquals("Schema should have 8 fields", 8, fields.size());
+
+    assertEquals("TABLE_SCHEM", fields.get(1).getName());
+    assertEquals(StandardSQLTypeName.STRING, fields.get(1).getType().getStandardType());
+    assertEquals(Field.Mode.NULLABLE, fields.get(1).getMode());
+
+    assertEquals("COLUMN_NAME", fields.get(3).getName());
+    assertEquals(StandardSQLTypeName.STRING, fields.get(3).getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, fields.get(3).getMode());
+
+    assertEquals("GRANTOR", fields.get(4).getName());
+    assertEquals(StandardSQLTypeName.STRING, fields.get(4).getType().getStandardType());
+    assertEquals(Field.Mode.NULLABLE, fields.get(4).getMode());
+
+    assertEquals("PRIVILEGE", fields.get(6).getName());
+    assertEquals(StandardSQLTypeName.STRING, fields.get(6).getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, fields.get(6).getMode());
+
+    assertEquals("IS_GRANTABLE", fields.get(7).getName());
+    assertEquals(StandardSQLTypeName.STRING, fields.get(7).getType().getStandardType());
+    assertEquals(Field.Mode.NULLABLE, fields.get(7).getMode());
+  }
+
+  @Test
+  public void testGetColumnPrivileges_ReturnsEmptyResultSetWithCorrectMetadata()
+      throws SQLException {
+    try (ResultSet rs =
+        dbMetadata.getColumnPrivileges("testCat", "testSchema", "testTable", "testCol%")) {
+      assertNotNull("ResultSet should not be null", rs);
+      assertFalse("ResultSet should be empty", rs.next());
+
+      ResultSetMetaData metaData = rs.getMetaData();
+      assertEquals("ResultSetMetaData should have 8 columns", 8, metaData.getColumnCount());
+
+      assertEquals("TABLE_SCHEM", metaData.getColumnName(2));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(2));
+      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(2));
+
+      assertEquals("COLUMN_NAME", metaData.getColumnName(4));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(4));
+      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(4));
+
+      assertEquals("GRANTOR", metaData.getColumnName(5));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(5));
+      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(5));
+
+      assertEquals("PRIVILEGE", metaData.getColumnName(7));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(7));
+      assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(7));
+
+      assertEquals("IS_GRANTABLE", metaData.getColumnName(8));
+      assertEquals(Types.NVARCHAR, metaData.getColumnType(8));
+      assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(8));
+    }
+  }
+
+  @Test
+  public void testDefineGetVersionColumnsSchema() {
+    Schema schema = dbMetadata.defineGetVersionColumnsSchema();
+    assertNotNull("Schema should not be null", schema);
+    FieldList fields = schema.getFields();
+    assertEquals("Schema should have 8 fields", 8, fields.size());
+
+    assertEquals("SCOPE", fields.get(0).getName());
+    assertEquals(StandardSQLTypeName.INT64, fields.get(0).getType().getStandardType());
+    assertEquals(Field.Mode.NULLABLE, fields.get(0).getMode());
+
+    assertEquals("COLUMN_NAME", fields.get(1).getName());
+    assertEquals(StandardSQLTypeName.STRING, fields.get(1).getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, fields.get(1).getMode());
+
+    assertEquals("DATA_TYPE", fields.get(2).getName());
+    assertEquals(StandardSQLTypeName.INT64, fields.get(2).getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, fields.get(2).getMode());
+
+    assertEquals("BUFFER_LENGTH", fields.get(5).getName());
+    assertEquals(StandardSQLTypeName.INT64, fields.get(5).getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, fields.get(5).getMode());
+
+    assertEquals("PSEUDO_COLUMN", fields.get(7).getName());
+    assertEquals(StandardSQLTypeName.INT64, fields.get(7).getType().getStandardType());
+    assertEquals(Field.Mode.REQUIRED, fields.get(7).getMode());
+  }
rs.getMetaData(); + assertEquals("ResultSetMetaData should have 8 columns", 8, metaData.getColumnCount()); + + assertEquals("SCOPE", metaData.getColumnName(1)); + assertEquals(Types.BIGINT, metaData.getColumnType(1)); + assertEquals(ResultSetMetaData.columnNullable, metaData.isNullable(1)); + + assertEquals("COLUMN_NAME", metaData.getColumnName(2)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(2)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(2)); + + assertEquals("DATA_TYPE", metaData.getColumnName(3)); + assertEquals(Types.BIGINT, metaData.getColumnType(3)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3)); + + assertEquals("BUFFER_LENGTH", metaData.getColumnName(6)); + assertEquals(Types.BIGINT, metaData.getColumnType(6)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(6)); + + assertEquals("PSEUDO_COLUMN", metaData.getColumnName(8)); + assertEquals(Types.BIGINT, metaData.getColumnType(8)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(8)); + } + } + + @Test + public void testDefineGetPseudoColumnsSchema() { + Schema schema = dbMetadata.defineGetPseudoColumnsSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 12 fields", 12, fields.size()); + + assertEquals("TABLE_NAME", fields.get(2).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(2).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(2).getMode()); + + assertEquals("COLUMN_NAME", fields.get(3).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(3).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(3).getMode()); + + assertEquals("DATA_TYPE", fields.get(4).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(4).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(4).getMode()); + + assertEquals("COLUMN_SIZE", fields.get(5).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(5).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(5).getMode()); + + assertEquals("COLUMN_USAGE", fields.get(8).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(8).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(8).getMode()); + + assertEquals("IS_NULLABLE", fields.get(11).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(11).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(11).getMode()); + } + + @Test + public void testGetPseudoColumns_ReturnsEmptyResultSet() throws SQLException { + try (ResultSet rs = dbMetadata.getPseudoColumns("testCat", "testSchema%", "testTable%", "%")) { + assertNotNull("ResultSet should not be null", rs); + assertFalse("ResultSet should be empty", rs.next()); + + ResultSetMetaData metaData = rs.getMetaData(); + assertEquals("ResultSetMetaData should have 12 columns", 12, metaData.getColumnCount()); + + assertEquals("TABLE_NAME", metaData.getColumnName(3)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(3)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(3)); + + assertEquals("COLUMN_NAME", metaData.getColumnName(4)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(4)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(4)); + + assertEquals("DATA_TYPE", metaData.getColumnName(5)); + assertEquals(Types.BIGINT, metaData.getColumnType(5)); + 
assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(5)); + + assertEquals("COLUMN_SIZE", metaData.getColumnName(6)); + assertEquals(Types.BIGINT, metaData.getColumnType(6)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(6)); + + assertEquals("COLUMN_USAGE", metaData.getColumnName(9)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(9)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(9)); + + assertEquals("IS_NULLABLE", metaData.getColumnName(12)); + assertEquals(Types.NVARCHAR, metaData.getColumnType(12)); + assertEquals(ResultSetMetaData.columnNoNulls, metaData.isNullable(12)); + } + } + + @Test + public void testDefineGetFunctionsSchema() { + Schema schema = dbMetadata.defineGetFunctionsSchema(); + assertNotNull(schema); + FieldList fields = schema.getFields(); + assertEquals(6, fields.size()); + + Field funcCat = fields.get("FUNCTION_CAT"); + assertEquals("FUNCTION_CAT", funcCat.getName()); + assertEquals(StandardSQLTypeName.STRING, funcCat.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, funcCat.getMode()); + + Field funcSchem = fields.get("FUNCTION_SCHEM"); + assertEquals("FUNCTION_SCHEM", funcSchem.getName()); + assertEquals(StandardSQLTypeName.STRING, funcSchem.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, funcSchem.getMode()); + + Field funcName = fields.get("FUNCTION_NAME"); + assertEquals("FUNCTION_NAME", funcName.getName()); + assertEquals(StandardSQLTypeName.STRING, funcName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, funcName.getMode()); + + Field remarks = fields.get("REMARKS"); + assertEquals("REMARKS", remarks.getName()); + assertEquals(StandardSQLTypeName.STRING, remarks.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, remarks.getMode()); + + Field funcType = fields.get("FUNCTION_TYPE"); + assertEquals("FUNCTION_TYPE", funcType.getName()); + assertEquals(StandardSQLTypeName.INT64, funcType.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, funcType.getMode()); + + Field specificName = fields.get("SPECIFIC_NAME"); + assertEquals("SPECIFIC_NAME", specificName.getName()); + assertEquals(StandardSQLTypeName.STRING, specificName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, specificName.getMode()); + } + + @Test + public void testProcessFunctionInfo_ScalarFunction() { + Schema resultSchema = dbMetadata.defineGetFunctionsSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>()); + + String catalog = "proj-func"; + String schema = "dataset_func"; + String name = "my_scalar_func"; + String description = "A test scalar function"; + + Routine routine = mockBigQueryRoutine(catalog, schema, name, "SCALAR_FUNCTION", description); + + dbMetadata.processFunctionInfo(routine, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + FieldValueList row = collectedResults.get(0); + assertNotNull(row); + assertEquals(6, row.size()); + assertEquals(catalog, row.get("FUNCTION_CAT").getStringValue()); + assertEquals(schema, row.get("FUNCTION_SCHEM").getStringValue()); + assertEquals(name, row.get("FUNCTION_NAME").getStringValue()); + assertEquals(description, row.get("REMARKS").getStringValue()); + assertEquals( + String.valueOf(DatabaseMetaData.functionResultUnknown), + row.get("FUNCTION_TYPE").getStringValue()); + assertEquals(name, row.get("SPECIFIC_NAME").getStringValue()); + } + + @Test + public void testProcessFunctionInfo_TableFunction() { + Schema resultSchema = dbMetadata.defineGetFunctionsSchema(); + FieldList resultSchemaFields = resultSchema.getFields(); + List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>()); + + String catalog = "proj-func"; + String schema = "dataset_func"; + String name = "my_table_func"; + String description = "A test Table function"; + + Routine routine = mockBigQueryRoutine(catalog, schema, name, "TABLE_FUNCTION", description); + + dbMetadata.processFunctionInfo(routine, collectedResults, resultSchemaFields); + + assertEquals(1, collectedResults.size()); + FieldValueList row = collectedResults.get(0); + assertNotNull(row); + assertEquals(6, row.size()); + assertEquals(catalog, row.get("FUNCTION_CAT").getStringValue()); + assertEquals(schema, row.get("FUNCTION_SCHEM").getStringValue()); + assertEquals(name, row.get("FUNCTION_NAME").getStringValue()); + assertEquals(description, row.get("REMARKS").getStringValue()); + assertEquals( + String.valueOf(DatabaseMetaData.functionReturnsTable), + row.get("FUNCTION_TYPE").getStringValue()); + assertEquals(name, row.get("SPECIFIC_NAME").getStringValue()); + } + + private FieldValueList createFunctionRow( + String cat, + String schem, + String name, + String specName, + int funcType, + FieldList schemaFields) { + List<FieldValue> values = new ArrayList<>(); + values.add(dbMetadata.createStringFieldValue(cat)); // FUNCTION_CAT + values.add(dbMetadata.createStringFieldValue(schem)); // FUNCTION_SCHEM + values.add(dbMetadata.createStringFieldValue(name)); // FUNCTION_NAME + values.add(dbMetadata.createStringFieldValue("Remark for " + name)); // REMARKS + values.add(dbMetadata.createLongFieldValue((long) funcType)); // FUNCTION_TYPE + values.add(dbMetadata.createStringFieldValue(specName)); // SPECIFIC_NAME + return FieldValueList.of(values, schemaFields); + } + + @Test + public void testSortResults_Functions() { + Schema resultSchema = dbMetadata.defineGetFunctionsSchema(); + FieldList schemaFields = resultSchema.getFields(); + List<FieldValueList> results = new ArrayList<>(); + + // Add rows in unsorted order + results.add( + createFunctionRow( + "cat_b", + "sch_c", + "func_1", + "func_1_spec", + DatabaseMetaData.functionResultUnknown, + schemaFields)); + results.add( + createFunctionRow( + "cat_a", + "sch_z", + "func_alpha", + "func_alpha_spec", + DatabaseMetaData.functionReturnsTable, + schemaFields)); + results.add( + createFunctionRow( + "cat_a", + "sch_z", + "func_beta", + "func_beta_spec", + DatabaseMetaData.functionResultUnknown, + schemaFields)); + results.add( + createFunctionRow( + null, + "sch_y", + "func_gamma", + "func_gamma_spec", + DatabaseMetaData.functionReturnsTable, + schemaFields)); + results.add( + createFunctionRow( + "cat_a", + null, + "func_delta", + "func_delta_spec", + DatabaseMetaData.functionResultUnknown, + schemaFields)); + results.add( + createFunctionRow( + "cat_a", + "sch_z", + "func_alpha", + "func_alpha_spec_older", + DatabaseMetaData.functionReturnsTable, + schemaFields)); + + Comparator<FieldValueList> comparator = dbMetadata.defineGetFunctionsComparator(schemaFields); + dbMetadata.sortResults(results, comparator, "getFunctions", dbMetadata.LOG); + + // Expected Order: Null Cat, then Cat A (Null Schem, then sch_z), then Cat B. Within that, Name, + // then Spec Name.
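+ // Null catalog and schema values sort ahead of non-null values, which is why func_gamma + // (null catalog) leads overall and func_delta (null schema) leads the cat_a group below.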
+ assertEquals(6, results.size()); + + // Check order based on the comparator (CAT, SCHEM, NAME, SPECIFIC_NAME) + assertEquals("func_gamma", results.get(0).get("FUNCTION_NAME").getStringValue()); // null cat + assertEquals( + "func_delta", results.get(1).get("FUNCTION_NAME").getStringValue()); // cat_a, null schem + assertEquals( + "func_alpha", + results.get(2).get("FUNCTION_NAME").getStringValue()); // cat_a, sch_z, alpha, spec + assertEquals( + "func_alpha", + results.get(3).get("FUNCTION_NAME").getStringValue()); // cat_a, sch_z, alpha, spec_older + assertEquals( + "func_beta", results.get(4).get("FUNCTION_NAME").getStringValue()); // cat_a, sch_z, beta + assertEquals("func_1", results.get(5).get("FUNCTION_NAME").getStringValue()); // cat_b + } + + @Test + public void testDefineGetTypeInfoSchema() { + Schema schema = dbMetadata.defineGetTypeInfoSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 18 fields", 18, fields.size()); + + Field typeName = fields.get("TYPE_NAME"); + assertNotNull(typeName); + assertEquals("TYPE_NAME", typeName.getName()); + assertEquals(StandardSQLTypeName.STRING, typeName.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, typeName.getMode()); + + Field dataType = fields.get("DATA_TYPE"); + assertNotNull(dataType); + assertEquals("DATA_TYPE", dataType.getName()); + assertEquals(StandardSQLTypeName.INT64, dataType.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, dataType.getMode()); + + Field precision = fields.get("PRECISION"); + assertNotNull(precision); + assertEquals("PRECISION", precision.getName()); + assertEquals(StandardSQLTypeName.INT64, precision.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, precision.getMode()); + + Field caseSensitive = fields.get("CASE_SENSITIVE"); + assertNotNull(caseSensitive); + assertEquals("CASE_SENSITIVE", caseSensitive.getName()); + assertEquals(StandardSQLTypeName.BOOL, caseSensitive.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, caseSensitive.getMode()); + + Field numPrecRadix = fields.get("NUM_PREC_RADIX"); + assertNotNull(numPrecRadix); + assertEquals("NUM_PREC_RADIX", numPrecRadix.getName()); + assertEquals(StandardSQLTypeName.INT64, numPrecRadix.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, numPrecRadix.getMode()); + } + + @Test + public void testPrepareGetTypeInfoRows() { + Schema typeInfoSchema = dbMetadata.defineGetTypeInfoSchema(); + FieldList schemaFields = typeInfoSchema.getFields(); + List<FieldValueList> rows = dbMetadata.prepareGetTypeInfoRows(schemaFields); + + assertNotNull("Rows list should not be null", rows); + assertEquals("Should have 17 rows for 17 types", 17, rows.size()); + + // INT64 (should be BIGINT in JDBC) + Optional<FieldValueList> int64RowOpt = + rows.stream() + .filter(row -> "INT64".equals(row.get("TYPE_NAME").getStringValue())) + .findFirst(); + assertTrue("INT64 type info row should exist", int64RowOpt.isPresent()); + FieldValueList int64Row = int64RowOpt.get(); + assertEquals(String.valueOf(Types.BIGINT), int64Row.get("DATA_TYPE").getStringValue()); + assertEquals("19", int64Row.get("PRECISION").getStringValue()); + assertEquals( + String.valueOf(DatabaseMetaData.typeNullable), int64Row.get("NULLABLE").getStringValue()); + assertEquals("0", int64Row.get("CASE_SENSITIVE").getStringValue()); + assertEquals("10", int64Row.get("NUM_PREC_RADIX").getStringValue()); + + // BOOL (should be BOOLEAN in JDBC) + Optional<FieldValueList> boolRowOpt = + rows.stream() + .filter(row -> "BOOL".equals(row.get("TYPE_NAME").getStringValue())) + .findFirst(); + assertTrue("BOOL type info row should exist", boolRowOpt.isPresent()); + FieldValueList boolRow = boolRowOpt.get(); + assertEquals(String.valueOf(Types.BOOLEAN), boolRow.get("DATA_TYPE").getStringValue()); + assertEquals("1", boolRow.get("PRECISION").getStringValue()); + assertEquals( + String.valueOf(DatabaseMetaData.typeNullable), boolRow.get("NULLABLE").getStringValue()); + assertEquals("0", boolRow.get("CASE_SENSITIVE").getStringValue()); // false + assertTrue(boolRow.get("NUM_PREC_RADIX").isNull()); + + // STRING (should be NVARCHAR in JDBC) + Optional<FieldValueList> stringRowOpt = + rows.stream() + .filter(row -> "STRING".equals(row.get("TYPE_NAME").getStringValue())) + .findFirst(); + assertTrue("STRING type info row should exist", stringRowOpt.isPresent()); + FieldValueList stringRow = stringRowOpt.get(); + assertEquals(String.valueOf(Types.NVARCHAR), stringRow.get("DATA_TYPE").getStringValue()); + assertTrue(stringRow.get("PRECISION").isNull()); // Precision is null for STRING + assertEquals("'", stringRow.get("LITERAL_PREFIX").getStringValue()); + assertEquals("'", stringRow.get("LITERAL_SUFFIX").getStringValue()); + assertEquals("LENGTH", stringRow.get("CREATE_PARAMS").getStringValue()); + assertEquals( + String.valueOf(DatabaseMetaData.typeNullable), stringRow.get("NULLABLE").getStringValue()); + assertEquals("1", stringRow.get("CASE_SENSITIVE").getStringValue()); // true + assertTrue(stringRow.get("NUM_PREC_RADIX").isNull()); + } + + @Test + public void testGetTypeInfo() throws SQLException { + try (ResultSet rs = dbMetadata.getTypeInfo()) { + assertNotNull("ResultSet from getTypeInfo() should not be null", rs); + + ResultSetMetaData rsmd = rs.getMetaData(); + assertNotNull("ResultSetMetaData should not be null", rsmd); + assertEquals("Should have 18 columns", 18, rsmd.getColumnCount()); + assertEquals("TYPE_NAME", rsmd.getColumnName(1)); + assertEquals("DATA_TYPE", rsmd.getColumnName(2)); + assertEquals("PRECISION", rsmd.getColumnName(3)); + + List<Integer> dataTypes = new ArrayList<>(); + int rowCount = 0; + while (rs.next()) { + rowCount++; + dataTypes.add(rs.getInt("DATA_TYPE")); + if ("INT64".equals(rs.getString("TYPE_NAME"))) { + assertEquals(Types.BIGINT, rs.getInt("DATA_TYPE")); + assertEquals(19, rs.getInt("PRECISION")); + } + } + assertEquals("Should have 17 rows for 17 types", 17, rowCount); + + // Verify sorting by DATA_TYPE + List<Integer> sortedDataTypes = new ArrayList<>(dataTypes); + Collections.sort(sortedDataTypes); + assertEquals("Results should be sorted by DATA_TYPE", sortedDataTypes, dataTypes); + } + } + + @Test + public void testDefineGetFunctionColumnsSchema() { + Schema schema = dbMetadata.defineGetFunctionColumnsSchema(); + assertNotNull(schema); + FieldList fields = schema.getFields(); + assertEquals(17, fields.size()); + + assertEquals("FUNCTION_CAT", fields.get(0).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(0).getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, fields.get(0).getMode()); + + assertEquals("COLUMN_NAME", fields.get(3).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(3).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(3).getMode()); + + assertEquals("COLUMN_TYPE", fields.get(4).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(4).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(4).getMode()); + + assertEquals("ORDINAL_POSITION", fields.get(14).getName()); + assertEquals(StandardSQLTypeName.INT64, fields.get(14).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(14).getMode()); + + assertEquals("SPECIFIC_NAME", fields.get(16).getName()); + assertEquals(StandardSQLTypeName.STRING, fields.get(16).getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, fields.get(16).getMode()); + } + + @Test + public void testCreateFunctionColumnRow() { + StandardSQLDataType stringType = mockStandardSQLDataType(StandardSQLTypeName.STRING); + List<FieldValue> row = + dbMetadata.createFunctionColumnRow( + "cat", + "sch", + "func", + "func_spec", + "param_in", + DatabaseMetaData.functionColumnIn, + stringType, + 1); + + assertEquals(17, row.size()); + assertEquals("cat", row.get(0).getStringValue()); + assertEquals("sch", row.get(1).getStringValue()); + assertEquals("func", row.get(2).getStringValue()); + assertEquals("param_in", row.get(3).getStringValue()); + assertEquals(String.valueOf(DatabaseMetaData.functionColumnIn), row.get(4).getStringValue()); + assertEquals(String.valueOf(Types.NVARCHAR), row.get(5).getStringValue()); // DATA_TYPE + assertEquals("NVARCHAR", row.get(6).getStringValue()); // TYPE_NAME + assertTrue(row.get(7).isNull()); // PRECISION + assertTrue(row.get(8).isNull()); // LENGTH + assertTrue(row.get(9).isNull()); // SCALE + assertTrue(row.get(10).isNull()); // RADIX + assertEquals( + String.valueOf(DatabaseMetaData.functionNullableUnknown), + row.get(11).getStringValue()); // NULLABLE + assertTrue(row.get(12).isNull()); // REMARKS + assertTrue(row.get(13).isNull()); // CHAR_OCTET_LENGTH (should be columnSize) + assertEquals("1", row.get(14).getStringValue()); // ORDINAL_POSITION + assertEquals("", row.get(15).getStringValue()); // IS_NULLABLE + assertEquals("func_spec", row.get(16).getStringValue()); // SPECIFIC_NAME + } + + @Test + public void testProcessFunctionParametersAndReturnValue_ScalarFunctionWithArgs() { + Schema resultSchema = dbMetadata.defineGetFunctionColumnsSchema(); + FieldList resultFields = resultSchema.getFields(); + List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>()); + + RoutineArgument arg1 = mockRoutineArgument("in_str", StandardSQLTypeName.STRING, "IN"); + RoutineArgument arg2 = mockRoutineArgument("in_int", StandardSQLTypeName.INT64, "IN"); + Routine scalarFunc = + mockBigQueryRoutineWithArgs( + "cat", "ds", "my_scalar", "SCALAR_FUNCTION", "desc", Arrays.asList(arg1, arg2)); + when(scalarFunc.getReturnTableType()).thenReturn(null); // No return table for scalar + + dbMetadata.processFunctionParametersAndReturnValue( + scalarFunc, null, collectedResults, resultFields); + + assertEquals(2, collectedResults.size()); + // First argument + assertEquals("in_str", collectedResults.get(0).get("COLUMN_NAME").getStringValue()); + assertEquals( + String.valueOf(DatabaseMetaData.functionColumnIn), + collectedResults.get(0).get("COLUMN_TYPE").getStringValue()); + assertEquals("1", collectedResults.get(0).get("ORDINAL_POSITION").getStringValue()); + // Second argument + assertEquals("in_int", collectedResults.get(1).get("COLUMN_NAME").getStringValue()); + assertEquals( + String.valueOf(DatabaseMetaData.functionColumnIn), + collectedResults.get(1).get("COLUMN_TYPE").getStringValue()); + assertEquals("2", collectedResults.get(1).get("ORDINAL_POSITION").getStringValue()); + } + + @Test + public void testProcessFunctionParametersAndReturnValue_TableFunctionWithReturnTable() { + Schema resultSchema = dbMetadata.defineGetFunctionColumnsSchema(); + FieldList resultFields = resultSchema.getFields(); + List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>()); + + StandardSQLField returnCol1 = mockStandardSQLField("out_id", StandardSQLTypeName.INT64); + StandardSQLField returnCol2 = mockStandardSQLField("out_val", StandardSQLTypeName.STRING); + StandardSQLTableType returnTable = + mockStandardSQLTableType(Arrays.asList(returnCol1, returnCol2)); + + Routine tableFunc = + mockBigQueryRoutineWithArgs( + "cat", "ds", "my_table_func", "TABLE_FUNCTION", "desc", Collections.emptyList()); + when(tableFunc.getReturnTableType()).thenReturn(returnTable); + + dbMetadata.processFunctionParametersAndReturnValue( + tableFunc, null, collectedResults, resultFields); + + assertEquals(2, collectedResults.size()); + // First return column + assertEquals("out_id", collectedResults.get(0).get("COLUMN_NAME").getStringValue()); + assertEquals( + String.valueOf(DatabaseMetaData.functionColumnResult), + collectedResults.get(0).get("COLUMN_TYPE").getStringValue()); + assertEquals("1", collectedResults.get(0).get("ORDINAL_POSITION").getStringValue()); + // Second return column + assertEquals("out_val", collectedResults.get(1).get("COLUMN_NAME").getStringValue()); + assertEquals( + String.valueOf(DatabaseMetaData.functionColumnResult), + collectedResults.get(1).get("COLUMN_TYPE").getStringValue()); + assertEquals("2", collectedResults.get(1).get("ORDINAL_POSITION").getStringValue()); + } + + @Test + public void testProcessFunctionParametersAndReturnValue_ColumnNameFilter() { + Schema resultSchema = dbMetadata.defineGetFunctionColumnsSchema(); + FieldList resultFields = resultSchema.getFields(); + List<FieldValueList> collectedResults = Collections.synchronizedList(new ArrayList<>()); + Pattern columnNamePattern = Pattern.compile("id_.*"); // Match columns starting with "id_" + + RoutineArgument arg1 = mockRoutineArgument("id_arg", StandardSQLTypeName.INT64, "IN"); + RoutineArgument arg2 = mockRoutineArgument("name_arg", StandardSQLTypeName.STRING, "IN"); + StandardSQLField returnCol1 = mockStandardSQLField("id_return", StandardSQLTypeName.BOOL); + StandardSQLField returnCol2 = mockStandardSQLField("value_return", StandardSQLTypeName.FLOAT64); + StandardSQLTableType returnTable = + mockStandardSQLTableType(Arrays.asList(returnCol1, returnCol2)); + + Routine tableFunc = + mockBigQueryRoutineWithArgs( + "cat", "ds", "filter_func", "TABLE_FUNCTION", "desc", Arrays.asList(arg1, arg2)); + when(tableFunc.getReturnTableType()).thenReturn(returnTable); + + dbMetadata.processFunctionParametersAndReturnValue( + tableFunc, columnNamePattern, collectedResults, resultFields); + + assertEquals(2, collectedResults.size()); // Should match id_arg and id_return + assertEquals("id_return", collectedResults.get(0).get("COLUMN_NAME").getStringValue()); + assertEquals("id_arg", collectedResults.get(1).get("COLUMN_NAME").getStringValue()); + } + + @Test + public void testDefineGetClientInfoPropertiesSchema() { + Schema schema = dbMetadata.defineGetClientInfoPropertiesSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Schema should have 4 fields", 4, fields.size()); + + Field nameField = fields.get("NAME"); + assertNotNull(nameField); + assertEquals("NAME", nameField.getName()); + assertEquals(StandardSQLTypeName.STRING, nameField.getType().getStandardType()); + assertEquals(Field.Mode.REQUIRED, nameField.getMode()); + + Field maxLenField = fields.get("MAX_LEN"); + assertNotNull(maxLenField); + assertEquals("MAX_LEN", maxLenField.getName()); + assertEquals(StandardSQLTypeName.INT64, maxLenField.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, maxLenField.getMode()); + + Field defaultValueField = fields.get("DEFAULT_VALUE"); + assertNotNull(defaultValueField); + assertEquals("DEFAULT_VALUE", defaultValueField.getName()); + assertEquals(StandardSQLTypeName.STRING, defaultValueField.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, defaultValueField.getMode()); + + Field descriptionField = fields.get("DESCRIPTION"); + assertNotNull(descriptionField); + assertEquals("DESCRIPTION", descriptionField.getName()); + assertEquals(StandardSQLTypeName.STRING, descriptionField.getType().getStandardType()); + assertEquals(Field.Mode.NULLABLE, descriptionField.getMode()); + } + + @Test + public void testGetClientInfoProperties() throws SQLException { + try (ResultSet rs = dbMetadata.getClientInfoProperties()) { + assertNotNull("ResultSet from getClientInfoProperties() should not be null", rs); + + ResultSetMetaData rsmd = rs.getMetaData(); + assertNotNull("ResultSetMetaData should not be null", rsmd); + assertEquals("Should have 4 columns", 4, rsmd.getColumnCount()); + assertEquals("NAME", rsmd.getColumnName(1)); + assertEquals(Types.NVARCHAR, rsmd.getColumnType(1)); + assertEquals("MAX_LEN", rsmd.getColumnName(2)); + assertEquals(Types.BIGINT, rsmd.getColumnType(2)); + assertEquals("DEFAULT_VALUE", rsmd.getColumnName(3)); + assertEquals(Types.NVARCHAR, rsmd.getColumnType(3)); + assertEquals("DESCRIPTION", rsmd.getColumnName(4)); + assertEquals(Types.NVARCHAR, rsmd.getColumnType(4)); + + List<Map<String, Object>> actualRows = new ArrayList<>(); + while (rs.next()) { + Map<String, Object> row = new HashMap<>(); + row.put("NAME", rs.getString("NAME")); + row.put("MAX_LEN", rs.getLong("MAX_LEN")); + row.put("DEFAULT_VALUE", rs.getObject("DEFAULT_VALUE")); + row.put("DESCRIPTION", rs.getString("DESCRIPTION")); + actualRows.add(row); + } + + assertEquals("Should return 3 client info properties", 3, actualRows.size()); + + Map<String, Object> appNameRow = actualRows.get(0); + assertEquals("ApplicationName", appNameRow.get("NAME")); + assertEquals(25L, appNameRow.get("MAX_LEN")); + assertNull(appNameRow.get("DEFAULT_VALUE")); + assertEquals( + "The name of the application currently utilizing the connection.", + appNameRow.get("DESCRIPTION")); + + Map<String, Object> clientHostnameRow = actualRows.get(1); + assertEquals("ClientHostname", clientHostnameRow.get("NAME")); + + Map<String, Object> clientUserRow = actualRows.get(2); + assertEquals("ClientUser", clientUserRow.get("NAME")); + } + } + + @Test + public void testDefineGetCatalogsSchema() { + Schema schema = dbMetadata.defineGetCatalogsSchema(); + assertNotNull("Schema should not be null", schema); + FieldList fields = schema.getFields(); + assertEquals("Should have one column", 1, fields.size()); + + Field tableCatField = fields.get("TABLE_CAT"); + assertNotNull("TABLE_CAT field should exist", tableCatField); + assertEquals("Field name should be TABLE_CAT", "TABLE_CAT", tableCatField.getName()); + assertEquals( + "Field type should be STRING", + StandardSQLTypeName.STRING, + tableCatField.getType().getStandardType()); + assertEquals("Field mode should be REQUIRED", Field.Mode.REQUIRED, tableCatField.getMode()); + } + + @Test + public void testPrepareGetCatalogsRows() { + Schema catalogsSchema = dbMetadata.defineGetCatalogsSchema(); + FieldList schemaFields = catalogsSchema.getFields(); + + // Test with a valid catalog name + List<String> testCatalogName = new ArrayList<>(); + testCatalogName.add("test_catalog"); + + List<FieldValueList> rowsWithCatalog = + dbMetadata.prepareGetCatalogsRows(schemaFields, testCatalogName); + + assertNotNull("Rows list should not be null when catalog name is provided", rowsWithCatalog); + assertEquals("Should have one row when a catalog name is provided", 1, rowsWithCatalog.size()); + FieldValueList row = rowsWithCatalog.get(0); + assertEquals("Row should have 1 field value", 1, row.size()); + assertFalse("FieldValue in row should not be SQL NULL", row.get(0).isNull()); + assertEquals( + "TABLE_CAT should match the provided catalog name", + testCatalogName.get(0), + row.get(0).getStringValue()); + + // Test with empty catalog name list + List<String> testEmptyCatalogList = new ArrayList<>(); + List<FieldValueList> rowsWithNullCatalog = + dbMetadata.prepareGetCatalogsRows(schemaFields, testEmptyCatalogList); + assertNotNull("Rows list should not be null when the catalog list is empty", rowsWithNullCatalog); + assertTrue("Should have zero rows when the catalog list is empty", rowsWithNullCatalog.isEmpty()); + } + + @Test + public void testGetSchemas_NoArgs_DelegatesCorrectly() { + BigQueryDatabaseMetaData spiedDbMetadata = spy(dbMetadata); + ResultSet mockResultSet = mock(ResultSet.class); + doReturn(mockResultSet).when(spiedDbMetadata).getSchemas(null, null); + + ResultSet rs = spiedDbMetadata.getSchemas(); + + assertSame( + "The returned ResultSet should be the one from the two-argument method", mockResultSet, rs); + verify(spiedDbMetadata, times(1)).getSchemas(null, null); + } + + // Non-ResultSet DatabaseMetaData tests + + @Test + public void testIdentifierQuoteString() { + String actual = dbMetadata.getIdentifierQuoteString(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_QUOTED_IDENTIFIER, actual); + } + + @Test + public void testSQLKeyWords() { + String actual = dbMetadata.getSQLKeywords(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_RESERVED_KEYWORDS, actual); + } + + @Test + public void testNumericFunctions() { + String actual = dbMetadata.getNumericFunctions(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_NUMERIC_FNS, actual); + } + + @Test + public void testStringFunctions() { + String actual = dbMetadata.getStringFunctions(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_STRING_FNS, actual); + } + + @Test + public void testTimeAndDateFunctions() { + String actual = dbMetadata.getTimeDateFunctions(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_TIME_DATE_FNS, actual); + } + + @Test + public void testSystemFunctions() { + String actual = dbMetadata.getSystemFunctions(); + assertNull(actual); + } + + @Test + public void testSearchStringEscape() { + String actual = dbMetadata.getSearchStringEscape(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_ESCAPE, actual); + } + + @Test + public void testExtraNameChars() { + String actual = dbMetadata.getExtraNameCharacters(); + assertNull(actual); + } + + @Test + public void testCatalogSeparator() { + String actual = dbMetadata.getCatalogSeparator(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_CATALOG_SEPARATOR, actual); + } + + @Test + public void testMaxCharLiteralLength() { + int actual = dbMetadata.getMaxCharLiteralLength(); + assertEquals(0, actual); + } + + @Test + public void testMaxBinaryLiteralLength() { + int actual = dbMetadata.getMaxBinaryLiteralLength(); + assertEquals(0, actual); + } + + @Test + public void testMaxColumnNameLength() { + int actual = dbMetadata.getMaxColumnNameLength(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_MAX_COL_NAME_LEN, actual); + } + + @Test + public void testMaxColumnsInTable() { + int actual =
dbMetadata.getMaxColumnsInTable(); + assertEquals(BigQueryDatabaseMetaData.GOOGLE_SQL_MAX_COLS_PER_TABLE, actual); + } + + @Test + public void testMaxColumnsInSelect() { + int actual = dbMetadata.getMaxColumnsInSelect(); + assertEquals(0, actual); + } + + @Test + public void testMaxColumnsInGroupBy() { + int actual = dbMetadata.getMaxColumnsInGroupBy(); + assertEquals(0, actual); + } + + @Test + public void testMaxColumnsInOrderBy() { + int actual = dbMetadata.getMaxColumnsInOrderBy(); + assertEquals(0, actual); + } + + @Test + public void testMaxColumnsInIndex() { + int actual = dbMetadata.getMaxColumnsInIndex(); + assertEquals(0, actual); + } + + @Test + public void testSupportsResultSetHoldabilitySupported() { + assertTrue(dbMetadata.supportsResultSetHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT)); + } + + @Test + public void testSupportsResultSetHoldabilityNotSupported() { + assertFalse(dbMetadata.supportsResultSetHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT)); + } + + @Test + public void testSupportsResultSetHoldabilityInvalid() { + assertFalse(dbMetadata.supportsResultSetHoldability(-1)); + } + + @Test + public void testResultSetHoldability() { + int actual = dbMetadata.getResultSetHoldability(); + assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, actual); + } + + @Test + public void testDatabaseMajorVersion() { + int actual = dbMetadata.getDatabaseMajorVersion(); + assertEquals(2, actual); + } + + @Test + public void testDatabaseMinorVersion() { + int actual = dbMetadata.getDatabaseMinorVersion(); + assertEquals(0, actual); + } + + @Test + public void testAllProceduresAreCallable() throws SQLException { + assertFalse(dbMetadata.allProceduresAreCallable()); + } + + @Test + public void testAllTablesAreSelectable() throws SQLException { + assertTrue(dbMetadata.allTablesAreSelectable()); + } + + @Test + public void testGetDriverVersionInfoFromProperties() { + Properties props = new Properties(); + String expectedVersionString = "0.0.0"; + int expectedMajor = 0; + int expectedMinor = 0; + + try (InputStream input = + getClass().getResourceAsStream("/com/google/cloud/bigquery/jdbc/dependencies.properties")) { + if (input != null) { + props.load(input); + expectedVersionString = props.getProperty("version.jdbc"); + if (expectedVersionString != null) { + String[] parts = expectedVersionString.split("\\."); + expectedMajor = Integer.parseInt(parts[0]); + expectedMinor = Integer.parseInt(parts[1].replaceAll("[^0-9].*", "")); + } + } + } catch (IOException | NumberFormatException e) { + fail( + "Error reading or parsing dependencies.properties for testing driver version: " + + e.getMessage()); + } + assertEquals(expectedVersionString, dbMetadata.getDriverVersion()); + assertEquals(expectedMajor, dbMetadata.getDriverMajorVersion()); + assertEquals(expectedMinor, dbMetadata.getDriverMinorVersion()); + } + + @Test + public void testSupportsUnion() throws SQLException { + assertTrue(dbMetadata.supportsUnion()); + } + + @Test + public void testSupportsUnionAll() throws SQLException { + assertTrue(dbMetadata.supportsUnionAll()); + } + + @Test + public void testGetMaxConnections() throws SQLException { + assertEquals(0, dbMetadata.getMaxConnections()); + } + + @Test + public void testGetMaxCursorNameLength() throws SQLException { + assertEquals(0, dbMetadata.getMaxCursorNameLength()); + } + + @Test + public void testGetMaxIndexLength() throws SQLException { + assertEquals(0, dbMetadata.getMaxIndexLength()); + } + + @Test + public void testGetMaxSchemaNameLength() throws SQLException { + 
assertEquals(1024, dbMetadata.getMaxSchemaNameLength()); + } + + @Test + public void testGetMaxProcedureNameLength() throws SQLException { + assertEquals(256, dbMetadata.getMaxProcedureNameLength()); + } + + @Test + public void testGetMaxCatalogNameLength() throws SQLException { + assertEquals(30, dbMetadata.getMaxCatalogNameLength()); + } + + @Test + public void testGetMaxRowSize() throws SQLException { + assertEquals(0, dbMetadata.getMaxRowSize()); + } + + @Test + public void testDoesMaxRowSizeIncludeBlobs() { + assertFalse(dbMetadata.doesMaxRowSizeIncludeBlobs()); + } + + @Test + public void testGetMaxStatementLength() throws SQLException { + assertEquals(0, dbMetadata.getMaxStatementLength()); + } + + @Test + public void testGetMaxStatements() throws SQLException { + assertEquals(0, dbMetadata.getMaxStatements()); + } + + @Test + public void testGetMaxTableNameLength() throws SQLException { + assertEquals(1024, dbMetadata.getMaxTableNameLength()); + } + + @Test + public void testGetMaxTablesInSelect() throws SQLException { + assertEquals(1000, dbMetadata.getMaxTablesInSelect()); + } + + @Test + public void testGetDefaultTransactionIsolation() throws SQLException { + assertEquals(8, dbMetadata.getDefaultTransactionIsolation()); + } + + @Test + public void testSupportsResultSetType() throws SQLException { + assertTrue(dbMetadata.supportsResultSetType(ResultSet.TYPE_FORWARD_ONLY)); + assertFalse(dbMetadata.supportsResultSetType(ResultSet.TYPE_SCROLL_INSENSITIVE)); + assertFalse(dbMetadata.supportsResultSetType(ResultSet.TYPE_SCROLL_SENSITIVE)); + } + + @Test + public void testSupportsResultSetConcurrency() throws SQLException { + assertTrue( + dbMetadata.supportsResultSetConcurrency( + ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)); + assertFalse( + dbMetadata.supportsResultSetConcurrency( + ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE)); + assertFalse( + dbMetadata.supportsResultSetConcurrency( + ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)); + assertFalse( + dbMetadata.supportsResultSetConcurrency( + ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_UPDATABLE)); + } + + @Test + public void testGetSQLStateType() throws SQLException { + assertEquals(DatabaseMetaData.sqlStateSQL, dbMetadata.getSQLStateType()); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercionsTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercionsTest.java new file mode 100644 index 0000000000..d9cc0efb77 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDefaultCoercionsTest.java @@ -0,0 +1,221 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.BigQueryTypeCoercer.INSTANCE; +import static com.google.common.truth.Truth.assertThat; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.Arrays; +import java.util.Collection; +import org.junit.Test; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; +import org.junit.runners.Parameterized.Parameters; + +@RunWith(Parameterized.class) +public class BigQueryDefaultCoercionsTest { + + private final BigQueryTypeCoercer bigQueryTypeCoercer; + + public BigQueryDefaultCoercionsTest(String label, BigQueryTypeCoercer bigQueryTypeCoercer) { + this.bigQueryTypeCoercer = bigQueryTypeCoercer; + } + + @Parameters(name = "{index}: {0}") + public static Collection<Object[]> data() { + return Arrays.asList( + new Object[][] { + {"default BigQueryTypeCoercer", INSTANCE}, + {"customizable BigQueryTypeCoercer", BigQueryTypeCoercer.builder().build()} + }); + } + + @Test + public void stringToBoolean() { + assertThat(bigQueryTypeCoercer.coerceTo(Boolean.class, "true")).isTrue(); + assertThat(bigQueryTypeCoercer.coerceTo(Boolean.class, "false")).isFalse(); + } + + @Test + public void stringToInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, "3452148")).isEqualTo(3452148); + } + + @Test + public void stringToBigInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(BigInteger.class, "2147483647456")) + .isEqualTo(new BigInteger("2147483647456")); + } + + @Test + public void stringToLong() { + assertThat(bigQueryTypeCoercer.coerceTo(Long.class, "2147483647456")) + .isEqualTo(Long.valueOf("2147483647456")); + } + + @Test + public void stringToDouble() { + assertThat(bigQueryTypeCoercer.coerceTo(Double.class, "2147483647456.56684593495")) + .isEqualTo(Double.valueOf("2147483647456.56684593495")); + } + + @Test + public void stringToBigDecimal() { + assertThat(bigQueryTypeCoercer.coerceTo(BigDecimal.class, "2147483647456.56684593495")) + .isEqualTo(new BigDecimal("2147483647456.56684593495")); + } + + @Test + public void booleanToString() { + assertThat(bigQueryTypeCoercer.coerceTo(String.class, true)).isEqualTo("true"); + assertThat(bigQueryTypeCoercer.coerceTo(String.class, false)).isEqualTo("false"); + } + + @Test + public void booleanToInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, true)).isEqualTo(1); + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, false)).isEqualTo(0); + } + + @Test + public void longToInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, 2147483647L)).isEqualTo(2147483647); + } + + @Test + public void longToShort() { + assertThat(bigQueryTypeCoercer.coerceTo(Short.class, 32000L)).isEqualTo((short) 32000); + } + + @Test + public void longToByte() { + assertThat(bigQueryTypeCoercer.coerceTo(Byte.class, 127L)).isEqualTo((byte) 127); + } + + @Test + public void longToDouble() { + assertThat(bigQueryTypeCoercer.coerceTo(Double.class, 2147483647456L)) + .isEqualTo(Double.valueOf("2147483647456")); + } + + @Test + public void longToString() { + assertThat(bigQueryTypeCoercer.coerceTo(String.class, 2147483647456L)) + .isEqualTo("2147483647456"); + } + + @Test + public void doubleToFloat() { + assertThat(bigQueryTypeCoercer.coerceTo(Float.class, Double.valueOf("4567.213245"))) + .isEqualTo(Float.valueOf("4567.213245")); + } + + @Test + public void doubleToLong() { + assertThat(bigQueryTypeCoercer.coerceTo(Long.class, Double.valueOf("2147483647456.213245"))) + .isEqualTo(2147483647456L); + } + + @Test +
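// The floating-point to integral coercions below truncate the fractional part toward zero rather than rounding, per the expected values. +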
public void doubleToInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, Double.valueOf("21474836.213245"))) + .isEqualTo(21474836); + } + + @Test + public void doubleToBigDecimal() { + assertThat(bigQueryTypeCoercer.coerceTo(BigDecimal.class, Double.valueOf("21474836.213245"))) + .isEqualTo(new BigDecimal("21474836.213245")); + } + + @Test + public void doubleToString() { + assertThat(bigQueryTypeCoercer.coerceTo(String.class, Double.valueOf("21474836.213245"))) + .isEqualTo("2.1474836213245E7"); + } + + @Test + public void floatToInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, 62356.45f)).isEqualTo(62356); + } + + @Test + public void floatToDouble() { + assertThat(bigQueryTypeCoercer.coerceTo(Double.class, 62356.45f)) + .isEqualTo(Double.valueOf(62356.45f)); + } + + @Test + public void floatToString() { + assertThat(bigQueryTypeCoercer.coerceTo(String.class, 62356.45f)).isEqualTo("62356.45"); + } + + @Test + public void bigIntegerToLong() { + assertThat(bigQueryTypeCoercer.coerceTo(Long.class, new BigInteger("2147483647"))) + .isEqualTo(2147483647L); + } + + @Test + public void bigIntegerToBigDecimal() { + assertThat(bigQueryTypeCoercer.coerceTo(BigDecimal.class, new BigInteger("2147483647"))) + .isEqualTo(new BigDecimal("2147483647")); + } + + @Test + public void bigIntegerToString() { + assertThat(bigQueryTypeCoercer.coerceTo(String.class, new BigInteger("2147483647"))) + .isEqualTo("2147483647"); + } + + @Test + public void bigDecimalToDouble() { + assertThat(bigQueryTypeCoercer.coerceTo(Double.class, new BigDecimal("2147483647.74356"))) + .isEqualTo(2147483647.74356); + } + + @Test + public void bigDecimalToBigInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(BigInteger.class, new BigDecimal("2147483647.74356"))) + .isEqualTo(new BigInteger("2147483647")); + } + + @Test + public void bigDecimalToInteger() { + assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, new BigDecimal("2147483647.74356"))) + .isEqualTo(2147483647); + } + + @Test + public void bigDecimalToLong() { + assertThat(bigQueryTypeCoercer.coerceTo(Long.class, new BigDecimal("2147483647.74356"))) + .isEqualTo(2147483647L); + } + + @Test + public void bigDecimalToString() { + assertThat(bigQueryTypeCoercer.coerceTo(String.class, new BigDecimal("2147483647.74356"))) + .isEqualTo("2147483647.74356"); + } + + @Test + public void nullToBoolean() { + assertThat(bigQueryTypeCoercer.coerceTo(Boolean.class, null)).isFalse(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDriverTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDriverTest.java new file mode 100644 index 0000000000..2d7664f5a0 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryDriverTest.java @@ -0,0 +1,112 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.fail; + +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import java.sql.Connection; +import java.sql.DriverPropertyInfo; +import java.sql.SQLException; +import java.util.Properties; +import java.util.logging.Logger; +import org.junit.Before; +import org.junit.Test; + +public class BigQueryDriverTest { + + static BigQueryDriver bigQueryDriver; + + @Before + public void setUp() { + bigQueryDriver = BigQueryDriver.getRegisteredDriver(); + } + + @Test + public void testInvalidURLReturnsNull() throws SQLException { + assertThat(bigQueryDriver.connect("badURL.com", null)).isNull(); + } + + @Test + public void testValidURLDoesConnect() throws SQLException { + Connection connection = + bigQueryDriver.connect( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=redactedToken;OAuthClientId=redactedToken;" + + "OAuthClientSecret=redactedToken;", + new Properties()); + assertThat(connection.isClosed()).isFalse(); + } + + @Test + public void testInvalidURLInAcceptsURLReturnsFalse() throws SQLException { + assertThat(bigQueryDriver.acceptsURL("badURL.com")).isFalse(); + } + + @Test + public void testValidURLInAcceptsURLReturnsTrue() throws SQLException { + assertThat(bigQueryDriver.acceptsURL("jdbc:bigquery:https://google.com:8080;projectId=123456")) + .isTrue(); + } + + @Test + public void testGetPropertyInfoReturnsValidProperties() { + DriverPropertyInfo[] res = + bigQueryDriver.getPropertyInfo( + "jdbc:bigquery:https://google.com:8080;projectId=123456;OAuthType=3", new Properties()); + int i = 0; + for (BigQueryConnectionProperty prop : BigQueryJdbcUrlUtility.VALID_PROPERTIES) { + assertThat(res[i].name).isEqualTo(prop.getName()); + i++; + } + } + + @Test + public void testGetMajorVersionMatchesDriverMajorVersion() { + assertThat(bigQueryDriver.getMajorVersion()).isEqualTo(0); + } + + @Test + public void testGetMinorVersionMatchesDriverMinorVersion() { + assertThat(bigQueryDriver.getMinorVersion()).isEqualTo(1); + } + + @Test + public void testGetParentLoggerReturnsLogger() { + assertThat(bigQueryDriver.getParentLogger()).isInstanceOf(Logger.class); + } + + @Test + public void testJDBCCompliantReturnsFalse() { + assertThat(bigQueryDriver.jdbcCompliant()).isFalse(); + } + + @Test + public void testConnectWithInvalidUrlChainsException() { + try { + bigQueryDriver.connect( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;InvalidProperty=Value", + new Properties()); + fail("Expected SQLException"); + } catch (SQLException e) { + assertThat((Throwable) e).isInstanceOf(BigQueryJdbcException.class); + assertThat(e.getCause()).isInstanceOf(BigQueryJdbcRuntimeException.class); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBaseTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBaseTest.java new file mode 100644 index 0000000000..1ee627b8af --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcBaseTest.java @@ -0,0 +1,82 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import com.google.cloud.bigquery.jdbc.utils.URIBuilder; + +public class BigQueryJdbcBaseTest { + + // This is a fake pkcs8 key generated specifically for unittests + protected static final String fake_pkcs8_key = + "-----BEGIN PRIVATE KEY-----\n" + + // + "MIIBUwIBADANBgkqhkiG9w0BAQEFAASCAT0wggE5AgEAAkEAnt6w5AMZBvOecsJ9\n" + + // + "4TeVz+GpAtBnTqkxWfxLJykkvb+V/3IhXr5Zw40y47RdoRly/QDFJz3Ac+nmwCSP\n" + + // + "8QW3GQIDAQABAkBPmdrd1COFFSnN7F9wKg65QyMQ0uUAR8v/f2cUbwwGuhwdMuGZ\n" + + // + "DPwgVZySxFKort7TfPru6NzbACL3EFAl9y9RAiEA7XPq5Tu+LOw4/CZFABykguBV\n" + + // + "8rYC+F72+GqkhvlGhZUCIQCrR2/zGIKqJSTKfQhKOteP7cx5dWrumHYNuC5InOGC\n" + + // + "dQIgM6bzgcntJHh+LNtmRw/z+UQzbgiJvN1re7426+VtocECIE7ejFxicviqNfDP\n" + + // + "9ltIES8Dj152hRDtP589qoJhSy5pAiAJot/kBQD8yFYMU1X02oi+6f8QqXxcHwZX\n" + + // + "2wK1Zawz/A==\n" + + // + "-----END PRIVATE KEY-----"; + + protected static URIBuilder getBaseUri() { + return new URIBuilder("jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"); + } + + protected static URIBuilder getBaseUri(int authType) { + return getBaseUri().append("OAuthType", authType); + } + + protected static URIBuilder getBaseUri(int authType, String projectId) { + return getBaseUri(authType).append("ProjectId", projectId); + } + + protected static URIBuilder getUriOAuthServiceAccount() { + return getBaseUri() + .append("OAuthType", 0) + .append("OAuthServiceAcctEmail", "service@account") + .append("OAuthPvtKey", fake_pkcs8_key); + } + + protected static URIBuilder getUriOAuthUserAccount() { + return getBaseUri() + .append("OAuthType", 1) + .append("OAuthClientId", "client@id") + .append("OAuthClientSecret", "client_secret"); + } + + protected static URIBuilder getUriOAuthToken() { + return getBaseUri().append("OAuthType", 2).append("OAuthAccessToken", "RedactedToken"); + } + + protected static URIBuilder getUriOAuthApplicationDefault() { + return getBaseUri().append("OAuthType", 3); + } + + protected static URIBuilder getUriOAuthExternal() { + return getBaseUri().append("OAuthType", 4).append("OAuthPvtKey", fake_pkcs8_key); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtilityTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtilityTest.java new file mode 100644 index 0000000000..958a5e99ea --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcOAuthUtilityTest.java @@ -0,0 +1,529 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; + +import com.google.auth.oauth2.GoogleCredentials; +import com.google.auth.oauth2.ImpersonatedCredentials; +import com.google.auth.oauth2.UserAuthorizer; +import com.google.auth.oauth2.UserCredentials; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.security.PrivateKey; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.junit.Assert; +import org.junit.Ignore; +import org.junit.Test; + +public class BigQueryJdbcOAuthUtilityTest extends BigQueryJdbcBaseTest { + + private static final int USER_AUTH_PORT = 53737; + private static final String EXPECTED_USER_AUTH_URL = + "https://accounts.google.com/o/oauth2/auth?response_type=code&client_id=client_id&redirect_uri=http://localhost:" + + USER_AUTH_PORT + + "&scope=https://www.googleapis.com/auth/bigquery&state=test_state&access_type=offline&prompt=consent&login_hint=test_user&include_granted_scopes=true"; + + @Test + public void testParseOAuthPropsWithSpecialChars() { + Map<String, String> result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + DataSource.fromUrl( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyProject;OAuthType=0;OAuthServiceAcctEmail=dummy@email.com;" + + "OAuthPvtKey=Key+With+Plus;"), + null); + assertEquals("Key+With+Plus", result.get("OAuthPvtKey")); + } + + @Test + public void testParseOAuthPropsForAuthType0KeyfileOnly() { + Map<String, String> result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + DataSource.fromUrl( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=0;" + + "OAuthPvtKeyPath=C:\\SecureFiles\\ServiceKeyFile.p12;"), + null); + + assertThat(result.get("OAuthType")).isEqualTo("GOOGLE_SERVICE_ACCOUNT"); + assertThat(result.get("OAuthPvtKeyPath")).isEqualTo("C:\\SecureFiles\\ServiceKeyFile.p12"); + } + + @Test + public void testParseOAuthPropsForAuthType0ViaEmail() { + Map<String, String> result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + DataSource.fromUrl( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=0;" + + "OAuthServiceAcctEmail=dummytest@dummytest.iam.gserviceaccount.com;" + + "OAuthPvtKey=RedactedKey;"), + null); + + assertThat(result.get("OAuthType")).isEqualTo("GOOGLE_SERVICE_ACCOUNT"); + assertThat(result.get("OAuthServiceAcctEmail")) + .isEqualTo("dummytest@dummytest.iam.gserviceaccount.com"); + assertThat(result.get("OAuthPvtKey")).isEqualTo("RedactedKey"); + } + + @Test + public void testInvalidTokenUriForAuthType0() { + String connectionString = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=0;" + + "OAuthServiceAcctEmail=dummytest@dummytest.iam.gserviceaccount.com;" + + "OAuthPvtKey=" + + fake_pkcs8_key + + ";" + + "EndpointOverrides=OAuth2=brokenuri{};"; + Map<String, String> oauthProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(DataSource.fromUrl(connectionString), null); + Map<String, String> overrideProperties = + DataSource.fromUrl(connectionString).getOverrideProperties(); + + try { + BigQueryJdbcOAuthUtility.getCredentials(oauthProperties, overrideProperties, null); + Assert.fail(); + } catch (BigQueryJdbcRuntimeException e) { + assertThat(e.getMessage()).contains("java.net.URISyntaxException"); + } + } + + @Test + public void testParseOAuthPropsForAuthType2() { + Map<String, String> result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + DataSource.fromUrl( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken;"), + null); + + assertThat(result.get("OAuthType")).isEqualTo("PRE_GENERATED_TOKEN"); + assertThat(result.get("OAuthAccessToken")).isEqualTo("RedactedToken"); + } + + @Test + public void testParseOAuthPropsForAuthType3() { + Map<String, String> result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + DataSource.fromUrl( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=MyBigQueryProject;"), + null); + + assertThat(result.get("OAuthType")).isEqualTo("APPLICATION_DEFAULT_CREDENTIALS"); + } + + @Test + public void testParseOAuthPropsForDefaultAuthType() { + Map<String, String> result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + DataSource.fromUrl( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;OAuthType=3"), + null); + + assertThat(result.get("OAuthType")).isEqualTo("APPLICATION_DEFAULT_CREDENTIALS"); + } + + @Test + public void testGetCredentialsForPreGeneratedToken() { + Map<String, String> authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + DataSource.fromUrl( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken;"), + null); + + GoogleCredentials credentials = + BigQueryJdbcOAuthUtility.getCredentials(authProperties, Collections.EMPTY_MAP, null); + assertThat(credentials).isNotNull(); + } + + @Test + public void testGetCredentialsForPreGeneratedTokenTPC() throws IOException { + Map<String, String> authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + DataSource.fromUrl( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=2;ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken;" + + "universeDomain=testDomain;"), + null); + Map<String, String> stringStringMap = new HashMap<>(); + stringStringMap.put( + BigQueryJdbcUrlUtility.UNIVERSE_DOMAIN_OVERRIDE_PROPERTY_NAME, "testDomain"); + Map<String, String> overrideProperties = new HashMap<>(stringStringMap); + + GoogleCredentials credentials = + BigQueryJdbcOAuthUtility.getCredentials(authProperties, overrideProperties, null); + assertThat(credentials.getUniverseDomain()).isEqualTo("testDomain"); + } + + @Test + @Ignore // For running locally only, similar to our other JDBC tests.
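+ // Relies on Application Default Credentials being available in the local environment (OAuthType=3).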
+  public void testGetCredentialsForApplicationDefault() {
+    Map<String, String> authProperties =
+        BigQueryJdbcOAuthUtility.parseOAuthProperties(
+            DataSource.fromUrl(
+                "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+                    + "OAuthType=3;ProjectId=MyBigQueryProject;"),
+            null);
+
+    GoogleCredentials credentials =
+        BigQueryJdbcOAuthUtility.getCredentials(authProperties, null, null);
+    assertThat(credentials).isNotNull();
+  }
+
+  @Test
+  public void testParseOAuthPropsForUserAuth() {
+    Map<String, String> authProperties =
+        BigQueryJdbcOAuthUtility.parseOAuthProperties(
+            DataSource.fromUrl(
+                "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+                    + "OAuthType=1;ProjectId=MyBigQueryProject;"
+                    + "OAuthClientId=client;OAuthClientSecret=secret;"),
+            null);
+
+    assertThat(authProperties.get("OAuthType")).isEqualTo("GOOGLE_USER_ACCOUNT");
+    assertThat(authProperties.get("OAuthClientId")).isEqualTo("client");
+    assertThat(authProperties.get("OAuthClientSecret")).isEqualTo("secret");
+  }
+
+  @Test
+  public void testGenerateUserAuthURL() {
+    try {
+      HashMap<String, String> authProperties = new HashMap<>();
+      authProperties.put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, "client_id");
+      authProperties.put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, "client_secret");
+
+      UserAuthorizer userAuthorizer =
+          BigQueryJdbcOAuthUtility.getUserAuthorizer(
+              authProperties, new HashMap<>(), USER_AUTH_PORT, null);
+
+      String userId = "test_user";
+      String state = "test_state";
+      URI baseURI = URI.create("http://example.com/foo");
+
+      URL authURL = userAuthorizer.getAuthorizationUrl(userId, state, baseURI);
+
+      assertThat(authURL.toString()).isEqualTo(EXPECTED_USER_AUTH_URL);
+    } catch (URISyntaxException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Test
+  public void testGenerateUserAuthURLOverrideOauthEndpoint() {
+    try {
+
+      URI overrideTokenServerURI = new URI("https://oauth2-gsprivateall.p.googleapis.com/token");
+      String connectionString =
+          "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+              + "ProjectId=MyBigQueryProject;OAuthType=1;"
+              + "OAuthClientId=client;OAuthClientSecret=secret;"
+              + "EndpointOverrides=OAuth2="
+              + overrideTokenServerURI
+              + ";";
+      Map<String, String> authProperties =
+          BigQueryJdbcOAuthUtility.parseOAuthProperties(DataSource.fromUrl(connectionString), null);
+      Map<String, String> overrideProperties =
+          DataSource.fromUrl(connectionString).getOverrideProperties();
+
+      UserAuthorizer userAuthorizer =
+          BigQueryJdbcOAuthUtility.getUserAuthorizer(
+              authProperties, overrideProperties, USER_AUTH_PORT, null);
+
+      assertThat(overrideTokenServerURI).isEqualTo(userAuthorizer.toBuilder().getTokenServerUri());
+    } catch (URISyntaxException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  @Test
+  public void testParseOAuthPropsForRefreshToken() {
+    Map<String, String> authProperties =
+        BigQueryJdbcOAuthUtility.parseOAuthProperties(
+            DataSource.fromUrl(
+                "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+                    + "OAuthType=2;ProjectId=MyBigQueryProject;OAuthRefreshToken=token;"
+                    + "OAuthClientId=client;OAuthClientSecret=secret;"),
+            null);
+
+    assertThat(authProperties.get("OAuthType")).isEqualTo("PRE_GENERATED_TOKEN");
+    assertThat(authProperties.get("OAuthRefreshToken")).isEqualTo("token");
+    assertThat(authProperties.get("OAuthClientId")).isEqualTo("client");
+    assertThat(authProperties.get("OAuthClientSecret")).isEqualTo("secret");
+  }
+
+  @Test
+  public void testParseOverridePropsForRefreshTokenAuth() {
+    try {
+
+      String connectionString =
+          "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
"ProjectId=MyBigQueryProject;OAuthType=2;OAuthRefreshToken=token;" + + "OAuthClientId=client;OAuthClientSecret=secret;" + + "EndpointOverrides=Oauth2=https://oauth2-private.p.googleapis.com/token;"; + + Map authProperties = + BigQueryJdbcOAuthUtility.parseOAuthProperties(DataSource.fromUrl(connectionString), null); + Map overrideProperties = + DataSource.fromUrl(connectionString).getOverrideProperties(); + + UserCredentials userCredentials = + BigQueryJdbcOAuthUtility.getPreGeneratedRefreshTokenCredentials( + authProperties, overrideProperties, null); + + assertThat(userCredentials.toBuilder().getTokenServerUri()) + .isEqualTo(URI.create("https://oauth2-private.p.googleapis.com/token")); + + } catch (URISyntaxException e) { + throw new RuntimeException(e); + } + } + + @Test + public void testParseBYOIDProps() { + Map result = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + DataSource.fromUrl( + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:433;OAuthType=4;" + + "ProjectId=MyBigQueryProject;" + + "BYOID_AudienceUri=//iam.googleapis.com/locations/global/workforcePools/pool-id/providers/provider-id;" + + "BYOID_PoolUserProject=workforceProjectNumber;" + + "BYOID_CredentialSource={\"file\": \"C:\\\\Token.txt\"};" + + "BYOID_SA_Impersonation_Uri=testSA;" + + "BYOID_SubjectTokenType=urn:ietf:params:oauth:tokentype:jwt;" + + "BYOID_TokenUri=https://testuri.com/v1/token"), + null); + + assertThat(result.get("BYOID_AudienceUri")) + .isEqualTo( + "//iam.googleapis.com/locations/global/workforcePools/pool-id/providers/provider-id"); + assertThat(result.get("BYOID_PoolUserProject")).isEqualTo("workforceProjectNumber"); + assertThat(result.get("BYOID_CredentialSource")).isEqualTo("{\"file\": \"C:\\\\Token.txt\"}"); + assertThat(result.get("BYOID_SA_Impersonation_Uri")).isEqualTo("testSA"); + assertThat(result.get("BYOID_SubjectTokenType")) + .isEqualTo("urn:ietf:params:oauth:tokentype:jwt"); + assertThat(result.get("BYOID_TokenUri")).isEqualTo("https://testuri.com/v1/token"); + } + + @Test + public void testParseOAuthProperties_UserAccount_RequestDriveScopeEnabled() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=1;OAuthClientId=redactedClientId;OAuthClientSecret=redactedClientSecret;" + + "RequestGoogleDriveScope=1;"; + Map properties = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + DataSource.fromUrl(url), this.getClass().getName()); + assertEquals( + String.valueOf(BigQueryJdbcOAuthUtility.AuthType.GOOGLE_USER_ACCOUNT), + properties.get(BigQueryJdbcUrlUtility.OAUTH_TYPE_PROPERTY_NAME)); + assertEquals( + "redactedClientId", properties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME)); + assertEquals( + "redactedClientSecret", + properties.get(BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME)); + assertEquals( + "1", properties.get(BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME)); + } + + @Test + public void testParseOAuthProperties_UserAccount_RequestDriveScopeDisabled() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=1;OAuthClientId=redactedClientId;OAuthClientSecret=redactedClientSecret;" + + "RequestGoogleDriveScope=0;"; + Map properties = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + DataSource.fromUrl(url), this.getClass().getName()); + assertEquals( + "0", properties.get(BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME)); + } + + @Test + public void testParseOAuthProperties_UserAccount_RequestDriveScopeDefault() { + String url = 
+ "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=1;OAuthClientId=redactedClientId;OAuthClientSecret=redactedClientSecret;"; + Map properties = + BigQueryJdbcOAuthUtility.parseOAuthProperties( + DataSource.fromUrl(url), this.getClass().getName()); + assertEquals( + String.valueOf(BigQueryJdbcUrlUtility.DEFAULT_REQUEST_GOOGLE_DRIVE_SCOPE_VALUE), + properties.get(BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME)); + } + + @Test + public void testGetUserAuthorizer_WithDriveScope() throws URISyntaxException { + Map authProperties = new HashMap<>(); + authProperties.put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, "redactedClientId"); + authProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, "redactedClientSecret"); + authProperties.put(BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, "1"); + + UserAuthorizer authorizer = + BigQueryJdbcOAuthUtility.getUserAuthorizer( + authProperties, Collections.emptyMap(), 12345, this.getClass().getName()); + + assertTrue(authorizer.getScopes().contains("https://www.googleapis.com/auth/bigquery")); + assertTrue(authorizer.getScopes().contains("https://www.googleapis.com/auth/drive.readonly")); + assertEquals(2, authorizer.getScopes().size()); + } + + @Test + public void testGetUserAuthorizer_WithoutDriveScope() throws URISyntaxException { + Map authProperties = new HashMap<>(); + authProperties.put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, "redactedClientId"); + authProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, "redactedClientSecret"); + authProperties.put(BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, "0"); + + UserAuthorizer authorizer = + BigQueryJdbcOAuthUtility.getUserAuthorizer( + authProperties, Collections.emptyMap(), 12345, this.getClass().getName()); + assertTrue(authorizer.getScopes().contains("https://www.googleapis.com/auth/bigquery")); + assertFalse(authorizer.getScopes().contains("https://www.googleapis.com/auth/drive.readonly")); + assertEquals(1, authorizer.getScopes().size()); + } + + @Test + public void testGetUserAuthorizer_InvalidDriveScopeValue() throws URISyntaxException { + Map authProperties = new HashMap<>(); + authProperties.put(BigQueryJdbcUrlUtility.OAUTH_CLIENT_ID_PROPERTY_NAME, "redactedClientId"); + authProperties.put( + BigQueryJdbcUrlUtility.OAUTH_CLIENT_SECRET_PROPERTY_NAME, "redactedClientSecret"); + authProperties.put( + BigQueryJdbcUrlUtility.REQUEST_GOOGLE_DRIVE_SCOPE_PROPERTY_NAME, "invalid_value"); + UserAuthorizer authorizer = + BigQueryJdbcOAuthUtility.getUserAuthorizer( + authProperties, Collections.emptyMap(), 12345, this.getClass().getName()); + assertFalse(authorizer.getScopes().contains("https://www.googleapis.com/auth/drive.readonly")); + } + + @Test + public void testParseUserImpersonationDefault() { + String connectionUri = + getUriOAuthServiceAccount() + .append("ServiceAccountImpersonationEmail", "impersonated") + .toString(); + Map result = + BigQueryJdbcOAuthUtility.parseOAuthProperties(DataSource.fromUrl(connectionUri), ""); + assertEquals( + "impersonated", + result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME)); + assertEquals( + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_SCOPES_VALUE, + result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME)); + assertEquals( + BigQueryJdbcUrlUtility.DEFAULT_OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_VALUE, + 
+        result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME));
+  }
+
+  @Test
+  public void testParseUserImpersonationNonDefault() {
+    Map<String, String> result =
+        BigQueryJdbcOAuthUtility.parseOAuthProperties(
+            DataSource.fromUrl(
+                getUriOAuthServiceAccount()
+                    .append("ServiceAccountImpersonationEmail", "impersonated")
+                    .append("ServiceAccountImpersonationScopes", "scopes")
+                    .append("ServiceAccountImpersonationTokenLifetime", 300)
+                    .toString()),
+            "");
+    assertEquals(
+        "impersonated",
+        result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_EMAIL_PROPERTY_NAME));
+    assertEquals(
+        "scopes", result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_SCOPES_PROPERTY_NAME));
+    assertEquals(
+        "300",
+        result.get(BigQueryJdbcUrlUtility.OAUTH_SA_IMPERSONATION_TOKEN_LIFETIME_PROPERTY_NAME));
+  }
+
+  @Test
+  public void testGetServiceAccountImpersonatedCredentials() {
+    Map<String, String> authProperties =
+        BigQueryJdbcOAuthUtility.parseOAuthProperties(
+            DataSource.fromUrl(
+                getUriOAuthServiceAccount()
+                    .append("ServiceAccountImpersonationEmail", "impersonated")
+                    .toString()),
+            "");
+    GoogleCredentials credentials =
+        BigQueryJdbcOAuthUtility.getCredentials(authProperties, Collections.emptyMap(), null);
+    assertThat(credentials).isInstanceOf(ImpersonatedCredentials.class);
+  }
+
+  @Test
+  public void testPrivateKeyFromPkcs8() {
+    PrivateKey pk = BigQueryJdbcOAuthUtility.privateKeyFromPkcs8(fake_pkcs8_key);
+    assertNotNull(pk);
+  }
+
+  @Test
+  public void testPrivateKeyFromPkcs8_wrong() {
+    PrivateKey pk = BigQueryJdbcOAuthUtility.privateKeyFromPkcs8("");
+    assertNull(pk);
+  }
+
+  // Command to generate key:
+  // keytool -genkey -alias privatekey -keyalg RSA -keysize 2048 -storepass notasecret \
+  //   -keypass notasecret -storetype pkcs12 -keystore ./fake.p12
+  @Test
+  public void testPrivateKeyFromP12Bytes() {
+    URL resource = BigQueryJdbcOAuthUtilityTest.class.getResource("/fake.p12");
+    try {
+      PrivateKey pk =
+          BigQueryJdbcOAuthUtility.privateKeyFromP12Bytes(
+              Files.readAllBytes(Paths.get(resource.toURI())), "notasecret");
+      assertNotNull(pk);
+    } catch (Exception e) {
+      Assert.fail(e.getMessage());
+    }
+  }
+
+  @Test
+  public void testPrivateKeyFromP12Bytes_wrong_password() {
+    URL resource = BigQueryJdbcOAuthUtilityTest.class.getResource("/fake.p12");
+    try {
+      PrivateKey pk =
+          BigQueryJdbcOAuthUtility.privateKeyFromP12Bytes(
+              Files.readAllBytes(Paths.get(resource.toURI())), "fake");
+      assertNull(pk);
+    } catch (Exception e) {
+      Assert.fail(e.getMessage());
+    }
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameterTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameterTest.java
new file mode 100644
index 0000000000..815759892b
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcParameterTest.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static org.junit.Assert.assertEquals;
+
+import com.google.cloud.bigquery.StandardSQLTypeName;
+import com.google.cloud.bigquery.jdbc.BigQueryParameterHandler.BigQueryStatementParameterType;
+import org.junit.Test;
+
+public class BigQueryJdbcParameterTest {
+
+  @Test
+  public void testSettersAndGetters() {
+    int expectedIndex = 3;
+    Object expectedValue = "String Object";
+    Class<?> expectedType = String.class;
+    StandardSQLTypeName expectedSqlType = StandardSQLTypeName.STRING;
+    String expectedParamName = "StringParameter";
+    BigQueryStatementParameterType expectedParamType = BigQueryStatementParameterType.IN;
+    int expectedScale = -1;
+
+    BigQueryJdbcParameter parameter = new BigQueryJdbcParameter();
+    parameter.setIndex(3);
+    parameter.setValue("String Object");
+    parameter.setType(String.class);
+    parameter.setSqlType(StandardSQLTypeName.STRING);
+    parameter.setParamName("StringParameter");
+    parameter.setParamType(BigQueryStatementParameterType.IN);
+    parameter.setScale(-1);
+
+    assertEquals(expectedIndex, parameter.getIndex());
+    assertEquals(expectedValue, parameter.getValue());
+    assertEquals(expectedType, parameter.getType());
+    assertEquals(expectedSqlType, parameter.getSqlType());
+    assertEquals(expectedParamName, parameter.getParamName());
+    assertEquals(expectedParamType, parameter.getParamType());
+    assertEquals(expectedScale, parameter.getScale());
+  }
+
+  @Test
+  public void testCopyConstructor() {
+    int expectedIndex = 3;
+    Object expectedValue = "String Object";
+    Class<?> expectedType = String.class;
+    StandardSQLTypeName expectedSqlType = StandardSQLTypeName.STRING;
+
+    BigQueryJdbcParameter parameter = new BigQueryJdbcParameter();
+    parameter.setIndex(3);
+    parameter.setValue("String Object");
+    parameter.setType(String.class);
+    parameter.setSqlType(StandardSQLTypeName.STRING);
+
+    BigQueryJdbcParameter copiedParameter = new BigQueryJdbcParameter(parameter);
+
+    assertEquals(expectedIndex, copiedParameter.getIndex());
+    assertEquals(expectedValue, copiedParameter.getValue());
+    assertEquals(expectedType, copiedParameter.getType());
+    assertEquals(expectedSqlType, copiedParameter.getSqlType());
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtilityTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtilityTest.java
new file mode 100644
index 0000000000..a69cf68e0b
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcProxyUtilityTest.java
@@ -0,0 +1,338 @@
+/*
+ * Copyright 2024 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThrows;
+
+import com.google.api.gax.rpc.TransportChannelProvider;
+import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException;
+import com.google.cloud.http.HttpTransportOptions;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import org.junit.Test;
+
+public class BigQueryJdbcProxyUtilityTest {
+  @Test
+  public void testParsingAllProxyProperties() {
+    Map<String, String> result =
+        BigQueryJdbcProxyUtility.parseProxyProperties(
+            "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+                + "ProjectId=MyBigQueryProject;OAuthType=3;"
+                + "ProxyHost=34.94.167.18;"
+                + "ProxyPort=3128;"
+                + "ProxyUid=fahmz;"
+                + "ProxyPwd=pass;",
+            null);
+
+    assertThat(result.get("ProxyHost")).isEqualTo("34.94.167.18");
+    assertThat(result.get("ProxyPort")).isEqualTo("3128");
+    assertThat(result.get("ProxyUid")).isEqualTo("fahmz");
+    assertThat(result.get("ProxyPwd")).isEqualTo("pass");
+  }
+
+  @Test
+  public void testParsingInvalidPortThrowsIllegalArgument() {
+    assertThrows(
+        IllegalArgumentException.class,
+        () ->
+            BigQueryJdbcProxyUtility.parseProxyProperties(
+                "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+                    + "ProjectId=MyBigQueryProject;OAuthType=3;"
+                    + "ProxyHost=34.94.167.18;"
+                    + "ProxyPort=portnumber;"
+                    + "ProxyUid=fahmz;"
+                    + "ProxyPwd=pass;",
+                null));
+  }
+
+  @Test
+  public void testMissingHostThrowsIllegalArgument() {
+    assertThrows(
+        IllegalArgumentException.class,
+        () ->
+            BigQueryJdbcProxyUtility.parseProxyProperties(
+                "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+                    + "ProjectId=MyBigQueryProject;OAuthType=3;"
+                    + "ProxyPort=3128;"
+                    + "ProxyUid=fahmz;"
+                    + "ProxyPwd=pass;",
+                null));
+  }
+
+  @Test
+  public void testMissingPortThrowsIllegalArgument() {
+    assertThrows(
+        IllegalArgumentException.class,
+        () ->
+            BigQueryJdbcProxyUtility.parseProxyProperties(
+                "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+                    + "ProjectId=MyBigQueryProject;OAuthType=3;"
+                    + "ProxyHost=34.94.167.18;"
+                    + "ProxyUid=fahmz;"
+                    + "ProxyPwd=pass;",
+                null));
+  }
+
+  @Test
+  public void testMissingUidWithPwdThrowsIllegalArgument() {
+    assertThrows(
+        IllegalArgumentException.class,
+        () ->
+            BigQueryJdbcProxyUtility.parseProxyProperties(
+                "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+                    + "ProjectId=MyBigQueryProject;OAuthType=3;"
+                    + "ProxyHost=34.94.167.18;"
+                    + "ProxyPort=3128;"
+                    + "ProxyPwd=pass;",
+                null));
+  }
+
+  @Test
+  public void testMissingPwdWithUidThrowsIllegalArgument() {
+    assertThrows(
+        IllegalArgumentException.class,
+        () ->
+            BigQueryJdbcProxyUtility.parseProxyProperties(
+                "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+                    + "ProjectId=MyBigQueryProject;OAuthType=3;"
+                    + "ProxyHost=34.94.167.18;"
+                    + "ProxyPort=3128;"
+                    + "ProxyUid=fahmz;",
+                null));
+  }
+
+  @Test
+  public void testGetHttpTransportOptionsWithAuthenticatedProxy() {
+    String connection_uri =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+            + "ProjectId=TestProject"
+            + ";OAuthType=3;"
+            + "ProxyHost=34.94.167.18;"
+            + "ProxyPort=3128;"
+            + "ProxyUid=fahmz;"
+            + "ProxyPwd=pass;";
+
+    Map<String, String> proxyProperties =
+        BigQueryJdbcProxyUtility.parseProxyProperties(connection_uri, null);
+    HttpTransportOptions result =
+        BigQueryJdbcProxyUtility.getHttpTransportOptions(
+            proxyProperties, null, null, null, null, "TestClass");
+    assertNotNull(result);
+  }
+
+  @Test
+  public void testGetHttpTransportOptionsWithNonAuthenticatedProxy() {
+    String connection_uri =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+            + "ProjectId=TestProject"
+            + ";OAuthType=3;"
+            + "ProxyHost=34.94.167.18;"
+            + "ProxyPort=3128;";
+
+    Map<String, String> proxyProperties =
+        BigQueryJdbcProxyUtility.parseProxyProperties(connection_uri, null);
+    HttpTransportOptions result =
+        BigQueryJdbcProxyUtility.getHttpTransportOptions(
+            proxyProperties, null, null, null, null, "TestClass");
+    assertNotNull(result);
+  }
+
+  @Test
+  public void testGetHttpTransportOptionsWithNoProxySettingsReturnsNull() {
+    String connection_uri =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+            + "ProjectId=TestProject"
+            + ";OAuthType=3;";
+
+    Map<String, String> proxyProperties =
+        BigQueryJdbcProxyUtility.parseProxyProperties(connection_uri, null);
+    HttpTransportOptions result =
+        BigQueryJdbcProxyUtility.getHttpTransportOptions(
+            proxyProperties, null, null, null, null, "TestClass");
+    assertNull(result);
+  }
+
+  private String getTestResourcePath(String resourceName) throws URISyntaxException {
+    URL resourceUrl = getClass().getClassLoader().getResource(resourceName);
+    if (resourceUrl == null) {
+      throw new RuntimeException("Test resource not found: " + resourceName);
+    }
+    return new File(resourceUrl.toURI()).getAbsolutePath();
+  }
+
+  @Test
+  public void testGetHttpTransportOptions_withSslTrustStore_noPassword() throws Exception {
+    String trustStorePath = getTestResourcePath("test_truststore_nopass.jks");
+    HttpTransportOptions options =
+        BigQueryJdbcProxyUtility.getHttpTransportOptions(
+            Collections.emptyMap(), trustStorePath, null, null, null, "TestClass");
+    assertNotNull(options);
+    assertNotNull(options.getHttpTransportFactory());
+  }
+
+  @Test
+  public void testGetHttpTransportOptions_withSslTrustStore_withCorrectPassword() throws Exception {
+    String trustStorePath = getTestResourcePath("test_truststore_withpass.jks");
+    HttpTransportOptions options =
+        BigQueryJdbcProxyUtility.getHttpTransportOptions(
+            Collections.emptyMap(),
+            trustStorePath,
+            "testpassword",
+            null,
+            null,
+            "TestClass");
+    assertNotNull(options);
+    assertNotNull(options.getHttpTransportFactory());
+  }
+
+  @Test
+  public void testGetHttpTransportOptions_withSslTrustStore_withIncorrectPassword()
+      throws Exception {
+    String trustStorePath = getTestResourcePath("test_truststore_withpass.jks");
+    BigQueryJdbcRuntimeException exception =
+        assertThrows(
+            BigQueryJdbcRuntimeException.class,
+            () ->
+                BigQueryJdbcProxyUtility.getHttpTransportOptions(
+                    Collections.emptyMap(),
+                    trustStorePath,
+                    "wrongpassword",
+                    null,
+                    null,
+                    "TestClass"));
+    assertThat(exception.getCause()).isInstanceOf(IOException.class);
+  }
+
+  @Test
+  public void testGetHttpTransportOptions_withInvalidSslTrustStorePath() {
+    String invalidPath = "/path/to/nonexistent/truststore.jks";
+    BigQueryJdbcRuntimeException exception =
+        assertThrows(
+            BigQueryJdbcRuntimeException.class,
+            () ->
+                BigQueryJdbcProxyUtility.getHttpTransportOptions(
+                    Collections.emptyMap(),
+                    invalidPath,
+                    null,
+                    null,
+                    null,
+                    "TestClass"));
+
+    assertThat(exception.getCause()).isInstanceOf(FileNotFoundException.class);
+  }
+
+  @Test
+  public void testGetHttpTransportOptions_withSslAndProxy() throws Exception {
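+    // The proxy host/port below are placeholder values; constructing the transport
+    // options is not expected to open any network connection.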
+    String trustStorePath = getTestResourcePath("test_truststore_nopass.jks");
+    Map<String, String> proxyProperties = new HashMap<>();
+    proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME, "proxy.example.com");
+    proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME, "8080");
+
+    HttpTransportOptions options =
+        BigQueryJdbcProxyUtility.getHttpTransportOptions(
+            proxyProperties, trustStorePath, null, null, null, "TestClass");
+    assertNotNull(options);
+    assertNotNull(options.getHttpTransportFactory());
+  }
+
+  @Test
+  public void testGetTransportChannelProvider_withSslTrustStore_noPassword() throws Exception {
+    String trustStorePath = getTestResourcePath("test_truststore_nopass.jks");
+    TransportChannelProvider provider =
+        BigQueryJdbcProxyUtility.getTransportChannelProvider(
+            Collections.emptyMap(), trustStorePath, null, "TestClass");
+    assertNotNull(provider);
+  }
+
+  @Test
+  public void testGetTransportChannelProvider_withSslTrustStore_withCorrectPassword()
+      throws Exception {
+    String trustStorePath = getTestResourcePath("test_truststore_withpass.jks");
+    TransportChannelProvider provider =
+        BigQueryJdbcProxyUtility.getTransportChannelProvider(
+            Collections.emptyMap(), trustStorePath, "testpassword", "TestClass");
+    assertNotNull(provider);
+  }
+
+  @Test
+  public void testGetTransportChannelProvider_withSslAndProxy() throws Exception {
+    String trustStorePath = getTestResourcePath("test_truststore_nopass.jks");
+    Map<String, String> proxyProperties = new HashMap<>();
+    proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_HOST_PROPERTY_NAME, "proxy.example.com");
+    proxyProperties.put(BigQueryJdbcUrlUtility.PROXY_PORT_PROPERTY_NAME, "8080");
+
+    TransportChannelProvider provider =
+        BigQueryJdbcProxyUtility.getTransportChannelProvider(
+            proxyProperties, trustStorePath, null, "TestClass");
+    assertNotNull(provider);
+  }
+
+  @Test
+  public void testGetTransportChannelProvider_noProxyNoSsl_returnsNull() {
+    TransportChannelProvider provider =
+        BigQueryJdbcProxyUtility.getTransportChannelProvider(
+            Collections.emptyMap(), null, null, "TestClass");
+    assertNull(provider);
+  }
+
+  @Test
+  public void testGetHttpTransportOptions_noProxyNoSsl_returnsNull() {
+    HttpTransportOptions options =
+        BigQueryJdbcProxyUtility.getHttpTransportOptions(
+            Collections.emptyMap(), null, null, null, null, "TestClass");
+    assertNull(options);
+  }
+
+  @Test
+  public void testGetHttpTransportOptions_withTimeouts_returnsOptions() {
+    HttpTransportOptions options =
+        BigQueryJdbcProxyUtility.getHttpTransportOptions(
+            Collections.emptyMap(), null, null, 10000, 20000, "TestClass");
+    assertNotNull(options);
+    assertThat(options.getConnectTimeout()).isEqualTo(10000);
+    assertThat(options.getReadTimeout()).isEqualTo(20000);
+  }
+
+  @Test
+  public void testGetHttpTransportOptions_withConnectTimeoutOnly_returnsOptions() {
+    HttpTransportOptions options =
+        BigQueryJdbcProxyUtility.getHttpTransportOptions(
+            Collections.emptyMap(), null, null, 10000, null, "TestClass");
+    assertNotNull(options);
+    assertThat(options.getConnectTimeout()).isEqualTo(10000);
+    // readTimeout defaults to HttpTransport's internal default, not asserted here.
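+    // (The timeout arguments passed above are in milliseconds, matching
+    // HttpTransportOptions#getConnectTimeout and #getReadTimeout.)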
+ } + + @Test + public void testGetHttpTransportOptions_withReadTimeoutOnly_returnsOptions() { + HttpTransportOptions options = + BigQueryJdbcProxyUtility.getHttpTransportOptions( + Collections.emptyMap(), null, null, null, 20000, "TestClass"); + assertNotNull(options); + assertThat(options.getReadTimeout()).isEqualTo(20000); + // connectTimeout defaults to HttpTransport's internal default, not asserted here. + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtilityTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtilityTest.java new file mode 100644 index 0000000000..9b65719cbc --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJdbcUrlUtilityTest.java @@ -0,0 +1,214 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; + +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import java.util.Collections; +import java.util.Map; +import java.util.Properties; +import org.junit.Test; + +public class BigQueryJdbcUrlUtilityTest { + + @Test + public void testParsePropertyWithNoDefault() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken"; + + String result = BigQueryJdbcUrlUtility.parseUriProperty(url, "OAuthType"); + assertThat(result).isNull(); + } + + @Test + public void testParseUrlWithUnknownProperty_throwsException() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;" + + "UnknownProperty=SomeValue"; + + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> BigQueryJdbcUrlUtility.parseUriProperty(url, "ProjectId")); + } + + @Test + public void testParseUrlWithTypo_throwsException() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;" + + "ProjeectId=TypoValue"; + + assertThrows( + BigQueryJdbcRuntimeException.class, + () -> BigQueryJdbcUrlUtility.parseUriProperty(url, "ProjectId")); + } + + @Test + public void testParsePropertyWithDefault() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken"; + + String result = BigQueryJdbcUrlUtility.parseUriProperty(url, "OAuthType"); + assertThat(result).isEqualTo(null); + } + + @Test + public void testParsePropertyWithValue() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken"; + + String result = BigQueryJdbcUrlUtility.parseUriProperty(url, "ProjectId"); + 
assertThat(result).isEqualTo("MyBigQueryProject"); + } + + @Test + public void testParsePropertyWithValueCaseInsensitive() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "PROJECTID=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken"; + + String result = BigQueryJdbcUrlUtility.parseUriProperty(url, "ProjectId"); + assertThat(result).isEqualTo("MyBigQueryProject"); + } + + @Test + public void testAppendPropertiesToURL() { + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;" + + "OAuthAccessToken=RedactedToken"; + Properties properties = new Properties(); + properties.setProperty("OAuthType", "3"); + + String updatedUrl = BigQueryJdbcUrlUtility.appendPropertiesToURL(url, null, properties); + assertThat(updatedUrl.contains("OAuthType=3")); + } + + @Test + public void testConnectionPropertiesFromURI() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=testProject;OAUTHTYPE=3;DEFAULTDATASET=testDataset;LOCATION=us-central1"; + + assertThat(BigQueryJdbcUrlUtility.parseUriProperty(connection_uri, "OAUTHTYPE")).isEqualTo("3"); + assertThat(BigQueryJdbcUrlUtility.parseUriProperty(connection_uri, "LOCATION")) + .isEqualTo("us-central1"); + } + + @Test + public void testConnectionPropertiesFromURIMultiline() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthPvtKey=value1\nvalue2\n;"; + + assertThat(BigQueryJdbcUrlUtility.parseUriProperty(connection_uri, "OAuthPvtKey")) + .isEqualTo("value1\nvalue2\n"); + } + + @Test + public void testConnectionPropertiesFromURIMultilineNoSemicolon() { + String connection_uri = + "bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthPvtKey=value1\nvalue2"; + + assertThat(BigQueryJdbcUrlUtility.parseUriProperty(connection_uri, "OAuthPvtKey")) + .isEqualTo("value1\nvalue2"); + } + + @Test + public void testParseUrl_longUnknownProperty_sanitized() { + String longKey = String.join("", Collections.nCopies(50, "a")); + String url = "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + longKey + "=value"; + + BigQueryJdbcRuntimeException e = + assertThrows( + BigQueryJdbcRuntimeException.class, () -> BigQueryJdbcUrlUtility.parseUrl(url)); + + assertThat(e.getMessage()).contains("Wrong value or unknown setting: "); + assertThat(e.getMessage()).contains("..."); + assertThat(e.getMessage()).doesNotContain(longKey); + assertThat(e.getMessage().length()).isLessThan(100); + } + + @Test + public void testParsePartnerTokenProperty() { + // Case with partner name and environment + String url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "PartnerToken=(GPN:partner_company; dev);ProjectId=MyBigQueryProject;"; + String expected = " (GPN:partner_company; dev)"; + String result = BigQueryJdbcUrlUtility.parseUriProperty(url, "PartnerToken"); + assertThat(result).isEqualTo(expected); + + // Case with only partner name + url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "PartnerToken=(GPN:another_partner);ProjectId=MyBigQueryProject;"; + expected = " (GPN:another_partner)"; + result = BigQueryJdbcUrlUtility.parseUriProperty(url, "PartnerToken"); + assertThat(result).isEqualTo(expected); + + // Case when PartnerToken property is not present + url = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=MyBigQueryProject;"; + result = BigQueryJdbcUrlUtility.parseUriProperty(url, "PartnerToken"); + assertNull(result); + + // 
+    url = "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PartnerToken=();";
+    result = BigQueryJdbcUrlUtility.parseUriProperty(url, "PartnerToken");
+    assertNull(result);
+
+    // Case when PartnerToken property is present but without partner name
+    url = "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PartnerToken=(env);";
+    result = BigQueryJdbcUrlUtility.parseUriProperty(url, "PartnerToken");
+    assertNull(result);
+
+    // Case with extra spaces around the values
+    url =
+        "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"
+            + "PartnerToken= ( GPN: partner_name ; test_env ) ;";
+    expected = " (GPN: partner_name ; test_env)";
+    result = BigQueryJdbcUrlUtility.parseUriProperty(url, "PartnerToken");
+    assertThat(result).isEqualTo(expected);
+  }
+
+  @Test
+  public void testAppendPropertiesToURL_propertyWithSemicolon_isEscaped() throws Exception {
+    String url = "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;";
+    Properties properties = new Properties();
+    String complexValue = "value;ExtraProperty=injection";
+    properties.setProperty("ProjectId", complexValue);
+
+    String updatedUrl = BigQueryJdbcUrlUtility.appendPropertiesToURL(url, null, properties);
+
+    Map<String, String> parsedProperties = BigQueryJdbcUrlUtility.parseUrl(updatedUrl);
+
+    assertThat(parsedProperties.get("ProjectId")).isEqualTo(complexValue);
+    assertFalse(parsedProperties.containsKey("ExtraProperty"));
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfPrimitivesTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfPrimitivesTest.java
new file mode 100644
index 0000000000..6f10ae79a0
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfPrimitivesTest.java
@@ -0,0 +1,342 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.cloud.bigquery.StandardSQLTypeName.BIGNUMERIC;
+import static com.google.cloud.bigquery.StandardSQLTypeName.BOOL;
+import static com.google.cloud.bigquery.StandardSQLTypeName.BYTES;
+import static com.google.cloud.bigquery.StandardSQLTypeName.DATE;
+import static com.google.cloud.bigquery.StandardSQLTypeName.DATETIME;
+import static com.google.cloud.bigquery.StandardSQLTypeName.FLOAT64;
+import static com.google.cloud.bigquery.StandardSQLTypeName.GEOGRAPHY;
+import static com.google.cloud.bigquery.StandardSQLTypeName.INT64;
+import static com.google.cloud.bigquery.StandardSQLTypeName.NUMERIC;
+import static com.google.cloud.bigquery.StandardSQLTypeName.STRING;
+import static com.google.cloud.bigquery.StandardSQLTypeName.TIME;
+import static com.google.cloud.bigquery.StandardSQLTypeName.TIMESTAMP;
+import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED;
+import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.INVALID_ARRAY;
+import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arraySchemaAndValue;
+import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.nestedResultSetToColumnLists;
+import static com.google.common.truth.Truth.assertThat;
+import static java.time.Month.MARCH;
+import static java.util.Arrays.copyOfRange;
+import static java.util.Collections.emptyMap;
+import static org.junit.Assert.assertThrows;
+
+import com.google.cloud.Tuple;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.StandardSQLTypeName;
+import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule;
+import com.google.common.io.BaseEncoding;
+import java.math.BigDecimal;
+import java.sql.Array;
+import java.sql.Date;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.sql.Types;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Stream;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.function.ThrowingRunnable;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+@RunWith(Parameterized.class)
+public class BigQueryJsonArrayOfPrimitivesTest {
+
+  private final Field schema;
+  private final FieldValue arrayValues;
+  private final Object[] expected;
+  private final int javaSqlTypeCode;
+  private Array array;
+  private final StandardSQLTypeName currentType;
+
+  @ClassRule public static final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC");
+
+  public BigQueryJsonArrayOfPrimitivesTest(
+      StandardSQLTypeName currentType,
+      Tuple<Field, FieldValue> schemaAndValue,
+      Object[] expected,
+      int javaSqlTypeCode) {
+    this.currentType = currentType;
+    this.schema = schemaAndValue.x();
+    this.arrayValues = schemaAndValue.y();
+    this.expected = expected;
+    this.javaSqlTypeCode = javaSqlTypeCode;
+  }
+
+  @Before
+  public void setUp() {
+    array = new BigQueryJsonArray(this.schema, this.arrayValues);
+  }
+
+  @Parameters(name = "{index}: primitive array of {0}")
+  public static Collection<Object[]> data() {
+    timeZoneRule.enforce();
+    LocalDateTime aTimeStamp = LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820227000);
+    LocalDate aDate = LocalDate.of(2023, MARCH, 30);
+    LocalTime aTime = LocalTime.of(11, 14, 19, 820227000);
+    return Arrays.asList(
+        new Object[][] {
+          {
+            INT64,
+            arraySchemaAndValue(INT64, "10", "20", "30", "40"),
+            new Long[] {10L, 20L, 30L, 40L},
+            Types.BIGINT
+          },
+          {
+            BOOL,
+            arraySchemaAndValue(BOOL, "true", "false", "false", "true"),
+            new Boolean[] {true, false, false, true},
+            Types.BOOLEAN
+          },
+          {
+            FLOAT64,
+            arraySchemaAndValue(FLOAT64, "11.2", "33.4", "55.6", "77.8"),
+            new Double[] {11.2, 33.4, 55.6, 77.8},
+            Types.DOUBLE
+          },
+          {
+            NUMERIC,
+            arraySchemaAndValue(NUMERIC, "11.2657", "33.4657", "55.6657", "77.8657"),
+            new BigDecimal[] {
+              new BigDecimal("11.2657"),
+              new BigDecimal("33.4657"),
+              new BigDecimal("55.6657"),
+              new BigDecimal("77.8657")
+            },
+            Types.NUMERIC
+          },
+          {
+            BIGNUMERIC,
+            arraySchemaAndValue(BIGNUMERIC, "11.2657", "33.4657", "55.6657", "77.8657"),
+            new BigDecimal[] {
+              new BigDecimal("11.2657"),
+              new BigDecimal("33.4657"),
+              new BigDecimal("55.6657"),
+              new BigDecimal("77.8657")
+            },
+            Types.NUMERIC
+          },
+          {
+            STRING,
+            arraySchemaAndValue(STRING, "one", "two", "three", "four"),
+            new String[] {"one", "two", "three", "four"},
+            Types.NVARCHAR
+          },
+          {
+            TIMESTAMP,
+            arraySchemaAndValue(
+                TIMESTAMP,
+                "1680174859.8202269",
+                "1680261259.8202269",
+                "1680347659.8202269",
+                "1680434059.8202269"),
+            new Timestamp[] {
+              Timestamp.valueOf(aTimeStamp), // 2023-03-30 11:14:19.820227 UTC
+              Timestamp.valueOf(aTimeStamp.plusDays(1)),
+              Timestamp.valueOf(aTimeStamp.plusDays(2)),
+              Timestamp.valueOf(aTimeStamp.plusDays(3))
+            },
+            Types.TIMESTAMP
+          },
+          {
+            DATE,
+            arraySchemaAndValue(DATE, "2023-03-30", "2023-03-31", "2023-04-01", "2023-04-02"),
+            new Date[] {
+              Date.valueOf(aDate),
+              Date.valueOf(aDate.plusDays(1)),
+              Date.valueOf(aDate.plusDays(2)),
+              Date.valueOf(aDate.plusDays(3))
+            },
+            Types.DATE
+          },
+          {
+            TIME,
+            arraySchemaAndValue(
+                TIME, "11:14:19.820227", "11:14:20.820227", "11:14:21.820227", "11:14:22.820227"),
+            new Time[] {
+              new Time(TimeUnit.NANOSECONDS.toMillis(aTime.toNanoOfDay())),
+              new Time(TimeUnit.NANOSECONDS.toMillis(aTime.plusSeconds(1).toNanoOfDay())),
+              new Time(TimeUnit.NANOSECONDS.toMillis(aTime.plusSeconds(2).toNanoOfDay())),
+              new Time(TimeUnit.NANOSECONDS.toMillis(aTime.plusSeconds(3).toNanoOfDay()))
+            },
+            Types.TIME
+          },
+          {
+            DATETIME,
+            arraySchemaAndValue(
+                DATETIME,
+                "2023-03-30T11:14:19.820227",
+                "2023-03-30T11:15:19.820227",
+                "2023-03-30T11:16:19.820227",
+                "2023-03-30T11:17:19.820227"),
+            new Timestamp[] {
+              Timestamp.valueOf("2023-03-30 11:14:19.820227"),
+              Timestamp.valueOf("2023-03-30 11:15:19.820227"),
+              Timestamp.valueOf("2023-03-30 11:16:19.820227"),
+              Timestamp.valueOf("2023-03-30 11:17:19.820227")
+            },
+            Types.TIMESTAMP
+          },
+          {
+            GEOGRAPHY,
+            arraySchemaAndValue(
+                GEOGRAPHY, "POINT(-122 47)", "POINT(-122 48)", "POINT(-121 47)", "POINT(-123 48)"),
+            new String[] {"POINT(-122 47)", "POINT(-122 48)", "POINT(-121 47)", "POINT(-123 48)"},
+            Types.OTHER
+          },
+          {
+            BYTES,
+            arraySchemaAndValue(
+                BYTES,
+                Stream.of("one", "two", "three", "four")
+                    .map(s -> BaseEncoding.base64().encode(s.getBytes()))
+                    .toArray(String[]::new)),
+            new byte[][] {
+              "one".getBytes(), "two".getBytes(), "three".getBytes(), "four".getBytes()
+            },
+            Types.VARBINARY
+          }
+        });
+  }
+
+  @Test
+  public void getArray() throws SQLException {
+    assertThat(array.getArray()).isEqualTo(this.expected);
+  }
+
+  @Test
+  public void getSlicedArray() throws SQLException {
+    int fromIndex = 1;
+    int toIndexExclusive = 3;
+    Object[] expectedSlicedArray =
+        copyOfRange(this.expected, fromIndex, toIndexExclusive); // copying index(1,2)
+
+    // the first element is at index 1
+    assertThat(array.getArray(fromIndex + 1, 2)).isEqualTo(expectedSlicedArray);
+  }
+
+  @Test
+  public void getSlicedArrayWhenCountIsGreaterThanOriginalArrayLength() {
+    IllegalArgumentException illegalArgumentException =
+        assertThrows(IllegalArgumentException.class, () -> array.getArray(2, 10));
+    assertThat(illegalArgumentException.getMessage())
+        .isEqualTo("The array index is out of range: 12, number of elements: 4.");
+  }
+
+  @Test
+  public void getResultSet() throws SQLException {
+    ResultSet resultSet = this.array.getResultSet();
+    Tuple<ArrayList<Integer>, ArrayList<Object>> indexAndValues =
+        nestedResultSetToColumnLists(resultSet);
+    ArrayList<Integer> indexList = indexAndValues.x();
+    ArrayList<Object> columnValues = indexAndValues.y();
+
+    assertThat(indexList.toArray()).isEqualTo(new Object[] {1, 2, 3, 4});
+    assertThat(columnValues.toArray()).isEqualTo(this.expected);
+  }
+
+  @Test
+  public void getSlicedResultSet() throws SQLException {
+    int fromIndex = 1;
+    int toIndexExclusive = 3;
+    Object[] expectedSlicedArray =
+        copyOfRange(this.expected, fromIndex, toIndexExclusive); // copying index(1,2)
+
+    // the first element is at index 1
+    ResultSet resultSet = array.getResultSet(fromIndex + 1, 2);
+
+    Tuple<ArrayList<Integer>, ArrayList<Object>> indexAndValues =
+        nestedResultSetToColumnLists(resultSet);
+    ArrayList<Integer> indexList = indexAndValues.x();
+    ArrayList<Object> columnValues = indexAndValues.y();
+
+    assertThat(indexList.toArray()).isEqualTo(new Object[] {2, 3});
+    assertThat(columnValues.toArray()).isEqualTo(expectedSlicedArray);
+  }
+
+  @Test
+  public void getSlicedResultSetWhenCountIsGreaterThanOriginalArrayLength() {
+    IllegalArgumentException illegalArgumentException =
+        assertThrows(IllegalArgumentException.class, () -> array.getResultSet(2, 10));
+    assertThat(illegalArgumentException.getMessage())
+        .isEqualTo("The array index is out of range: 12, number of elements: 4.");
+  }
+
+  @Test
+  public void getBaseTypeName() throws SQLException {
+    assertThat(array.getBaseTypeName()).isEqualTo(this.currentType.name());
+  }
+
+  @Test
+  public void getBaseType() throws SQLException {
+    assertThat(array.getBaseType()).isEqualTo(this.javaSqlTypeCode);
+  }
+
+  @Test
+  public void free() throws SQLException {
+    this.array.free();
+
+    ensureArrayIsInvalid(() -> array.getArray());
+    ensureArrayIsInvalid(() -> array.getArray(1, 2));
+    ensureArrayIsInvalid(() -> array.getResultSet());
+    ensureArrayIsInvalid(() -> array.getResultSet(1, 2));
+    ensureArrayIsInvalid(() -> array.getBaseTypeName());
+    ensureArrayIsInvalid(() -> array.getBaseType());
+  }
+
+  @Test
+  public void getArrayWithCustomTypeMappingsIsNotSupported() {
+    Exception exception1 =
+        assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(emptyMap()));
+    Exception exception2 =
+        assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(1, 2, emptyMap()));
+    assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED);
+    assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED);
+  }
+
+  @Test
+  public void getResultSetWithCustomTypeMappingsIsNotSupported() {
+    Exception exception1 =
+        assertThrows(SQLFeatureNotSupportedException.class, () -> array.getResultSet(emptyMap()));
+    Exception exception2 =
+        assertThrows(
+            SQLFeatureNotSupportedException.class, () -> array.getResultSet(1, 2, emptyMap()));
+
assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + private void ensureArrayIsInvalid(ThrowingRunnable block) { + Exception exception = assertThrows(IllegalStateException.class, block); + assertThat(exception.getMessage()).isEqualTo(INVALID_ARRAY); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfStructTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfStructTest.java new file mode 100644 index 0000000000..b390d642e4 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonArrayOfStructTest.java @@ -0,0 +1,204 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.FieldValue.Attribute.PRIMITIVE; +import static com.google.cloud.bigquery.LegacySQLTypeName.RECORD; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED; +import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.INVALID_ARRAY; +import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.nestedResultSetToColumnLists; +import static com.google.common.truth.Truth.assertThat; +import static java.util.Arrays.asList; +import static java.util.Collections.emptyMap; +import static org.junit.Assert.assertThrows; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.StandardSQLTypeName; +import java.sql.Array; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Struct; +import java.sql.Types; +import java.util.ArrayList; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.junit.function.ThrowingRunnable; + +public class BigQueryJsonArrayOfStructTest { + + private Array array; + + @Before + public void setUp() { + FieldList profileSchema = + FieldList.of( + Field.newBuilder("name", LegacySQLTypeName.STRING).build(), + Field.newBuilder("age", LegacySQLTypeName.INTEGER).build(), + Field.newBuilder("adult", LegacySQLTypeName.BOOLEAN).build()); + + FieldValue record1 = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList( + FieldValue.of(PRIMITIVE, "Arya"), + FieldValue.of(PRIMITIVE, "15"), + FieldValue.of(PRIMITIVE, "false")))); + FieldValue record2 = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList( + FieldValue.of(PRIMITIVE, "Khal Drogo"), + FieldValue.of(PRIMITIVE, "35"), + 
FieldValue.of(PRIMITIVE, "true")))); + FieldValue record3 = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList( + FieldValue.of(PRIMITIVE, "Ned Stark"), + FieldValue.of(PRIMITIVE, "45"), + FieldValue.of(PRIMITIVE, "true")))); + FieldValue record4 = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList( + FieldValue.of(PRIMITIVE, "Jon Snow"), + FieldValue.of(PRIMITIVE, "25"), + FieldValue.of(PRIMITIVE, "true")))); + + Field arrayOfStructSchema = + Field.newBuilder("profiles", RECORD, profileSchema).setMode(Mode.REPEATED).build(); + + FieldValue arrayOfStructValue = + FieldValue.of( + Attribute.REPEATED, FieldValueList.of(asList(record1, record2, record3, record4))); + array = new BigQueryJsonArray(arrayOfStructSchema, arrayOfStructValue); + } + + @Test + public void getArray() throws SQLException { + Struct[] structArray = (Struct[]) array.getArray(); + + assertThat(structArray.length).isEqualTo(4); + assertThat(structArray[0].getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray()); + assertThat(structArray[1].getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structArray[2].getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + assertThat(structArray[3].getAttributes()).isEqualTo(asList("Jon Snow", 25L, true).toArray()); + } + + @Test + public void getSlicedArray() throws SQLException { + Struct[] structArray = (Struct[]) array.getArray(2, 2); + + assertThat(structArray.length).isEqualTo(2); + assertThat(structArray[0].getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structArray[1].getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + } + + @Test + public void getSlicedArrayWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> array.getArray(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getResultSet() throws SQLException { + ResultSet resultSet = array.getResultSet(); + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + + ArrayList indexList = indexAndValues.x(); + ArrayList structs = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {1, 2, 3, 4}); + assertThat(structs.get(0).getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray()); + assertThat(structs.get(1).getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structs.get(2).getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + assertThat(structs.get(3).getAttributes()).isEqualTo(asList("Jon Snow", 25L, true).toArray()); + } + + @Test + public void getSlicedResultSet() throws SQLException { + ResultSet resultSet = array.getResultSet(2, 2); + Tuple, ArrayList> indexAndValues = + nestedResultSetToColumnLists(resultSet); + + ArrayList indexList = indexAndValues.x(); + ArrayList structs = indexAndValues.y(); + + assertThat(indexList.toArray()).isEqualTo(new Object[] {2, 3}); + assertThat(structs.get(0).getAttributes()).isEqualTo(asList("Khal Drogo", 35L, true).toArray()); + assertThat(structs.get(1).getAttributes()).isEqualTo(asList("Ned Stark", 45L, true).toArray()); + } + + @Test + public void getResultSetWhenCountIsGreaterThanOriginalArrayLength() { + IllegalArgumentException illegalArgumentException = + assertThrows(IllegalArgumentException.class, () -> 
array.getResultSet(2, 10)); + assertThat(illegalArgumentException.getMessage()) + .isEqualTo("The array index is out of range: 12, number of elements: 4."); + } + + @Test + public void getBaseTypeName() throws SQLException { + assertThat(array.getBaseTypeName()).isEqualTo(StandardSQLTypeName.STRUCT.name()); + } + + @Test + public void getBaseType() throws SQLException { + assertThat(array.getBaseType()).isEqualTo(Types.STRUCT); + } + + @Test + public void free() throws SQLException { + this.array.free(); + + ensureArrayIsInvalid(() -> array.getArray()); + ensureArrayIsInvalid(() -> array.getArray(1, 2)); + ensureArrayIsInvalid(() -> array.getBaseTypeName()); + ensureArrayIsInvalid(() -> array.getBaseType()); + } + + @Test + public void getArrayWithCustomTypeMappingsIsNotSupported() { + Exception exception1 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(emptyMap())); + Exception exception2 = + assertThrows(SQLFeatureNotSupportedException.class, () -> array.getArray(1, 2, emptyMap())); + assertThat(exception1.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + assertThat(exception2.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + private void ensureArrayIsInvalid(ThrowingRunnable block) { + Exception exception = Assert.assertThrows(IllegalStateException.class, block); + assertThat(exception.getMessage()).isEqualTo(INVALID_ARRAY); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSetTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSetTest.java new file mode 100644 index 0000000000..4c715833ff --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonResultSetTest.java @@ -0,0 +1,476 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.common.truth.Truth.assertThat; +import static java.time.Month.MARCH; +import static org.mockito.Mockito.mock; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule; +import com.google.common.collect.ImmutableList; +import com.google.common.io.BaseEncoding; +import com.google.common.io.CharStreams; +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; +import java.sql.Array; +import java.sql.Blob; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.Calendar; +import java.util.TimeZone; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.TimeUnit; +import org.junit.Before; +import org.junit.Rule; +import org.junit.Test; + +public class BigQueryJsonResultSetTest { + + @Rule public final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC"); + + private static final FieldList fieldList = + FieldList.of( + Field.of("first", StandardSQLTypeName.BOOL), + Field.of("second", StandardSQLTypeName.INT64), + Field.of("third", StandardSQLTypeName.FLOAT64), + Field.of("fourth", StandardSQLTypeName.STRING), + Field.of("fifth", StandardSQLTypeName.TIMESTAMP), + Field.of("sixth", StandardSQLTypeName.BYTES), + Field.of("seventh", StandardSQLTypeName.STRING), + Field.newBuilder("eight", StandardSQLTypeName.INT64).setMode(Field.Mode.REPEATED).build(), + Field.of( + "ninth", + StandardSQLTypeName.STRUCT, + Field.of("first", StandardSQLTypeName.FLOAT64), + Field.of("second", StandardSQLTypeName.TIMESTAMP)), + Field.of("tenth", StandardSQLTypeName.NUMERIC), + Field.of("eleventh", StandardSQLTypeName.BIGNUMERIC), + Field.of("twelfth", LegacySQLTypeName.TIME), + Field.of("thirteenth", LegacySQLTypeName.INTEGER), + Field.of("fourteenth", LegacySQLTypeName.DATE)); + + LocalDateTime aTimeStamp = LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820000000); + LocalTime aTime = LocalTime.of(11, 14, 19, 820000000); + private static final String STRING_VAL = "STRING_VALUE"; + private static final Schema QUERY_SCHEMA = Schema.of(fieldList); + private final FieldValue booleanFv = FieldValue.of(Attribute.PRIMITIVE, "false"); + private final FieldValue integerFv = FieldValue.of(Attribute.PRIMITIVE, "1"); + private final FieldValue floatFv = FieldValue.of(Attribute.PRIMITIVE, "1.5"); + private final FieldValue stringFv = FieldValue.of(Attribute.PRIMITIVE, STRING_VAL); + private final FieldValue timestampFv = + FieldValue.of(Attribute.PRIMITIVE, "1680174859.820000"); // 2023-03-30 16:44:19.82 + + private final FieldValue bytesFv = + FieldValue.of( + Attribute.PRIMITIVE, + BaseEncoding.base64().encode(STRING_VAL.getBytes(StandardCharsets.UTF_8))); + + private final FieldValue nullFv = FieldValue.of(Attribute.PRIMITIVE, null); + private final FieldValue 
repeatedFv =
+      FieldValue.of(
+          Attribute.REPEATED,
+          FieldValueList.of(
+              ImmutableList.of(
+                  FieldValue.of(Attribute.PRIMITIVE, "10"),
+                  FieldValue.of(Attribute.PRIMITIVE, "20"))));
+  private final FieldValue recordFv =
+      FieldValue.of(
+          Attribute.RECORD,
+          FieldValueList.of(
+              ImmutableList.of(floatFv, timestampFv), fieldList.get("ninth").getSubFields()));
+  private final FieldValue numericFv = FieldValue.of(Attribute.PRIMITIVE, "12345678");
+  private final FieldValue bigNumericFv = FieldValue.of(Attribute.PRIMITIVE, "12345678.99");
+
+  private final FieldValue timeFv = FieldValue.of(Attribute.PRIMITIVE, "11:14:19.820000");
+
+  private final FieldValue shortFv = FieldValue.of(Attribute.PRIMITIVE, "10");
+  private final FieldValue dateFv = FieldValue.of(Attribute.PRIMITIVE, "2020-01-15");
+
+  private final FieldValueList fieldValues =
+      FieldValueList.of(
+          ImmutableList.of(
+              booleanFv, // 1
+              integerFv, // 2
+              floatFv, // 3
+              stringFv, // 4
+              timestampFv, // 5
+              bytesFv, // 6
+              nullFv, // 7
+              repeatedFv, // 8
+              recordFv, // 9
+              numericFv, // 10
+              bigNumericFv, // 11
+              timeFv, // 12
+              shortFv, // 13
+              dateFv // 14
+              ),
+          fieldList);
+
+  private BigQueryFieldValueListWrapper bigQueryFieldValueListWrapperNested;
+
+  private BigQueryStatement statement;
+  private BigQueryStatement statementForTwoRows;
+
+  private BigQueryJsonResultSet bigQueryJsonResultSet;
+  private BigQueryJsonResultSet bigQueryJsonResultSetNested;
+
+  private BlockingQueue<BigQueryFieldValueListWrapper> buffer;
+  private BlockingQueue<BigQueryFieldValueListWrapper> bufferWithTwoRows;
+
+  @Before
+  public void setUp() {
+    // Buffer with one row.
+    buffer = new LinkedBlockingDeque<>(2);
+    statement = mock(BigQueryStatement.class);
+    buffer.add(BigQueryFieldValueListWrapper.of(fieldList, fieldValues));
+    buffer.add(BigQueryFieldValueListWrapper.of(null, null, true)); // last marker
+    Thread[] workerThreads = {new Thread()};
+    bigQueryJsonResultSet =
+        BigQueryJsonResultSet.of(QUERY_SCHEMA, 1L, buffer, statement, workerThreads);
+
+    // Buffer with two rows.
+    bufferWithTwoRows = new LinkedBlockingDeque<>(3);
+    statementForTwoRows = mock(BigQueryStatement.class);
+    bufferWithTwoRows.add(BigQueryFieldValueListWrapper.of(fieldList, fieldValues));
+    bufferWithTwoRows.add(BigQueryFieldValueListWrapper.of(fieldList, fieldValues));
+    bufferWithTwoRows.add(BigQueryFieldValueListWrapper.of(null, null, true)); // last marker
+
+    // Values for nested types.
+    Field fieldEight = fieldList.get("eight");
+    FieldValue fieldEightValue = fieldValues.get("eight");
+    FieldList nestedFieldList = Schema.of(fieldEight).getFields();
+    bigQueryFieldValueListWrapperNested =
+        BigQueryFieldValueListWrapper.getNestedFieldValueListWrapper(
+            nestedFieldList, fieldEightValue.getRepeatedValue());
+    bigQueryJsonResultSetNested =
+        BigQueryJsonResultSet.getNestedResultSet(
+            Schema.of(fieldEight),
+            bigQueryFieldValueListWrapperNested,
+            0,
+            fieldEightValue.getRepeatedValue().size());
+  }
+
+  // Re-initializes the ResultSet and moves the cursor to the first row.
+  private boolean resetResultSet() throws SQLException {
+    Thread[] workerThreads = {new Thread()};
+    bigQueryJsonResultSet =
+        BigQueryJsonResultSet.of(QUERY_SCHEMA, 1L, buffer, statement, workerThreads);
+    return bigQueryJsonResultSet.next(); // move to the first row
+  }
+
+  @Test
+  public void testIsClosed() {
+    assertThat(bigQueryJsonResultSet.isClosed()).isFalse();
+  }
+
+  @Test
+  public void testClose() {
+    // TODO(prashant): Add test case after close method is implemented
+  }
+
+  @Test
+  public void testRowCount() throws SQLException {
+    Thread[] workerThreads = {new Thread()};
+    // ResultSet backed by a one-row buffer, one row in total.
+    BigQueryJsonResultSet bigQueryJsonResultSet2 =
+        BigQueryJsonResultSet.of(QUERY_SCHEMA, 1L, buffer, statement, workerThreads);
+    assertThat(resultSetRowCount(bigQueryJsonResultSet2)).isEqualTo(1);
+    // ResultSet backed by a two-row buffer, but only one row in total.
+    bigQueryJsonResultSet2 =
+        BigQueryJsonResultSet.of(
+            QUERY_SCHEMA, 1L, bufferWithTwoRows, statementForTwoRows, workerThreads);
+    assertThat(resultSetRowCount(bigQueryJsonResultSet2)).isEqualTo(1);
+  }
+
+  // Tests iteration and the ResultSet's typed getters.
+  @Test
+  public void testIteration() throws SQLException {
+    int cnt = 0;
+    assertThat(bigQueryJsonResultSet.isBeforeFirst()).isTrue();
+    while (bigQueryJsonResultSet.next()) {
+      cnt++;
+      assertThat(bigQueryJsonResultSet.isLast()).isTrue(); // we have one test row
+      assertThat(bigQueryJsonResultSet.isFirst()).isTrue(); // we have one test row
+      assertThat(bigQueryJsonResultSet.getBoolean("first")).isFalse();
+      assertThat(bigQueryJsonResultSet.getBoolean(1)).isFalse();
+      assertThat(bigQueryJsonResultSet.getInt("second")).isEqualTo(1);
+      assertThat(bigQueryJsonResultSet.getInt(2)).isEqualTo(1);
+      assertThat(bigQueryJsonResultSet.getFloat("third")).isEqualTo(1.5f);
+      assertThat(bigQueryJsonResultSet.getFloat(3)).isEqualTo(1.5f);
+      assertThat(bigQueryJsonResultSet.getString("fourth")).isEqualTo(STRING_VAL);
+      assertThat(bigQueryJsonResultSet.getString(4)).isEqualTo(STRING_VAL);
+      assertThat(bigQueryJsonResultSet.getTimestamp("fifth"))
+          .isEqualTo(Timestamp.valueOf(aTimeStamp));
+      assertThat(bigQueryJsonResultSet.getTimestamp(5)).isEqualTo(Timestamp.valueOf(aTimeStamp));
+      assertThat(bigQueryJsonResultSet.wasNull()).isFalse();
+      assertThat(bigQueryJsonResultSet.getObject("seventh")).isNull(); // test null
+      assertThat(bigQueryJsonResultSet.getObject(7)).isNull();
+      assertThat(bigQueryJsonResultSet.wasNull()).isTrue();
+      assertThat(bigQueryJsonResultSet.getArray("eight").getArray())
+          .isEqualTo(new Object[] {10L, 20L});
+      assertThat(bigQueryJsonResultSet.getArray(8).getArray()).isEqualTo(new Object[] {10L, 20L});
+      assertThat(((Array) bigQueryJsonResultSet.getObject("eight")).getArray())
+          .isEqualTo(new Object[] {10L, 20L});
+      assertThat(((Array) bigQueryJsonResultSet.getObject(8)).getArray())
+          .isEqualTo(new Object[] {10L, 20L});
+      assertThat(((Struct) bigQueryJsonResultSet.getObject("ninth")).getAttributes())
+          .isEqualTo(new Object[] {1.5, Timestamp.valueOf(aTimeStamp)});
+      assertThat(((Struct) bigQueryJsonResultSet.getObject(9)).getAttributes())
+          .isEqualTo(new Object[] {1.5, Timestamp.valueOf(aTimeStamp)});
+      assertThat(bigQueryJsonResultSet.getLong("tenth")).isEqualTo(12345678L);
+      assertThat(bigQueryJsonResultSet.getLong(10)).isEqualTo(12345678L);
+      assertThat(bigQueryJsonResultSet.getDouble("eleventh")).isEqualTo(12345678.99D);
+      assertThat(bigQueryJsonResultSet.getDouble(11)).isEqualTo(12345678.99D);
+      Time expectedTime = new Time(TimeUnit.NANOSECONDS.toMillis(aTime.toNanoOfDay()));
+      assertThat(bigQueryJsonResultSet.getTime("twelfth")).isEqualTo(expectedTime);
+      assertThat(bigQueryJsonResultSet.getTime(12)).isEqualTo(expectedTime);
+      assertThat(bigQueryJsonResultSet.getShort("thirteenth")).isEqualTo((short) 10);
+      assertThat(bigQueryJsonResultSet.getShort(13)).isEqualTo((short) 10);
+    }
+    assertThat(cnt).isEqualTo(1);
+    assertThat(bigQueryJsonResultSet.next()).isFalse();
+    assertThat(bigQueryJsonResultSet.isAfterLast()).isTrue();
+  }
+
+  @Test
+  public void testGetObjectWithPrimitives() throws SQLException {
+    bigQueryJsonResultSet.next();
+    assertThat(bigQueryJsonResultSet.getObject("first")).isEqualTo(false);
+    assertThat(bigQueryJsonResultSet.getObject(1)).isEqualTo(false);
+    assertThat(bigQueryJsonResultSet.getObject("second")).isEqualTo(1);
+    assertThat(bigQueryJsonResultSet.getObject(2)).isEqualTo(1);
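+    // The remaining checks pin the driver's default getObject() mappings:
+    // FLOAT64 -> Double, STRING -> String, TIMESTAMP -> java.sql.Timestamp, BYTES -> byte[],
+    // NUMERIC and BIGNUMERIC -> BigDecimal, TIME -> java.sql.Time.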
assertThat(bigQueryJsonResultSet.getObject("third")).isEqualTo(1.5); + assertThat(bigQueryJsonResultSet.getObject(3)).isEqualTo(1.5); + assertThat(bigQueryJsonResultSet.getObject("fourth")).isEqualTo(STRING_VAL); + assertThat(bigQueryJsonResultSet.getObject(4)).isEqualTo(STRING_VAL); + assertThat(bigQueryJsonResultSet.getObject("fifth")).isEqualTo(Timestamp.valueOf(aTimeStamp)); + assertThat(bigQueryJsonResultSet.getObject(5)).isEqualTo(Timestamp.valueOf(aTimeStamp)); + assertThat(bigQueryJsonResultSet.getObject("sixth")) + .isEqualTo(STRING_VAL.getBytes(StandardCharsets.UTF_8)); + assertThat(bigQueryJsonResultSet.getObject(6)) + .isEqualTo(STRING_VAL.getBytes(StandardCharsets.UTF_8)); + assertThat(bigQueryJsonResultSet.wasNull()).isFalse(); + assertThat(bigQueryJsonResultSet.getObject("seventh")).isNull(); // test null + assertThat(bigQueryJsonResultSet.getObject(7)).isNull(); + assertThat(bigQueryJsonResultSet.wasNull()).isTrue(); + + assertThat(bigQueryJsonResultSet.getObject("tenth")).isEqualTo(new BigDecimal("12345678")); + assertThat(bigQueryJsonResultSet.getObject(10)).isEqualTo(new BigDecimal("12345678")); + assertThat(bigQueryJsonResultSet.getObject("eleventh")) + .isEqualTo(new BigDecimal("12345678.99")); + assertThat(bigQueryJsonResultSet.getObject(11)).isEqualTo(new BigDecimal("12345678.99")); + Time expectedTime = new Time(TimeUnit.NANOSECONDS.toMillis(aTime.toNanoOfDay())); + assertThat(bigQueryJsonResultSet.getObject("twelfth")).isEqualTo(expectedTime); + assertThat(bigQueryJsonResultSet.getObject(12)).isEqualTo(expectedTime); + assertThat(bigQueryJsonResultSet.getObject("thirteenth")).isEqualTo((short) 10); + assertThat(bigQueryJsonResultSet.getObject(13)).isEqualTo((short) 10); + } + + // validate the input streams + @Test + public void testCharacterStream() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + Reader charStream = bigQueryJsonResultSet.getCharacterStream("fourth"); + String expectedVal = CharStreams.toString(charStream); + assertThat(expectedVal).isEqualTo(STRING_VAL); + } + + @Test + public void testBinaryStream() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + StringBuilder textBuilder = new StringBuilder(); + InputStream binInputStream = bigQueryJsonResultSet.getBinaryStream(6); + Reader reader = new BufferedReader(new InputStreamReader(binInputStream)); + int c; + while ((c = reader.read()) != -1) { + textBuilder.append((char) c); + } + assertThat(textBuilder.toString()).isEqualTo(STRING_VAL); + reader.close(); + } + + @Test + public void testAsciiStream() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + StringBuilder textBuilder = new StringBuilder(); + InputStream binInputStream = bigQueryJsonResultSet.getAsciiStream(4); + Reader reader = new BufferedReader(new InputStreamReader(binInputStream)); + int c; + while ((c = reader.read()) != -1) { + textBuilder.append((char) c); + } + String expectedAsciiString = + new String(STRING_VAL.getBytes(), 0, STRING_VAL.length(), StandardCharsets.US_ASCII); + assertThat(textBuilder.length()).isEqualTo(expectedAsciiString.length()); + assertThat(textBuilder.toString()).isEqualTo(expectedAsciiString); + reader.close(); + } + + @Test + public void testUnicodeStream() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + InputStream binInputStream = bigQueryJsonResultSet.getUnicodeStream(4); + byte[] cbuf = new byte[100]; + int len = binInputStream.read(cbuf, 0, cbuf.length); + String colFourVal = new 
String(cbuf, 0, len, StandardCharsets.UTF_16LE); + assertThat(colFourVal).isEqualTo(STRING_VAL); + } + + @Test + public void testClob() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + java.sql.Clob clobVal = bigQueryJsonResultSet.getClob(4); + StringBuilder textBuilder = new StringBuilder(); + Reader charStream = clobVal.getCharacterStream(); + int intValueOfChar; + while ((intValueOfChar = charStream.read()) != -1) { + textBuilder.append((char) intValueOfChar); + } + charStream.close(); + assertThat(textBuilder.toString()).isEqualTo(STRING_VAL); + } + + @Test + public void testBlob() throws SQLException, IOException { + assertThat(resetResultSet()).isTrue(); + StringBuilder textBuilder = new StringBuilder(); + Blob blobVal = bigQueryJsonResultSet.getBlob(6); + InputStream binInputStream = blobVal.getBinaryStream(); + Reader reader = new BufferedReader(new InputStreamReader(binInputStream)); + int c; + while ((c = reader.read()) != -1) { + textBuilder.append((char) c); + } + assertThat(textBuilder.toString()).isEqualTo(STRING_VAL); + reader.close(); + } + + @Test + public void testBytes() throws SQLException { + assertThat(resetResultSet()).isTrue(); + assertThat(bigQueryJsonResultSet.getBytes("sixth")) + .isEqualTo(STRING_VAL.getBytes(StandardCharsets.UTF_8)); + assertThat(bigQueryJsonResultSet.getBytes(6)) + .isEqualTo(STRING_VAL.getBytes(StandardCharsets.UTF_8)); + } + + @Test + public void testResultSetHoldability() + throws SQLException { // TODO(prashant): Revisit this after Statement's commit is finalised + assertThat(bigQueryJsonResultSet.getHoldability()) + .isEqualTo(ResultSet.HOLD_CURSORS_OVER_COMMIT); + } + + @Test + public void testStatement() throws SQLException { + assertThat(bigQueryJsonResultSet.getStatement()).isEqualTo(statement); + assertThat(bigQueryJsonResultSetNested.getStatement()).isNull(); + } + + @Test + public void testConcurrency() throws SQLException { + assertThat(bigQueryJsonResultSet.getConcurrency()).isEqualTo(ResultSet.CONCUR_READ_ONLY); + assertThat(bigQueryJsonResultSet.getType()).isEqualTo(ResultSet.TYPE_FORWARD_ONLY); + assertThat(bigQueryJsonResultSet.findColumn("first")).isEqualTo(1); + } + + @Test + public void testIterationNested() throws SQLException { + int cnt = 0; + assertThat(bigQueryJsonResultSetNested.isBeforeFirst()).isTrue(); + while (bigQueryJsonResultSetNested.next()) { + cnt++; + if (cnt == 1) { + assertThat(bigQueryJsonResultSetNested.isFirst()).isTrue(); + + } else { // 2nd row is the last row + assertThat(bigQueryJsonResultSetNested.isLast()).isTrue(); + } + assertThat(bigQueryJsonResultSetNested.getInt(1)) + .isEqualTo(cnt); // the first column is index 1 + assertThat(bigQueryJsonResultSetNested.getInt(2)) + .isEqualTo(cnt * 10); // second column has values 10 and 20 + } + assertThat(cnt).isEqualTo(2); + assertThat(bigQueryJsonResultSetNested.next()).isFalse(); + assertThat(bigQueryJsonResultSetNested.isAfterLast()).isTrue(); + } + + @Test + public void testTime() throws SQLException { + assertThat(resetResultSet()).isTrue(); + Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("EST")); + Time expectedTime = new Time(TimeUnit.NANOSECONDS.toMillis(aTime.toNanoOfDay())); + assertThat(bigQueryJsonResultSet.getTime(12)) + .isEqualTo(bigQueryJsonResultSet.getTime(12, calendar)); + assertThat(expectedTime).isEqualTo(bigQueryJsonResultSet.getTime(12, calendar)); + assertThat(bigQueryJsonResultSet.getTime("twelfth")) + .isEqualTo(bigQueryJsonResultSet.getTime("twelfth", calendar)); + } + + @Test + 
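+  // TIMESTAMP values are absolute instants, so reading them with an explicit Calendar is
+  // expected to produce the same java.sql.Timestamp as the plain getter.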
+  public void testTimestamp() throws SQLException {
+    assertThat(resetResultSet()).isTrue();
+    Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("EST"));
+    Timestamp time = bigQueryJsonResultSet.getTimestamp(5);
+    Timestamp timeWithCal = bigQueryJsonResultSet.getTimestamp(5, calendar);
+    assertThat(time).isEqualTo(timeWithCal);
+    assertThat(bigQueryJsonResultSet.getTimestamp("fifth"))
+        .isEqualTo(bigQueryJsonResultSet.getTimestamp("fifth", calendar));
+  }
+
+  @Test
+  public void testDate() throws SQLException {
+    assertThat(resetResultSet()).isTrue();
+    Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("EST"));
+    // The epoch millis should match.
+    assertThat(bigQueryJsonResultSet.getDate(14).getTime())
+        .isEqualTo(bigQueryJsonResultSet.getDate(14, calendar).getTime());
+    assertThat(Date.valueOf("2020-01-15").getTime())
+        .isEqualTo(bigQueryJsonResultSet.getDate(14, calendar).getTime());
+    assertThat(bigQueryJsonResultSet.getDate("fourteenth").getTime())
+        .isEqualTo(bigQueryJsonResultSet.getDate("fourteenth", calendar).getTime());
+  }
+
+  private int resultSetRowCount(BigQueryJsonResultSet resultSet) throws SQLException {
+    int rowCount = 0;
+    while (resultSet.next()) {
+      rowCount++;
+    }
+    return rowCount;
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStructTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStructTest.java
new file mode 100644
index 0000000000..f07d8cad27
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryJsonStructTest.java
@@ -0,0 +1,264 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.cloud.bigquery.FieldValue.Attribute.PRIMITIVE;
+import static com.google.cloud.bigquery.FieldValue.Attribute.RECORD;
+import static com.google.cloud.bigquery.StandardSQLTypeName.BIGNUMERIC;
+import static com.google.cloud.bigquery.StandardSQLTypeName.BOOL;
+import static com.google.cloud.bigquery.StandardSQLTypeName.BYTES;
+import static com.google.cloud.bigquery.StandardSQLTypeName.DATE;
+import static com.google.cloud.bigquery.StandardSQLTypeName.DATETIME;
+import static com.google.cloud.bigquery.StandardSQLTypeName.FLOAT64;
+import static com.google.cloud.bigquery.StandardSQLTypeName.GEOGRAPHY;
+import static com.google.cloud.bigquery.StandardSQLTypeName.INT64;
+import static com.google.cloud.bigquery.StandardSQLTypeName.NUMERIC;
+import static com.google.cloud.bigquery.StandardSQLTypeName.STRING;
+import static com.google.cloud.bigquery.StandardSQLTypeName.TIME;
+import static com.google.cloud.bigquery.StandardSQLTypeName.TIMESTAMP;
+import static com.google.cloud.bigquery.jdbc.BigQueryErrorMessage.CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED;
+import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.arraySchemaAndValue;
+import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.primitiveSchemaAndValue;
+import static com.google.common.io.BaseEncoding.base64;
+import static com.google.common.truth.Truth.assertThat;
+import static java.time.Month.MARCH;
+import static java.util.Arrays.asList;
+import static java.util.Collections.emptyMap;
+import static org.junit.Assert.assertThrows;
+
+import com.google.cloud.Tuple;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.FieldList;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.FieldValue.Attribute;
+import com.google.cloud.bigquery.FieldValueList;
+import com.google.cloud.bigquery.LegacySQLTypeName;
+import com.google.cloud.bigquery.jdbc.rules.TimeZoneRule;
+import com.google.common.io.BaseEncoding;
+import java.math.BigDecimal;
+import java.sql.Array;
+import java.sql.Date;
+import java.sql.SQLException;
+import java.sql.SQLFeatureNotSupportedException;
+import java.sql.Struct;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.util.Arrays;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+
+public class BigQueryJsonStructTest {
+
+  @Rule public final TimeZoneRule timeZoneRule = new TimeZoneRule("UTC");
+
+  private Struct structWithPrimitiveValues;
+  private Struct structWithNullValue;
+
+  @Before
+  public void setUp() {
+    List<Tuple<Field, FieldValue>> schemaAndValues =
+        Arrays.asList(
+            primitiveSchemaAndValue(INT64, "10"),
+            primitiveSchemaAndValue(BOOL, "true"),
+            primitiveSchemaAndValue(FLOAT64, "11.2"),
+            primitiveSchemaAndValue(NUMERIC, "11.2657"),
+            primitiveSchemaAndValue(BIGNUMERIC, "11.2657"),
+            primitiveSchemaAndValue(STRING, "one"),
+            primitiveSchemaAndValue(TIMESTAMP, "1680174859.8200000"), // 2023-03-30 11:14:19.82 UTC
+            primitiveSchemaAndValue(DATE, "2023-03-30"),
+            primitiveSchemaAndValue(TIME, "11:14:19.820000"),
+            primitiveSchemaAndValue(DATETIME, "2023-03-30T11:14:19.8200000"),
+            primitiveSchemaAndValue(GEOGRAPHY, "POINT(-122 47)"),
+            primitiveSchemaAndValue(BYTES, base64().encode("one".getBytes())));
+    List<Field> orderedSchemas =
+        schemaAndValues.stream().map(Tuple::x).collect(Collectors.toList());
+    List<FieldValue> orderedValues =
+        schemaAndValues.stream().map(Tuple::y).collect(Collectors.toList());
+
+    structWithPrimitiveValues =
+        new BigQueryJsonStruct(
+            FieldList.of(orderedSchemas), FieldValue.of(RECORD, FieldValueList.of(orderedValues)));
+    structWithNullValue =
+        new BigQueryJsonStruct(FieldList.of(orderedSchemas), FieldValue.of(PRIMITIVE, null));
+  }
+
+  @Test
+  public void structOfPrimitives() throws SQLException {
+    assertThat(structWithPrimitiveValues.getAttributes())
+        .isEqualTo(
+            Arrays.asList(
+                    10L,
+                    true,
+                    11.2,
+                    new BigDecimal("11.2657"),
+                    new BigDecimal("11.2657"),
+                    "one",
+                    Timestamp.valueOf(LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820000000)),
+                    Date.valueOf(LocalDate.of(2023, MARCH, 30)),
+                    new Time(
+                        TimeUnit.NANOSECONDS.toMillis(
+                            LocalTime.parse("11:14:19.820").toNanoOfDay())),
+                    Timestamp.valueOf("2023-03-30 11:14:19.8200000"),
+                    "POINT(-122 47)",
+                    "one".getBytes())
+                .toArray());
+  }
+
+  @Test
+  public void structOfArrays() throws SQLException {
+    LocalDateTime aTimeStamp = LocalDateTime.of(2023, MARCH, 30, 11, 14, 19, 820000000);
+    LocalDate aDate = LocalDate.of(2023, MARCH, 30);
+    LocalTime aTime = LocalTime.of(11, 14, 19, 820000000);
+    List<Tuple<Field, FieldValue>> schemaAndValues =
+        Arrays.asList(
+            arraySchemaAndValue(INT64, "10", "20"),
+            arraySchemaAndValue(BOOL, "true", "false"),
+            arraySchemaAndValue(FLOAT64, "11.2", "33.4"),
+            arraySchemaAndValue(NUMERIC, "11.2657", "33.4657"),
+            arraySchemaAndValue(BIGNUMERIC, "11.2657", "33.4657"),
+            arraySchemaAndValue(STRING, "one", "two"),
+            arraySchemaAndValue(TIMESTAMP, "1680174859.820000", "1680261259.820000"),
+            arraySchemaAndValue(DATE, "2023-03-30", "2023-03-31"),
+            arraySchemaAndValue(TIME, "11:14:19.820000", "11:14:20.820000"),
+            arraySchemaAndValue(
+                DATETIME, "2023-03-30T11:14:19.820000", "2023-03-30T11:15:19.820000"),
+            arraySchemaAndValue(GEOGRAPHY, "POINT(-122 47)", "POINT(-122 48)"),
+            arraySchemaAndValue(
+                BYTES,
+                Stream.of("one", "two")
+                    .map(s -> BaseEncoding.base64().encode(s.getBytes()))
+                    .toArray(String[]::new)));
+
+    List<Field> orderedSchemas =
+        schemaAndValues.stream().map(Tuple::x).collect(Collectors.toList());
+    List<FieldValue> orderedValues =
+        schemaAndValues.stream().map(Tuple::y).collect(Collectors.toList());
+
+    Struct struct =
+        new BigQueryJsonStruct(
+            FieldList.of(orderedSchemas), FieldValue.of(RECORD, FieldValueList.of(orderedValues)));
+
+    Object[] attributes = struct.getAttributes();
+    assertThat(((Array) attributes[0]).getArray()).isEqualTo(new Long[] {10L, 20L});
+    assertThat(((Array) attributes[1]).getArray()).isEqualTo(new Boolean[] {true, false});
+    assertThat(((Array) attributes[2]).getArray()).isEqualTo(new Double[] {11.2, 33.4});
+    assertThat(((Array) attributes[3]).getArray())
+        .isEqualTo(new BigDecimal[] {new BigDecimal("11.2657"), new BigDecimal("33.4657")});
+    assertThat(((Array) attributes[4]).getArray())
+        .isEqualTo(new BigDecimal[] {new BigDecimal("11.2657"), new BigDecimal("33.4657")});
+    assertThat(((Array) attributes[5]).getArray()).isEqualTo(new String[] {"one", "two"});
+    assertThat(((Array) attributes[6]).getArray())
+        .isEqualTo(
+            new Timestamp[] {
+              Timestamp.valueOf(aTimeStamp), // 2023-03-30 11:14:19.82 UTC
+              Timestamp.valueOf(aTimeStamp.plusDays(1))
+            });
+    assertThat(((Array) attributes[7]).getArray())
+        .isEqualTo(new Date[] {Date.valueOf(aDate), Date.valueOf(aDate.plusDays(1))});
+    assertThat(((Array) attributes[8]).getArray())
+        .isEqualTo(
+            new Time[] {
+              new
Time(TimeUnit.NANOSECONDS.toMillis(aTime.toNanoOfDay())), + new Time(TimeUnit.NANOSECONDS.toMillis(aTime.plusSeconds(1).toNanoOfDay())) + }); + assertThat(((Array) attributes[9]).getArray()) // DATETIME + .isEqualTo( + new Timestamp[] { + Timestamp.valueOf("2023-03-30 11:14:19.820000"), + Timestamp.valueOf("2023-03-30 11:15:19.820000") + }); + assertThat(((Array) attributes[10]).getArray()) + .isEqualTo(new String[] {"POINT(-122 47)", "POINT(-122 48)"}); + assertThat(((Array) attributes[11]).getArray()) + .isEqualTo(new byte[][] {"one".getBytes(), "two".getBytes()}); + } + + @Test + public void structOfStructs() throws SQLException { + FieldList profileSchema = + FieldList.of( + Field.of("name", LegacySQLTypeName.STRING), + Field.of("age", LegacySQLTypeName.INTEGER), + Field.of("adult", LegacySQLTypeName.BOOLEAN)); + FieldList addressSchema = + FieldList.of( + Field.of("state", LegacySQLTypeName.STRING), + Field.of("zip", LegacySQLTypeName.INTEGER)); + FieldList rootStructSchema = + FieldList.of( + Field.of("profile", LegacySQLTypeName.RECORD, profileSchema), + Field.of("address", LegacySQLTypeName.RECORD, addressSchema)); + + FieldValue profileValue = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList( + FieldValue.of(PRIMITIVE, "Arya"), + FieldValue.of(PRIMITIVE, "15"), + FieldValue.of(PRIMITIVE, "false")))); + FieldValue addressValue = + FieldValue.of( + Attribute.RECORD, + FieldValueList.of( + asList(FieldValue.of(PRIMITIVE, "Michigan"), FieldValue.of(PRIMITIVE, "49086")))); + + FieldValue rootStructValue = + FieldValue.of(RECORD, FieldValueList.of(asList(profileValue, addressValue))); + + Struct struct = new BigQueryJsonStruct(rootStructSchema, rootStructValue); + Object[] attributes = struct.getAttributes(); + Struct profileStruct = (Struct) attributes[0]; + Struct addressStruct = (Struct) attributes[1]; + + assertThat(profileStruct.getAttributes()).isEqualTo(asList("Arya", 15L, false).toArray()); + assertThat(addressStruct.getAttributes()).isEqualTo(asList("Michigan", 49086L).toArray()); + } + + @Test + public void structWithNullValue() throws SQLException { + assertThat(structWithNullValue.getAttributes()) + .isEqualTo( + Arrays.asList(0L, false, 0.0, null, null, null, null, null, null, null, null, null) + .toArray()); + } + + @Test + public void getSQLTypeNameIsNotSupported() { + Exception exception = + assertThrows( + SQLFeatureNotSupportedException.class, structWithPrimitiveValues::getSQLTypeName); + assertThat(exception.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } + + @Test + public void getAttributesWithCustomTypeMappingsIsNotSupported() { + Exception exception = + assertThrows( + SQLFeatureNotSupportedException.class, + () -> structWithPrimitiveValues.getAttributes(emptyMap())); + assertThat(exception.getMessage()).isEqualTo(CUSTOMER_TYPE_MAPPING_NOT_SUPPORTED); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandlerTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandlerTest.java new file mode 100644 index 0000000000..0dc085b602 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryParameterHandlerTest.java @@ -0,0 +1,142 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; + +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.jdbc.BigQueryParameterHandler.BigQueryStatementParameterType; +import org.junit.Test; + +public class BigQueryParameterHandlerTest { + + @Test + public void testGetSetParameterByName() throws Exception { + BigQueryParameterHandler paramHandler = new BigQueryParameterHandler(2); + // Add Param 1 + paramHandler.setParameter( + "ParamKey1", "ParamValue1", String.class, BigQueryStatementParameterType.IN, -1); + String paramValue = (String) paramHandler.getParameter("ParamKey1"); + assertNotNull(paramValue); + assertEquals("ParamValue1", paramValue); + BigQueryStatementParameterType paramType = paramHandler.getParameterType("ParamKey1"); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.IN, paramType); + int scale = paramHandler.getParameterScale("ParamKey1"); + assertEquals(-1, scale); + assertEquals(String.class, paramHandler.getType("ParamKey1")); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType("ParamKey1")); + + // Add Param 2 + paramHandler.setParameter( + "ParamKey2", "ParamValue2", String.class, BigQueryStatementParameterType.INOUT, 1); + paramValue = (String) paramHandler.getParameter("ParamKey2"); + assertNotNull(paramValue); + assertEquals("ParamValue2", paramValue); + paramType = paramHandler.getParameterType("ParamKey2"); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.INOUT, paramType); + scale = paramHandler.getParameterScale("ParamKey2"); + assertEquals(1, scale); + assertEquals(String.class, paramHandler.getType("ParamKey2")); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType("ParamKey2")); + + // Update Param 1 + paramHandler.setParameter( + "ParamKey1", "ParamValue1-UPD", String.class, BigQueryStatementParameterType.OUT, 1); + paramValue = (String) paramHandler.getParameter("ParamKey1"); + assertNotNull(paramValue); + assertEquals("ParamValue1-UPD", paramValue); + paramType = paramHandler.getParameterType("ParamKey1"); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.OUT, paramType); + scale = paramHandler.getParameterScale("ParamKey1"); + assertEquals(1, scale); + assertEquals(String.class, paramHandler.getType("ParamKey1")); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType("ParamKey1")); + + // Update Param 2 + paramHandler.setParameter( + "ParamKey2", "ParamValue2-UPD", String.class, BigQueryStatementParameterType.INOUT, 2); + paramValue = (String) paramHandler.getParameter("ParamKey2"); + assertNotNull(paramValue); + assertEquals("ParamValue2-UPD", paramValue); + paramType = paramHandler.getParameterType("ParamKey2"); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.INOUT, paramType); + scale = paramHandler.getParameterScale("ParamKey2"); + assertEquals(2, scale); + assertEquals(String.class, paramHandler.getType("ParamKey2")); + assertEquals(StandardSQLTypeName.STRING, 
paramHandler.getSqlType("ParamKey2")); + } + + @Test + public void testGetSetParameterByIndex() throws Exception { + BigQueryParameterHandler paramHandler = new BigQueryParameterHandler(2); + + // Add Param 1 + paramHandler.setParameter(1, "ParamValue1", String.class); + String value = (String) paramHandler.getParameter(1); + assertNotNull(value); + assertEquals("ParamValue1", value); + BigQueryStatementParameterType paramType = paramHandler.getParameterType(1); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.UNSPECIFIED, paramType); + assertEquals(String.class, paramHandler.getType(1)); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType(1)); + + // Add Param 2 + paramHandler.setParameter( + 2, "ParamValue2", String.class, BigQueryStatementParameterType.IN, -1); + value = (String) paramHandler.getParameter(2); + assertNotNull(value); + assertEquals("ParamValue2", value); + paramType = paramHandler.getParameterType(2); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.IN, paramType); + int scale = paramHandler.getParameterScale(2); + assertEquals(-1, scale); + assertEquals(String.class, paramHandler.getType(2)); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType(2)); + + // Update Param 1 + paramHandler.setParameter(1, "ParamValue1-UPD", String.class); + value = (String) paramHandler.getParameter(1); + assertNotNull(value); + assertEquals("ParamValue1-UPD", value); + paramType = paramHandler.getParameterType(1); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.UNSPECIFIED, paramType); + assertEquals(String.class, paramHandler.getType(1)); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType(1)); + + // Update Param 2 + paramHandler.setParameter( + 2, "ParamValue2-UPD", String.class, BigQueryStatementParameterType.OUT, 2); + value = (String) paramHandler.getParameter(2); + assertNotNull(value); + assertEquals("ParamValue2-UPD", value); + paramType = paramHandler.getParameterType(2); + assertNotNull(paramType); + assertEquals(BigQueryStatementParameterType.OUT, paramType); + scale = paramHandler.getParameterScale(2); + assertEquals(2, scale); + assertEquals(String.class, paramHandler.getType(2)); + assertEquals(StandardSQLTypeName.STRING, paramHandler.getSqlType(2)); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnectionTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnectionTest.java new file mode 100644 index 0000000000..a394e53d0d --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryPooledConnectionTest.java @@ -0,0 +1,176 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.cloud.bigquery.jdbc.utils.TestUtilities.TestConnectionListener;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import org.junit.Before;
+import org.junit.Test;
+
+public class BigQueryPooledConnectionTest {
+  private BigQueryConnection bigQueryConnection;
+  private static final Long LISTENER_POOL_SIZE = 10L;
+
+  @Before
+  public void setUp() throws IOException, SQLException {
+    bigQueryConnection = mock(BigQueryConnection.class);
+    doReturn(LISTENER_POOL_SIZE).when(bigQueryConnection).getListenerPoolSize();
+  }
+
+  @Test
+  public void testGetPooledConnection() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertFalse(pooledConnection.inUse());
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    Connection connection = pooledConnection.getConnection();
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+    assertTrue(pooledConnection.inUse());
+  }
+
+  @Test
+  public void testPooledConnectionClose() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertFalse(pooledConnection.inUse());
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    Connection connection = pooledConnection.getConnection();
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+    assertTrue(pooledConnection.inUse());
+
+    connection.close();
+    assertFalse(pooledConnection.inUse());
+  }
+
+  @Test
+  public void testReuseConnectionAfterClose() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertFalse(pooledConnection.inUse());
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    Connection connection = pooledConnection.getConnection();
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+    assertTrue(pooledConnection.inUse());
+
+    connection.close();
+    assertFalse(pooledConnection.inUse());
+
+    connection = pooledConnection.getConnection();
+    assertTrue(pooledConnection.inUse());
+  }
+
+  @Test
+  public void testAddConnectionListener() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    TestConnectionListener listener = new TestConnectionListener();
+    pooledConnection.addConnectionEventListener(listener);
+
+    assertTrue(pooledConnection.isListenerPooled(listener));
+  }
+
+  @Test
+  public void testRemoveConnectionListener() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    TestConnectionListener listener = new TestConnectionListener();
+    pooledConnection.addConnectionEventListener(listener);
+    assertEquals(0, listener.getConnectionClosedCount());
+    assertEquals(0, listener.getConnectionErrorCount());
+
+    assertTrue(pooledConnection.isListenerPooled(listener));
+    pooledConnection.removeConnectionEventListener(listener);
+    assertFalse(pooledConnection.isListenerPooled(listener));
+  }
+
+  @Test
+  public void testConnectionHandleClosedByConnection() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    assertFalse(pooledConnection.inUse());
+    Connection connection = pooledConnection.getConnection();
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+    assertTrue(pooledConnection.inUse());
+
+    TestConnectionListener listener = new TestConnectionListener();
+    pooledConnection.addConnectionEventListener(listener);
+    assertEquals(0, listener.getConnectionClosedCount());
+    assertEquals(0, listener.getConnectionErrorCount());
+
+    connection.close();
+    assertFalse(pooledConnection.inUse());
+    assertEquals(1, listener.getConnectionClosedCount());
+    assertEquals(0, listener.getConnectionErrorCount());
+
+    assertTrue(pooledConnection.isListenerPooled(listener));
+  }
+
+  @Test
+  public void testConnectionHandleClosedByPooledConnection() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    assertFalse(pooledConnection.inUse());
+    Connection connection = pooledConnection.getConnection();
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+    assertTrue(pooledConnection.inUse());
+
+    TestConnectionListener listener = new TestConnectionListener();
+    pooledConnection.addConnectionEventListener(listener);
+    assertEquals(0, listener.getConnectionClosedCount());
+    assertEquals(0, listener.getConnectionErrorCount());
+
+    pooledConnection.close();
+    assertFalse(pooledConnection.inUse());
+    assertEquals(1, listener.getConnectionClosedCount());
+    assertEquals(0, listener.getConnectionErrorCount());
+
+    assertTrue(pooledConnection.isListenerPooled(listener));
+  }
+
+  @Test
+  public void testFireConnectionError() throws SQLException {
+    BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection);
+    assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize());
+    assertFalse(pooledConnection.inUse());
+    Connection connection = pooledConnection.getConnection();
+    assertNotNull(connection);
+    assertFalse(connection.isClosed());
+    assertTrue(pooledConnection.inUse());
+
+    TestConnectionListener listener = new TestConnectionListener();
+    pooledConnection.addConnectionEventListener(listener);
+    assertEquals(0, listener.getConnectionClosedCount());
+    assertEquals(0, listener.getConnectionErrorCount());
+
+    pooledConnection.fireConnectionError(new SQLException("test"));
+    assertFalse(pooledConnection.inUse());
+    assertEquals(0, listener.getConnectionClosedCount());
+    assertEquals(1, listener.getConnectionErrorCount());
+
+    assertFalse(pooledConnection.isListenerPooled(listener));
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizersTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizersTest.java
new file mode 100644
index 0000000000..7332dce936
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetFinalizersTest.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import org.junit.Before;
+import org.junit.Test;
+
+public class BigQueryResultSetFinalizersTest {
+  Thread arrowWorker;
+  Thread[] jsonWorkers;
+
+  @Before
+  public void setUp() {
+    // Create and start the daemon worker threads.
+    arrowWorker =
+        new Thread(
+            () -> {
+              while (true) {
+                if (Thread.currentThread().isInterrupted()) {
+                  break;
+                }
+              }
+            });
+    arrowWorker.setDaemon(true);
+    Thread jsonWorker =
+        new Thread(
+            () -> {
+              while (true) {
+                if (Thread.currentThread().isInterrupted()) {
+                  break;
+                }
+              }
+            });
+    jsonWorker.setDaemon(true);
+    jsonWorkers = new Thread[] {jsonWorker};
+    arrowWorker.start();
+    jsonWorker.start();
+  }
+
+  @Test
+  public void testFinalizeResources() {
+    BigQueryResultSetFinalizers.ArrowResultSetFinalizer arrowResultSetFinalizer =
+        new BigQueryResultSetFinalizers.ArrowResultSetFinalizer(null, null, arrowWorker);
+    arrowResultSetFinalizer.finalizeResources();
+    assertThat(arrowWorker.isInterrupted()).isTrue();
+    BigQueryResultSetFinalizers.JsonResultSetFinalizer jsonResultSetFinalizer =
+        new BigQueryResultSetFinalizers.JsonResultSetFinalizer(null, null, jsonWorkers);
+    jsonResultSetFinalizer.finalizeResources();
+    assertThat(jsonWorkers[0].isInterrupted()).isTrue();
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadataTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadataTest.java
new file mode 100644
index 0000000000..b4d14296d1
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryResultSetMetadataTest.java
@@ -0,0 +1,277 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.mockito.Mockito.mock;
+
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.FieldList;
+import com.google.cloud.bigquery.LegacySQLTypeName;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardSQLTypeName;
+import com.google.common.collect.ImmutableList;
+import java.sql.Array;
+import java.sql.ResultSetMetaData;
+import java.sql.SQLException;
+import java.sql.Types;
+import java.util.List;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.junit.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.class)
+public class BigQueryResultSetMetadataTest {
+
+  private BigQueryStatement statement;
+
+  private static Field tenthField =
+      Field.newBuilder("tenth", LegacySQLTypeName.NUMERIC)
+          .setName("tenth")
+          .setType(StandardSQLTypeName.NUMERIC)
+          .setPrecision(12L)
+          .setScale(9L)
+          .build();
+  private static final FieldList fieldList =
+      FieldList.of(
+          Field.of("first", StandardSQLTypeName.BOOL),
+          Field.of("second", StandardSQLTypeName.INT64),
+          Field.of("third", StandardSQLTypeName.FLOAT64),
+          Field.of("fourth", StandardSQLTypeName.STRING),
+          Field.of("fifth", StandardSQLTypeName.TIMESTAMP),
+          Field.of("sixth", StandardSQLTypeName.BYTES),
+          Field.of("seventh", StandardSQLTypeName.STRING),
+          Field.newBuilder("eight", StandardSQLTypeName.STRING)
+              .setMode(Field.Mode.REPEATED)
+              .build(),
+          Field.of(
+              "ninth",
+              StandardSQLTypeName.STRUCT,
+              Field.of("first", StandardSQLTypeName.FLOAT64),
+              Field.of("second", StandardSQLTypeName.TIMESTAMP)),
+          tenthField,
+          Field.of("eleventh", StandardSQLTypeName.BIGNUMERIC),
+          Field.of("twelfth", LegacySQLTypeName.TIME),
+          Field.of("thirteenth", LegacySQLTypeName.DATE));
+
+  private static final List<Integer> fieldListSqlTypes =
+      ImmutableList.of(
+          Types.BOOLEAN,
+          Types.BIGINT,
+          Types.DOUBLE,
+          Types.NVARCHAR,
+          Types.TIMESTAMP,
+          Types.VARBINARY,
+          Types.NVARCHAR,
+          Types.ARRAY,
+          Types.STRUCT,
+          Types.NUMERIC,
+          Types.NUMERIC,
+          Types.TIME,
+          Types.DATE);
+
+  private static final List<String> fieldListClassNames =
+      ImmutableList.of(
+          "java.lang.Boolean",
+          "java.lang.Long",
+          "java.lang.Double",
+          "java.lang.String",
+          "java.sql.Timestamp",
+          byte[].class.getName(),
+          "java.lang.String",
+          Array.class.getName(),
+          "java.sql.Struct",
+          "java.math.BigDecimal",
+          "java.math.BigDecimal",
+          "java.sql.Time",
+          "java.sql.Date");
+  private static final Schema QUERY_SCHEMA = Schema.of(fieldList);
+
+  private ResultSetMetaData resultSetMetaData;
+
+  private ResultSetMetaData resultSetMetaDataNested;
+
+  @Before
+  public void setUp() throws SQLException {
+    statement = mock(BigQueryStatement.class);
+    Thread[] workerThreads = {new Thread()};
+    BigQueryJsonResultSet bigQueryJsonResultSet =
+        BigQueryJsonResultSet.of(QUERY_SCHEMA, 1L, null, statement, workerThreads);
+    resultSetMetaData = bigQueryJsonResultSet.getMetaData();
+
+    // values for nested types
+    Field fieldEight = fieldList.get("eight");
+    // The schema for the nested result set should describe the elements of the array.
+    Field elementField = fieldEight.toBuilder().setMode(Field.Mode.NULLABLE).build();
+    FieldList nestedFieldList = FieldList.of(elementField);
+    BigQueryFieldValueListWrapper bigQueryFieldValueListWrapperNested =
+        BigQueryFieldValueListWrapper.getNestedFieldValueListWrapper(nestedFieldList, null);
+    BigQueryJsonResultSet bigQueryJsonResultSetNested =
+        BigQueryJsonResultSet.getNestedResultSet(
+            Schema.of(nestedFieldList), bigQueryFieldValueListWrapperNested, -1, -1);
+    resultSetMetaDataNested = bigQueryJsonResultSetNested.getMetaData();
+  }
+
+  @Test
+  public void testGetColumnType() throws SQLException {
+    // match the mapping for all the types in the test dataset
+    for (int colIndex = 1; colIndex <= 13; colIndex++) {
+      assertThat(resultSetMetaData.getColumnType(colIndex))
+          .isEqualTo(fieldListSqlTypes.get(colIndex - 1));
+    }
+  }
+
+  @Test
+  public void testGetColumnTypeName() throws SQLException {
+    assertThat(resultSetMetaData.getColumnTypeName(1)).isEqualTo("BOOL");
+    assertThat(resultSetMetaData.getColumnTypeName(2)).isEqualTo("INT64");
+    assertThat(resultSetMetaData.getColumnTypeName(3)).isEqualTo("FLOAT64");
+    assertThat(resultSetMetaData.getColumnTypeName(4)).isEqualTo("STRING");
+    assertThat(resultSetMetaData.getColumnTypeName(5)).isEqualTo("TIMESTAMP");
+    assertThat(resultSetMetaData.getColumnTypeName(6)).isEqualTo("BYTES");
+    assertThat(resultSetMetaData.getColumnTypeName(7)).isEqualTo("STRING");
+    assertThat(resultSetMetaData.getColumnTypeName(8)).isEqualTo("ARRAY");
+    assertThat(resultSetMetaData.getColumnTypeName(9)).isEqualTo("STRUCT");
+    assertThat(resultSetMetaData.getColumnTypeName(10)).isEqualTo("NUMERIC");
+    assertThat(resultSetMetaData.getColumnTypeName(11)).isEqualTo("BIGNUMERIC");
+    assertThat(resultSetMetaData.getColumnTypeName(12)).isEqualTo("TIME");
+    assertThat(resultSetMetaData.getColumnTypeName(13)).isEqualTo("DATE");
+  }
+
+  @Test
+  public void testColumnClassName() throws SQLException {
+    // match the mapping for all the types in the test dataset
+    for (int colIndex = 1; colIndex <= 13; colIndex++) {
+      assertThat(resultSetMetaData.getColumnClassName(colIndex))
+          .isEqualTo(fieldListClassNames.get(colIndex - 1));
+    }
+  }
+
+  @Test
+  public void testResultSetMetadataProperties() throws SQLException {
+    assertThat(resultSetMetaData).isNotNull();
+    assertThat(resultSetMetaData.getColumnCount()).isEqualTo(13);
+    assertThat(resultSetMetaData.isAutoIncrement(1)).isFalse();
+    assertThat(resultSetMetaData.isSearchable(4)).isTrue();
+    assertThat(resultSetMetaData.isCurrency(4)).isFalse();
+    assertThat(resultSetMetaData.isReadOnly(4)).isFalse();
+    assertThat(resultSetMetaData.isDefinitelyWritable(4)).isFalse();
+    assertThat(resultSetMetaData.isWritable(4)).isTrue();
+    assertThat(resultSetMetaData.isNullable(4)).isEqualTo(ResultSetMetaData.columnNullableUnknown);
+  }
+
+  @Test
+  public void testPrecision() throws SQLException {
+    assertThat(resultSetMetaData.getPrecision(10)).isEqualTo(12L);
+    assertThat(resultSetMetaData.getPrecision(1))
+        .isEqualTo(0); // schema doesn't have this info, should be defaulted to 0
+  }
+
+  @Test
+  public void testSigned() throws SQLException {
+    assertThat(resultSetMetaData.isSigned(4)).isFalse();
+    assertThat(resultSetMetaData.isSigned(2)).isTrue();
+  }
+
+  @Test
+  public void testCheckNameLabelCatalog() throws SQLException {
+    assertThat(resultSetMetaData.getColumnLabel(1)).isEqualTo("first");
+    assertThat(resultSetMetaData.getColumnName(10)).isEqualTo("tenth");
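+    // The driver does not populate schema or catalog names for query results, so both
+    // surface as empty strings.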
assertThat(resultSetMetaData.getSchemaName(10)).isEqualTo("");
+    assertThat(resultSetMetaData.getCatalogName(10)).isEqualTo("");
+  }
+
+  @Test
+  public void testCheckCaseSensitive() throws SQLException {
+    assertThat(resultSetMetaData.isCaseSensitive(2)).isFalse();
+    assertThat(resultSetMetaData.isCaseSensitive(4)).isTrue();
+  }
+
+  @Test
+  public void testScale() throws SQLException {
+    assertThat(resultSetMetaData.getScale(10)).isEqualTo(9L);
+    assertThat(resultSetMetaData.getScale(4)).isEqualTo(0L);
+  }
+
+  @Test
+  public void testColumnDisplaySize() throws SQLException {
+    assertThat(resultSetMetaData.getColumnDisplaySize(1)).isEqualTo(5);
+    assertThat(resultSetMetaData.getColumnDisplaySize(13)).isEqualTo(10);
+    assertThat(resultSetMetaData.getColumnDisplaySize(2)).isEqualTo(10);
+    assertThat(resultSetMetaData.getColumnDisplaySize(3)).isEqualTo(14);
+    assertThat(resultSetMetaData.getColumnDisplaySize(12)).isEqualTo(50);
+    assertThat(resultSetMetaData.getColumnDisplaySize(5)).isEqualTo(16);
+  }
+
+  // Nested Types
+
+  @Test
+  public void testResultSetMetaDataNestedColType() throws SQLException {
+    assertThat(resultSetMetaDataNested).isNotNull();
+    assertThat(resultSetMetaDataNested.getColumnType(1)).isEqualTo(Types.NVARCHAR);
+    assertThat(resultSetMetaDataNested.getColumnClassName(1)).isEqualTo("java.lang.String");
+  }
+
+  @Test
+  public void testNestedResultSetMetaDataProperties() throws SQLException {
+    assertThat(resultSetMetaDataNested.getColumnCount()).isEqualTo(1);
+    assertThat(resultSetMetaDataNested.isAutoIncrement(1)).isFalse();
+    assertThat(resultSetMetaDataNested.isSearchable(1)).isTrue();
+    assertThat(resultSetMetaDataNested.isCurrency(1)).isFalse();
+    assertThat(resultSetMetaDataNested.isReadOnly(1)).isFalse();
+    assertThat(resultSetMetaDataNested.isDefinitelyWritable(1)).isFalse();
+    assertThat(resultSetMetaDataNested.isWritable(1)).isTrue();
+    assertThat(resultSetMetaDataNested.isNullable(1)).isEqualTo(ResultSetMetaData.columnNullable);
+  }
+
+  @Test
+  public void testNestedPrecision() throws SQLException {
+    assertThat(resultSetMetaDataNested.getPrecision(1))
+        .isEqualTo(0); // schema doesn't have this info, should be defaulted to 0
+  }
+
+  @Test
+  public void testNestedSigned() throws SQLException {
+    assertThat(resultSetMetaDataNested.isSigned(1)).isFalse();
+  }
+
+  @Test
+  public void testNestedCheckNameLabelCatalog() throws SQLException {
+    assertThat(resultSetMetaDataNested.getColumnLabel(1)).isEqualTo("eight");
+    assertThat(resultSetMetaDataNested.getColumnName(1)).isEqualTo("eight");
+    assertThat(resultSetMetaDataNested.getSchemaName(1)).isEqualTo("");
+    assertThat(resultSetMetaDataNested.getCatalogName(1)).isEqualTo("");
+  }
+
+  @Test
+  public void testNestedCheckCaseSensitive() throws SQLException {
+    assertThat(resultSetMetaDataNested.isCaseSensitive(1)).isTrue();
+  }
+
+  @Test
+  public void testNestedScale() throws SQLException {
+    assertThat(resultSetMetaDataNested.getScale(1)).isEqualTo(0L);
+  }
+
+  @Test
+  public void testNestedColumnDisplaySize() throws SQLException {
+    assertThat(resultSetMetaDataNested.getColumnDisplaySize(1)).isEqualTo(50);
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryStatementTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryStatementTest.java
new file mode 100644
index 0000000000..22dc072192
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryStatementTest.java
@@ -0,0 +1,483 @@
+/*
* Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +import static com.google.cloud.bigquery.jdbc.utils.ArrowUtilities.serializeSchema; +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQuery.QueryResultsOption; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobId; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.JobStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics.StatementType; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryJobConfiguration.Priority; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TableResult; +import com.google.cloud.bigquery.jdbc.BigQueryStatement.JobIdWrapper; +import com.google.cloud.bigquery.spi.BigQueryRpcFactory; +import com.google.cloud.bigquery.storage.v1.ArrowSchema; +import com.google.cloud.bigquery.storage.v1.BigQueryReadClient; +import com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest; +import com.google.cloud.bigquery.storage.v1.ReadSession; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Maps; +import java.io.IOException; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.BlockingQueue; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.BitVector; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.IntVector; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.junit.Before; +import org.junit.Ignore; +import org.junit.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; + +public class BigQueryStatementTest { + + private BigQueryConnection bigQueryConnection; + private static final String PROJECT = "project"; + + private BigQueryRpcFactory rpcFactoryMock; + + private BigQueryReadClient storageReadClient; + + private BigQuery bigquery; + + private BigQueryStatement bigQueryStatement; + + private final String query = "select * from test"; + + private final String jobIdVal = UUID.randomUUID().toString(); + + private JobId jobId; + + 
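+  // Fixture note: the (BOOL, INT64) schema below mirrors the Arrow vectors built in
+  // getTestVectorSchemaRoot(), so the Arrow read-path tests see a consistent shape.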
private static final FieldList fieldList =
+      FieldList.of(
+          Field.of("first", StandardSQLTypeName.BOOL),
+          Field.of("second", StandardSQLTypeName.INT64));
+
+  private static final String DEFAULT_TEST_DATASET = "bigquery_test_dataset";
+
+  private static final TableId TABLE_ID = TableId.of(DEFAULT_TEST_DATASET, PROJECT);
+
+  private static ArrowSchema arrowSchema;
+
+  private final Map<String, String> LABELS =
+      new HashMap<String, String>() {
+        {
+          put("key1", "val1");
+          put("key2", "val2");
+          put("key3", "val3");
+        }
+      };
+
+  private Job getJobMock(
+      TableResult result, QueryJobConfiguration configuration, StatementType type)
+      throws InterruptedException {
+    Job job = mock(Job.class);
+    JobStatistics.QueryStatistics statistics = mock(QueryStatistics.class);
+    JobId jobId = mock(JobId.class);
+    doReturn(result).when(job).getQueryResults(any(QueryResultsOption.class));
+    doReturn(jobId).when(job).getJobId();
+    doReturn(configuration).when(job).getConfiguration();
+    doReturn(statistics).when(job).getStatistics();
+    doReturn(type).when(statistics).getStatementType();
+    return job;
+  }
+
+  @Before
+  public void setUp() throws IOException, SQLException {
+    bigQueryConnection = mock(BigQueryConnection.class);
+    rpcFactoryMock = mock(BigQueryRpcFactory.class);
+    bigquery = mock(BigQuery.class);
+    bigQueryConnection.bigQuery = bigquery;
+    storageReadClient = mock(BigQueryReadClient.class);
+    jobId = JobId.newBuilder().setJob(jobIdVal).build();
+
+    doReturn(bigquery).when(bigQueryConnection).getBigQuery();
+    doReturn(10L).when(bigQueryConnection).getJobTimeoutInSeconds();
+    doReturn(10L).when(bigQueryConnection).getMaxBytesBilled();
+    doReturn(LABELS).when(bigQueryConnection).getLabels();
+    doReturn(BigQueryJdbcUrlUtility.DEFAULT_QUERY_DIALECT_VALUE)
+        .when(bigQueryConnection)
+        .getQueryDialect();
+    doReturn(1000L).when(bigQueryConnection).getMaxResults();
+    bigQueryStatement = new BigQueryStatement(bigQueryConnection);
+    VectorSchemaRoot vectorSchemaRoot = getTestVectorSchemaRoot();
+    arrowSchema =
+        ArrowSchema.newBuilder()
+            .setSerializedSchema(serializeSchema(vectorSchemaRoot.getSchema()))
+            .build();
+    // bigQueryConnection.addOpenStatements(bigQueryStatement);
+  }
+
+  private VectorSchemaRoot getTestVectorSchemaRoot() {
+    RootAllocator allocator = new RootAllocator();
+    BitVector boolField =
+        new BitVector("boolField", allocator); // Mapped with StandardSQLTypeName.BOOL
+    boolField.allocateNew(2);
+    boolField.set(0, 0);
+    boolField.setValueCount(1);
+    IntVector int64Field =
+        new IntVector("int64Field", allocator); // Mapped with StandardSQLTypeName.INT64
+    int64Field.allocateNew(2);
+    int64Field.set(0, 1);
+    int64Field.setValueCount(1);
+    List<FieldVector> fieldVectors = ImmutableList.of(boolField, int64Field);
+    return new VectorSchemaRoot(fieldVectors);
+  }
+
+  private BigQueryOptions createBigQueryOptionsForProject(
+      String project, BigQueryRpcFactory rpcFactory) {
+    return BigQueryOptions.newBuilder()
+        .setProjectId(project)
+        .setServiceRpcFactory(rpcFactory)
+        .setRetrySettings(ServiceOptions.getNoRetrySettings())
+        .build();
+  }
+
+  @Test
+  public void testStatementNonNull() {
+    assertThat(bigQueryStatement).isNotNull();
+  }
+
+  @Ignore
+  public void testExecFastQueryPath() throws SQLException, InterruptedException {
+    JobIdWrapper jobIdWrapper = new JobIdWrapper(jobId, null, null);
+    BigQueryStatement bigQueryStatementSpy = Mockito.spy(bigQueryStatement);
+
+    TableResult result = Mockito.mock(TableResult.class);
+    BigQueryJsonResultSet jsonResultSet = mock(BigQueryJsonResultSet.class);
+    QueryJobConfiguration
jobConfiguration = QueryJobConfiguration.newBuilder(query).build(); + + doReturn(result).when(bigquery).query(jobConfiguration); + doReturn(jsonResultSet).when(bigQueryStatementSpy).processJsonResultSet(result); + + bigQueryStatementSpy.runQuery(query, jobConfiguration); + // verify the statement's state + assertThat(bigQueryStatementSpy.jobIds.size()).isEqualTo(1); // job id should be created + assertThat(bigQueryStatementSpy.jobIds.get(0)).isNotNull(); + // assertThat(bigQueryStatementSpy.currentResultSet).isNotNull(); + + } + + @Test + public void testExecSlowQueryPath() throws SQLException, InterruptedException { + JobIdWrapper jobIdWrapper = new JobIdWrapper(jobId, null, null); + BigQueryStatement bigQueryStatementSpy = Mockito.spy(bigQueryStatement); + TableResult tableResult = mock(TableResult.class); + QueryJobConfiguration queryJobConfiguration = + QueryJobConfiguration.newBuilder(query) + .setPriority(Priority.BATCH) // query settings for slow query path + .build(); + Job job = getJobMock(tableResult, queryJobConfiguration, StatementType.SELECT); + + doReturn(job).when(bigquery).create(any(JobInfo.class)); + + doReturn(jobIdWrapper) + .when(bigQueryStatementSpy) + .insertJob(any(com.google.cloud.bigquery.JobConfiguration.class)); + doReturn(false).when(bigQueryStatementSpy).useReadAPI(eq(tableResult)); + doReturn(mock(JobId.class)).when(tableResult).getJobId(); + + ResultSet bigQueryJsonResultSet = mock(BigQueryJsonResultSet.class); + + doReturn(bigQueryJsonResultSet) + .when(bigQueryStatementSpy) + .processJsonResultSet(any(TableResult.class)); + + bigQueryStatementSpy.runQuery(query, queryJobConfiguration); + // verify the statement's state + // job id is created during runQuery, but cleaned up after function completes. + assertThat(bigQueryStatementSpy.jobIds.size()).isEqualTo(0); + assertThat(bigQueryStatementSpy.getResultSet()).isEqualTo(bigQueryJsonResultSet); + } + + @Test + public void getArrowResultSetTest() throws SQLException { + BigQueryStatement bigQueryStatementSpy = Mockito.spy(bigQueryStatement); + BigQueryReadClient bigQueryReadClient = Mockito.spy(mock(BigQueryReadClient.class)); + Schema schema = Schema.of(fieldList); + ReadSession readSession = ReadSession.getDefaultInstance(); + doReturn(bigQueryReadClient).when(bigQueryStatementSpy).getBigQueryReadClient(); + doReturn(readSession) + .when(bigQueryStatementSpy) + .getReadSession(any(CreateReadSessionRequest.class)); + Thread mockWorker = new Thread(); + doReturn(mockWorker) + .when(bigQueryStatementSpy) + .populateArrowBufferedQueue( + any(ReadSession.class), any(BlockingQueue.class), any(BigQueryReadClient.class)); + + doReturn(arrowSchema).when(bigQueryStatementSpy).getArrowSchema(any(ReadSession.class)); + + JobId jobId = JobId.of("123"); + TableResult result = Mockito.mock(TableResult.class); + doReturn(schema).when(result).getSchema(); + doReturn(10L).when(result).getTotalRows(); + doReturn(TABLE_ID).when(bigQueryStatementSpy).getDestinationTable(any()); + doReturn(jobId).when(result).getJobId(); + Job job = mock(Job.class); + doReturn(mock(QueryStatistics.class)).when(job).getStatistics(); + doReturn(job).when(bigquery).getJob(jobId); + + ResultSet resultSet = bigQueryStatementSpy.processArrowResultSet(result); + assertThat(resultSet).isNotNull(); + assertThat(resultSet).isInstanceOf(BigQueryArrowResultSet.class); + assertThat(resultSet.isLast()).isFalse(); // as we have 10 rows + } + + @Test + public void getJobTimeoutTest() throws Exception { + QueryJobConfiguration jobConfig = 
bigQueryStatement.getJobConfig("select 1").build();
+    assertEquals(10000L, jobConfig.getJobTimeoutMs().longValue());
+  }
+
+  @Test
+  public void getMaxBytesBilledTest() throws Exception {
+    QueryJobConfiguration jobConfig = bigQueryStatement.getJobConfig("select 1").build();
+    assertEquals(10L, jobConfig.getMaximumBytesBilled().longValue());
+  }
+
+  @Test
+  public void testSetMaxRowsJson() throws SQLException {
+    assertEquals(0, bigQueryStatement.getMaxRows());
+    bigQueryStatement.setMaxRows(10);
+    assertEquals(10, bigQueryStatement.getMaxRows());
+  }
+
+  @Test
+  public void setQueryTimeoutTest() throws Exception {
+    // setQueryTimeout takes seconds; the driver is expected to convert it to
+    // milliseconds on the job configuration (3s -> 3000ms below).
+    bigQueryStatement.setQueryTimeout(3);
+    BigQueryStatement bigQueryStatementSpy = Mockito.spy(bigQueryStatement);
+
+    TableResult result = Mockito.mock(TableResult.class);
+    BigQueryJsonResultSet jsonResultSet = mock(BigQueryJsonResultSet.class);
+    QueryJobConfiguration jobConfiguration =
+        QueryJobConfiguration.newBuilder(query).setJobTimeoutMs(10000L).build();
+
+    Job job = getJobMock(result, jobConfiguration, StatementType.SELECT);
+    doReturn(job).when(bigquery).create(any(JobInfo.class));
+
+    doReturn(jsonResultSet).when(bigQueryStatementSpy).processJsonResultSet(result);
+    ArgumentCaptor<JobInfo> captor = ArgumentCaptor.forClass(JobInfo.class);
+
+    bigQueryStatementSpy.runQuery(query, jobConfiguration);
+    verify(bigquery).create(captor.capture());
+    QueryJobConfiguration jobConfig = captor.getValue().getConfiguration();
+    assertEquals(3000L, jobConfig.getJobTimeoutMs().longValue());
+  }
+
+  @Test
+  public void getLabelsTest() throws Exception {
+    QueryJobConfiguration jobConfig = bigQueryStatement.getJobConfig("select 1").build();
+    Map<String, String> expected =
+        new HashMap<String, String>() {
+          {
+            put("key1", "val1");
+            put("key2", "val2");
+            put("key3", "val3");
+          }
+        };
+
+    assertTrue(Maps.difference(expected, jobConfig.getLabels()).areEqual());
+  }
+
+  @Test
+  public void getUpdateCountTest() {
+    bigQueryStatement.setUpdateCount(100L);
+    assertEquals(100, bigQueryStatement.getUpdateCount());
+    assertEquals(100L, bigQueryStatement.getLargeUpdateCount());
+  }
+
+  @Test
+  public void testSetExtraLabels() {
+    Map<String, String> extraLabels = new HashMap<>();
+    extraLabels.put("extraKey1", "extraVal1");
+    bigQueryStatement.setExtraLabels(extraLabels);
+    assertEquals(extraLabels, bigQueryStatement.getExtraLabels());
+  }
+
+  @Test
+  public void testGetJobConfigWithExtraLabels() {
+    Map<String, String> extraLabels = new HashMap<>();
+    extraLabels.put("extraKey1", "extraVal1");
+    extraLabels.put("key1", "overrideVal1"); // Override connection label
+    bigQueryStatement.setExtraLabels(extraLabels);
+
+    QueryJobConfiguration jobConfig = bigQueryStatement.getJobConfig("select 1").build();
+    Map<String, String> expectedLabels = new HashMap<>();
+    expectedLabels.put("key1", "overrideVal1");
+    expectedLabels.put("key2", "val2");
+    expectedLabels.put("key3", "val3");
+    expectedLabels.put("extraKey1", "extraVal1");
+
+    assertTrue(Maps.difference(expectedLabels, jobConfig.getLabels()).areEqual());
+  }
+
+  @Test
+  public void testJoblessQuery() throws SQLException, InterruptedException {
+    // 1. Test JobCreationMode=2 (jobless): with stateless query mode enabled the driver
+    //    runs the query via queryWithTimeout (no job is created) and issues a dry-run
+    //    job only to obtain statistics.
+    doReturn(true).when(bigQueryConnection).getUseStatelessQueryMode();
+    BigQueryStatement joblessStatement = new BigQueryStatement(bigQueryConnection);
+    BigQueryStatement joblessStatementSpy = Mockito.spy(joblessStatement);
+
+    TableResult tableResultMock = mock(TableResult.class);
+    doReturn("queryId").when(tableResultMock).getQueryId();
+    doReturn(null).when(tableResultMock).getJobId();
+    doReturn(tableResultMock)
+        .when(bigquery)
+        .queryWithTimeout(any(QueryJobConfiguration.class), any(), any());
+    doReturn(mock(BigQueryJsonResultSet.class))
+        .when(joblessStatementSpy)
+        .processJsonResultSet(tableResultMock);
+
+    Job dryRunJobMock = getJobMock(null, null, StatementType.SELECT);
+    ArgumentCaptor<JobInfo> dryRunCaptor = ArgumentCaptor.forClass(JobInfo.class);
+    doReturn(dryRunJobMock).when(bigquery).create(dryRunCaptor.capture());
+
+    joblessStatementSpy.executeQuery("SELECT 1");
+
+    verify(bigquery).queryWithTimeout(any(QueryJobConfiguration.class), any(), any());
+    verify(bigquery).create(any(JobInfo.class));
+    assertTrue(
+        Boolean.TRUE.equals(
+            ((QueryJobConfiguration) dryRunCaptor.getValue().getConfiguration()).dryRun()));
+
+    // 2. Test JobCreationMode=1 (jobful): a job is created and no stateless query is issued.
+    Mockito.reset(bigquery);
+    doReturn(false).when(bigQueryConnection).getUseStatelessQueryMode();
+    BigQueryStatement jobfulStatement = new BigQueryStatement(bigQueryConnection);
+    BigQueryStatement jobfulStatementSpy = Mockito.spy(jobfulStatement);
+
+    TableResult tableResultJobfulMock = mock(TableResult.class);
+    QueryJobConfiguration jobConf = QueryJobConfiguration.newBuilder("SELECT 1").build();
+    Job jobMock = getJobMock(tableResultJobfulMock, jobConf, StatementType.SELECT);
+    ArgumentCaptor<JobInfo> jobfulCaptor = ArgumentCaptor.forClass(JobInfo.class);
+    doReturn(jobMock).when(bigquery).create(jobfulCaptor.capture());
+    doReturn(mock(BigQueryJsonResultSet.class))
+        .when(jobfulStatementSpy)
+        .processJsonResultSet(tableResultJobfulMock);
+
+    jobfulStatementSpy.executeQuery("SELECT 1");
+
+    verify(bigquery).create(any(JobInfo.class));
+    assertTrue(
+        jobfulCaptor.getAllValues().stream()
+            .noneMatch(
+                jobInfo ->
+                    Boolean.TRUE.equals(
+                        ((QueryJobConfiguration) jobInfo.getConfiguration()).dryRun())));
+    verify(bigquery, Mockito.never())
+        .queryWithTimeout(any(QueryJobConfiguration.class), any(), any());
+  }
+
+  @Test
+  public void testCloseCancelsJob() throws SQLException, InterruptedException {
+    BigQueryStatement bigQueryStatementSpy = Mockito.spy(bigQueryStatement);
+    TableResult tableResult = mock(TableResult.class);
+    Schema mockSchema = Schema.of(FieldList.of());
+    doReturn(mockSchema).when(tableResult).getSchema();
+    QueryJobConfiguration queryJobConfiguration =
+        QueryJobConfiguration.newBuilder(query).setPriority(Priority.BATCH).build();
+    Job job = getJobMock(tableResult, queryJobConfiguration, StatementType.SELECT);
+
+    doReturn(job).when(bigquery).create(any(JobInfo.class));
+    doReturn(false).when(bigQueryStatementSpy).useReadAPI(eq(tableResult));
+    doReturn(mock(JobId.class)).when(tableResult).getJobId();
+    Mockito.when(job.getQueryResults(any(QueryResultsOption.class)))
+        .thenAnswer(
+            invocation -> {
+              Thread.sleep(2000);
+              return null;
+            });
+    Thread t =
+        new Thread(
+            () -> {
+              try {
+                bigQueryStatementSpy.runQuery(query, queryJobConfiguration);
+              } catch (Exception e) {
+                // Ignored: close() below aborts the in-flight query.
+              }
+            });
+
+    t.start();
+    // Sleep to allow background thread to call "create".
+ Thread.sleep(500); + bigQueryStatementSpy.close(); + t.join(); + verify(bigquery, Mockito.times(1)).cancel(any(JobId.class)); + } + + @Test + public void testCancelWithJoblessQuery() throws SQLException, InterruptedException { + doReturn(true).when(bigQueryConnection).getUseStatelessQueryMode(); + BigQueryStatement joblessStatement = new BigQueryStatement(bigQueryConnection); + BigQueryStatement joblessStatementSpy = Mockito.spy(joblessStatement); + + TableResult tableResultMock = mock(TableResult.class); + doReturn(null).when(tableResultMock).getJobId(); + + doReturn(tableResultMock) + .when(bigquery) + .queryWithTimeout(any(QueryJobConfiguration.class), any(), any()); + + Job dryRunJobMock = getJobMock(null, null, StatementType.SELECT); + doReturn(dryRunJobMock).when(bigquery).create(any(JobInfo.class)); + + BigQueryJsonResultSet resultSetMock = mock(BigQueryJsonResultSet.class); + doReturn(resultSetMock).when(joblessStatementSpy).processJsonResultSet(tableResultMock); + + joblessStatementSpy.executeQuery("SELECT 1"); + + // Pre-check: statement has a result set + assertTrue(joblessStatementSpy.currentResultSet != null); + + joblessStatementSpy.cancel(); + + // Post-check: The result set's close() method was called + verify(resultSetMock).close(); + + // And no backend cancellation was attempted + verify(bigquery, Mockito.never()).cancel(any(JobId.class)); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactoryTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactoryTest.java new file mode 100644 index 0000000000..d271df0e26 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryThreadFactoryTest.java @@ -0,0 +1,41 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import java.util.concurrent.ThreadFactory;
+import org.junit.Before;
+import org.junit.Test;
+
+public class BigQueryThreadFactoryTest {
+
+  private ThreadFactory jdbcThreadFactory;
+
+  @Before
+  public void setUp() {
+    jdbcThreadFactory = new BigQueryThreadFactory("BigQuery-Thread-");
+  }
+
+  @Test
+  public void testNewThread() {
+    assertThat(jdbcThreadFactory).isNotNull();
+    Thread thread = jdbcThreadFactory.newThread(() -> {});
+    assertThat(thread).isNotNull();
+    assertThat(thread.getName()).startsWith("BigQuery-Thread-");
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercerTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercerTest.java
new file mode 100644
index 0000000000..a758cf15c0
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/BigQueryTypeCoercerTest.java
@@ -0,0 +1,100 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.common.truth.Truth.assertThat;
+import static org.junit.Assert.assertThrows;
+
+import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionException;
+import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionNotFoundException;
+import com.google.cloud.bigquery.jdbc.TestType.Text;
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.util.function.Function;
+import org.junit.Test;
+
+public class BigQueryTypeCoercerTest {
+
+  @Test
+  public void shouldReturnSameValueWhenTargetTypeIsSameAsSourceType() {
+    assertThat(BigQueryTypeCoercer.INSTANCE.coerceTo(Integer.class, 56)).isEqualTo(56);
+    assertThat(BigQueryTypeCoercer.INSTANCE.coerceTo(Long.class, 56L)).isEqualTo(56L);
+  }
+
+  @Test
+  public void shouldBeAbleToComposeMultipleCoercions() {
+    StringToBigDecimal stringToBigDecimal = new StringToBigDecimal();
+    BigDecimalToBigInteger bigDecimalToBigInteger = new BigDecimalToBigInteger();
+
+    Function<String, BigInteger> composedCoercion =
+        stringToBigDecimal.andThen(bigDecimalToBigInteger);
+
+    BigQueryTypeCoercer bigQueryTypeCoercer =
+        new BigQueryTypeCoercerBuilder()
+            .registerTypeCoercion(composedCoercion, String.class, BigInteger.class)
+            .build();
+
+    assertThat(bigQueryTypeCoercer.coerceTo(BigInteger.class, "123567.66884"))
+        .isEqualTo(BigInteger.valueOf(123567));
+  }
+
+  @Test
+  public void shouldThrowCoercionNotFoundException() {
+    byte[] bytesArray = {72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33};
+    Text text = new Text(bytesArray);
+
+    BigQueryJdbcCoercionNotFoundException exception =
+        assertThrows(
+            BigQueryJdbcCoercionNotFoundException.class,
+            () -> BigQueryTypeCoercer.INSTANCE.coerceTo(Long.class, text));
+    assertThat(exception.getMessage())
+        .isEqualTo(
+            "Coercion not found for "
+                + "[com.google.cloud.bigquery.jdbc.TestType.Text -> java.lang.Long]"
+                + " conversion");
+  }
+
+  @Test
+  public void shouldThrowCoercionException() {
+    BigQueryTypeCoercer bigQueryTypeCoercer =
+        new BigQueryTypeCoercerBuilder()
+            .registerTypeCoercion(Math::toIntExact, Long.class, Integer.class)
+            .build();
+    BigQueryJdbcCoercionException exception =
+        assertThrows(
+            BigQueryJdbcCoercionException.class,
+            () -> bigQueryTypeCoercer.coerceTo(Integer.class, 2147483648L));
+    assertThat(exception.getMessage()).isEqualTo("Coercion error");
+    assertThat(exception.getCause()).isInstanceOf(ArithmeticException.class);
+  }
+
+  private static class StringToBigDecimal implements BigQueryCoercion<String, BigDecimal> {
+
+    @Override
+    public BigDecimal coerce(String value) {
+      return new BigDecimal(value);
+    }
+  }
+
+  private static class BigDecimalToBigInteger implements BigQueryCoercion<BigDecimal, BigInteger> {
+
+    @Override
+    public BigInteger coerce(BigDecimal value) {
+      return value.toBigInteger();
+    }
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/FieldValueTypeBigQueryCoercionUtilityTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/FieldValueTypeBigQueryCoercionUtilityTest.java
new file mode 100644
index 0000000000..456b750ca5
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/FieldValueTypeBigQueryCoercionUtilityTest.java
@@ -0,0 +1,378 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.cloud.bigquery.FieldValue.Attribute.PRIMITIVE;
+import static com.google.cloud.bigquery.FieldValue.Attribute.RANGE;
+import static com.google.cloud.bigquery.FieldValue.Attribute.RECORD;
+import static com.google.cloud.bigquery.FieldValue.Attribute.REPEATED;
+import static com.google.cloud.bigquery.jdbc.BigQueryTypeCoercionUtility.INSTANCE;
+import static com.google.common.truth.Truth.assertThat;
+import static org.junit.Assert.assertThrows;
+
+import com.google.cloud.bigquery.FieldElementType;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.FieldValueList;
+import com.google.cloud.bigquery.Range;
+import com.google.cloud.bigquery.exception.BigQueryJdbcCoercionException;
+import com.google.common.collect.ImmutableList;
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.ZoneId;
+import java.time.temporal.ChronoUnit;
+import java.util.concurrent.TimeUnit;
+import org.junit.Test;
+
+public class FieldValueTypeBigQueryCoercionUtilityTest {
+  private static final FieldValue STRING_VALUE = FieldValue.of(PRIMITIVE, "sample-string");
+  private static final FieldValue INTEGER_VALUE = FieldValue.of(PRIMITIVE, "345");
+  private static final FieldValue FLOAT_VALUE = FieldValue.of(PRIMITIVE, "345.21");
+  private static final FieldValue SHORT_VALUE = FieldValue.of(PRIMITIVE, "345");
+  private static final FieldValue LONG_VALUE = FieldValue.of(PRIMITIVE, "4374218905");
+  private static final FieldValue DOUBLE_VALUE = FieldValue.of(PRIMITIVE, "56453.458393");
+  private static final FieldValue BIG_DECIMAL_VALUE = FieldValue.of(PRIMITIVE, "56453.458393");
+  private static final FieldValue BASE64_ENCODED_VALUE =
+      FieldValue.of(PRIMITIVE, "SGVsbG8gV29ybGQK"); // decodes to "Hello World\n"
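+  // 1408452095.22 is epoch seconds with a fractional part, i.e. 2014-08-19T12:41:35.22Z
+  // (the same instant as the start of RANGE_TIMESTAMP below).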
+  private static final FieldValue TIMESTAMP_VALUE = FieldValue.of(PRIMITIVE, "1408452095.22");
+  private static final FieldValue DATE_VALUE = FieldValue.of(PRIMITIVE, "2023-03-13");
+  private static final FieldValue TIME_VALUE = FieldValue.of(PRIMITIVE, "23:59:59");
+  private static final FieldValue TIME_WITH_NANOSECOND_VALUE =
+      FieldValue.of(PRIMITIVE, "23:59:59.99999");
+  private static final FieldValue TRUE_VALUE = FieldValue.of(PRIMITIVE, "true");
+  private static final FieldValue FALSE_VALUE = FieldValue.of(PRIMITIVE, "false");
+  private static final FieldValue NULL_VALUE = FieldValue.of(PRIMITIVE, null);
+  private static final FieldValue INTEGER_ARRAY =
+      FieldValue.of(
+          REPEATED,
+          FieldValueList.of(
+              ImmutableList.of(FieldValue.of(PRIMITIVE, 1), FieldValue.of(PRIMITIVE, 2))));
+  private static final FieldValue RECORD_VALUE =
+      FieldValue.of(
+          RECORD, ImmutableList.of(INTEGER_VALUE, STRING_VALUE, TIME_VALUE, INTEGER_ARRAY));
+
+  private static final Range RANGE_DATE =
+      Range.newBuilder()
+          .setType(FieldElementType.newBuilder().setType("DATE").build())
+          .setStart("1970-01-02")
+          .setEnd("1970-03-04")
+          .build();
+
+  private static final Range RANGE_DATETIME =
+      Range.newBuilder()
+          .setType(FieldElementType.newBuilder().setType("DATETIME").build())
+          .setStart("2014-08-19 05:41:35.220000")
+          .setEnd("2015-09-20 06:41:35.220000")
+          .build();
+
+  private static final Range RANGE_TIMESTAMP =
+      Range.newBuilder()
+          .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build())
+          .setStart("2014-08-19 12:41:35.220000+00:00")
+          .setEnd("2015-09-20 13:41:35.220000+01:00")
+          .build();
+
+  private static final FieldValue RANGE_DATE_VALUE = FieldValue.of(RANGE, RANGE_DATE);
+  private static final FieldValue RANGE_DATE_TIME_VALUE = FieldValue.of(RANGE, RANGE_DATETIME);
+  private static final FieldValue RANGE_TIMESTAMP_VALUE = FieldValue.of(RANGE, RANGE_TIMESTAMP);
+
+  @Test
+  public void fieldValueToStringRangeDate() {
+    String expectedRangeDate =
+        String.format(
+            "[%s, %s)",
+            RANGE_DATE.getStart().getStringValue(), RANGE_DATE.getEnd().getStringValue());
+    assertThat(INSTANCE.coerceTo(String.class, RANGE_DATE_VALUE)).isEqualTo(expectedRangeDate);
+  }
+
+  @Test
+  public void rangeDateToString() {
+    String expectedRangeDate =
+        String.format(
+            "[%s, %s)",
+            RANGE_DATE.getStart().getStringValue(), RANGE_DATE.getEnd().getStringValue());
+    assertThat(INSTANCE.coerceTo(String.class, RANGE_DATE)).isEqualTo(expectedRangeDate);
+  }
+
+  @Test
+  public void fieldValueToStringRangeDatetime() {
+    String expectedRangeDatetime =
+        String.format(
+            "[%s, %s)",
+            RANGE_DATETIME.getStart().getStringValue(), RANGE_DATETIME.getEnd().getStringValue());
+    assertThat(INSTANCE.coerceTo(String.class, RANGE_DATE_TIME_VALUE))
+        .isEqualTo(expectedRangeDatetime);
+  }
+
+  @Test
+  public void rangeDatetimeToString() {
+    String expectedRangeDatetime =
+        String.format(
+            "[%s, %s)",
+            RANGE_DATETIME.getStart().getStringValue(), RANGE_DATETIME.getEnd().getStringValue());
+    assertThat(INSTANCE.coerceTo(String.class, RANGE_DATETIME)).isEqualTo(expectedRangeDatetime);
+  }
+
+  @Test
+  public void fieldValueToStringRangeTimestamp() {
+    String expectedRangeTimestamp =
+        String.format(
+            "[%s, %s)",
+            RANGE_TIMESTAMP.getStart().getStringValue(), RANGE_TIMESTAMP.getEnd().getStringValue());
+    assertThat(INSTANCE.coerceTo(String.class, RANGE_TIMESTAMP_VALUE))
+        .isEqualTo(expectedRangeTimestamp);
+  }
+
+  @Test
+  public void rangeTimestampToString() {
+    String expectedRangeTimestamp =
+        String.format(
+            "[%s, %s)",
RANGE_TIMESTAMP.getStart().getStringValue(), RANGE_TIMESTAMP.getEnd().getStringValue()); + assertThat(INSTANCE.coerceTo(String.class, RANGE_TIMESTAMP)).isEqualTo(expectedRangeTimestamp); + } + + @Test + public void fieldValueToString() { + assertThat(INSTANCE.coerceTo(String.class, STRING_VALUE)).isEqualTo("sample-string"); + } + + @Test + public void fieldValueToStringWhenNull() { + assertThat(INSTANCE.coerceTo(String.class, null)).isNull(); + } + + @Test + public void fieldValueToStringWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(String.class, NULL_VALUE)).isNull(); + } + + @Test + public void fieldValueToStringWhenInnerValueIsAnArray() { + assertThat(INSTANCE.coerceTo(String.class, INTEGER_ARRAY)) + .isEqualTo( + "[FieldValue{attribute=PRIMITIVE, value=1, useInt64Timestamps=false}, FieldValue{attribute=PRIMITIVE, value=2, useInt64Timestamps=false}]"); + } + + @Test + public void fieldValueToInteger() { + assertThat(INSTANCE.coerceTo(Integer.class, INTEGER_VALUE)).isEqualTo(345); + } + + @Test + public void fieldValueToIntegerWhenNull() { + assertThat(INSTANCE.coerceTo(Integer.class, null)).isEqualTo(0); + } + + @Test + public void fieldValueToIntegerWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Integer.class, NULL_VALUE)).isEqualTo(0); + } + + @Test + public void fieldValueToFloat() { + assertThat(INSTANCE.coerceTo(Float.class, FLOAT_VALUE)).isEqualTo(345.21f); + } + + @Test + public void fieldValueToFloatWhenNull() { + assertThat(INSTANCE.coerceTo(Float.class, null)).isEqualTo(0f); + } + + @Test + public void fieldValueToFloatWhenInnerValueNull() { + assertThat(INSTANCE.coerceTo(Float.class, NULL_VALUE)).isEqualTo(0f); + } + + @Test + public void fieldValueToShort() { + assertThat(INSTANCE.coerceTo(Short.class, SHORT_VALUE)).isEqualTo((short) 345); + } + + @Test + public void fieldValueToShortWhenNull() { + assertThat(INSTANCE.coerceTo(Short.class, null)).isEqualTo((short) 0); + } + + @Test + public void fieldValueToShortWhenInnerValueNull() { + assertThat(INSTANCE.coerceTo(Short.class, NULL_VALUE)).isEqualTo((short) 0); + } + + @Test + public void fieldValueToLong() { + assertThat(INSTANCE.coerceTo(Long.class, LONG_VALUE)).isEqualTo(4374218905L); + } + + @Test + public void fieldValueToLongWhenNull() { + assertThat(INSTANCE.coerceTo(Long.class, null)).isEqualTo(0L); + } + + @Test + public void fieldValueToLongWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Long.class, NULL_VALUE)).isEqualTo(0L); + } + + @Test + public void fieldValueToDouble() { + assertThat(INSTANCE.coerceTo(Double.class, DOUBLE_VALUE)).isEqualTo(56453.458393D); + } + + @Test + public void fieldValueToDoubleWhenNull() { + assertThat(INSTANCE.coerceTo(Double.class, null)).isEqualTo(0D); + } + + @Test + public void fieldValueToDoubleWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Double.class, NULL_VALUE)).isEqualTo(0D); + } + + @Test + public void fieldValueToBigDecimal() { + assertThat(INSTANCE.coerceTo(BigDecimal.class, BIG_DECIMAL_VALUE)) + .isEqualTo(new BigDecimal("56453.458393")); + } + + @Test + public void fieldValueToBigDecimalWhenNull() { + assertThat(INSTANCE.coerceTo(BigDecimal.class, null)).isNull(); + } + + @Test + public void fieldValueToBigDecimalWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(BigDecimal.class, NULL_VALUE)).isNull(); + } + + @Test + public void fieldValueToBoolean() { + assertThat(INSTANCE.coerceTo(Boolean.class, TRUE_VALUE)).isTrue(); + assertThat(INSTANCE.coerceTo(Boolean.class, FALSE_VALUE)).isFalse(); + } + + @Test + public void 
fieldValueToBooleanWhenNull() { + assertThat(INSTANCE.coerceTo(Boolean.class, null)).isFalse(); + } + + @Test + public void fieldValueToBooleanWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Boolean.class, NULL_VALUE)).isFalse(); + } + + @Test + public void fieldValueToBytesArray() { + assertThat(INSTANCE.coerceTo(byte[].class, BASE64_ENCODED_VALUE)) + .isEqualTo(new byte[] {72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 10}); + } + + @Test + public void fieldValueToBytesArrayWhenNull() { + assertThat(INSTANCE.coerceTo(byte[].class, null)).isNull(); + } + + @Test + public void fieldValueToBytesArrayWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(byte[].class, NULL_VALUE)).isNull(); + } + + @Test + public void fieldValueToTimestamp() { + Instant instant = Instant.EPOCH.plus(TIMESTAMP_VALUE.getTimestampValue(), ChronoUnit.MICROS); + LocalDateTime localDateTime = LocalDateTime.ofInstant(instant, ZoneId.of("UTC")); + assertThat(INSTANCE.coerceTo(Timestamp.class, TIMESTAMP_VALUE)) + .isEqualTo(Timestamp.valueOf(localDateTime)); + } + + @Test + public void fieldValueToTimestampWhenNull() { + assertThat(INSTANCE.coerceTo(Timestamp.class, null)).isNull(); + } + + @Test + public void fieldValueToTimestampWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Timestamp.class, NULL_VALUE)).isNull(); + } + + @Test + public void fieldValueToTime() { + LocalTime expectedTime = LocalTime.of(23, 59, 59); + assertThat(INSTANCE.coerceTo(Time.class, TIME_VALUE)) + .isEqualTo(new Time(TimeUnit.NANOSECONDS.toMillis(expectedTime.toNanoOfDay()))); + LocalTime expectedTimeWithNanos = LocalTime.parse("23:59:59.99999"); + assertThat(INSTANCE.coerceTo(Time.class, TIME_WITH_NANOSECOND_VALUE)) + .isEqualTo(new Time(TimeUnit.NANOSECONDS.toMillis(expectedTimeWithNanos.toNanoOfDay()))); + } + + @Test + public void fieldValueToTimeWhenNull() { + assertThat(INSTANCE.coerceTo(Time.class, null)).isNull(); + } + + @Test + public void fieldValueToTimeWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Time.class, NULL_VALUE)).isNull(); + } + + @Test + public void fieldValueToTimeWithInvalidValue() { + FieldValue invalidTime = FieldValue.of(PRIMITIVE, "99:99:99"); + + BigQueryJdbcCoercionException coercionException = + assertThrows( + BigQueryJdbcCoercionException.class, () -> INSTANCE.coerceTo(Time.class, invalidTime)); + assertThat(coercionException.getCause()).isInstanceOf(IllegalArgumentException.class); + } + + @Test + public void fieldValueToDate() { + LocalDate expectedDate = LocalDate.of(2023, 3, 13); + assertThat(INSTANCE.coerceTo(Date.class, DATE_VALUE)).isEqualTo(Date.valueOf(expectedDate)); + } + + @Test + public void fieldValueToDateWhenNull() { + assertThat(INSTANCE.coerceTo(Date.class, null)).isNull(); + } + + @Test + public void fieldValueToDateWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Date.class, NULL_VALUE)).isNull(); + } + + @Test + public void fieldValueToObject() { + assertThat(INSTANCE.coerceTo(Object.class, RECORD_VALUE)) + .isEqualTo(ImmutableList.of(INTEGER_VALUE, STRING_VALUE, TIME_VALUE, INTEGER_ARRAY)); + } + + @Test + public void fieldValueToObjectWhenNull() { + assertThat(INSTANCE.coerceTo(Object.class, null)).isNull(); + } + + @Test + public void fieldValueToObjectWhenInnerValueIsNull() { + assertThat(INSTANCE.coerceTo(Object.class, NULL_VALUE)).isNull(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/NullHandlingTest.java 
b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/NullHandlingTest.java
new file mode 100644
index 0000000000..ab274c8797
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/NullHandlingTest.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.jdbc;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import org.junit.Test;
+
+public class NullHandlingTest {
+
+  @Test
+  public void shouldReturnNullForNullByDefault() {
+    assertThat(BigQueryTypeCoercer.INSTANCE.coerceTo(Integer.class, null)).isNull();
+  }
+
+  @Test
+  public void shouldReturnCustomValueForNull() {
+    BigQueryTypeCoercer bigQueryTypeCoercer =
+        new BigQueryTypeCoercerBuilder().registerTypeCoercion(new NullToIntegerCoercion()).build();
+
+    assertThat(bigQueryTypeCoercer.coerceTo(Integer.class, null)).isEqualTo(0);
+  }
+
+  private static class NullToIntegerCoercion implements BigQueryCoercion<Void, Integer> {
+    @Override
+    public Integer coerce(Void value) {
+      return 0; // returning zero as the default value
+    }
+  }
+}
diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSourceTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSourceTest.java
new file mode 100644
index 0000000000..e3ef8a27f6
--- /dev/null
+++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionDataSourceTest.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2025 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; + +import java.sql.SQLException; +import javax.sql.PooledConnection; +import org.junit.Test; + +public class PooledConnectionDataSourceTest { + private static final Long LISTENER_POOL_SIZE = 20L; + private static final Long CONNECTION_POOL_SIZE = 20L; + private static final Long DEFAULT_LISTENER_POOL_SIZE = 10L; + private static final Long DEFAULT_CONNECTION_POOL_SIZE = 10L; + + @Test + public void testGetPooledConnection() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + BigQueryConnection bqConnection = mock(BigQueryConnection.class); + doReturn(connectionUrl).when(bqConnection).getConnectionUrl(); + doReturn(LISTENER_POOL_SIZE).when(bqConnection).getListenerPoolSize(); + doReturn(CONNECTION_POOL_SIZE).when(bqConnection).getConnectionPoolSize(); + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setConnection(bqConnection); + pooledDataSource.setListenerPoolSize(LISTENER_POOL_SIZE); + pooledDataSource.setConnectionPoolSize(CONNECTION_POOL_SIZE); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + assertTrue(pooledConnection instanceof BigQueryPooledConnection); + BigQueryPooledConnection bqPooledConnection = (BigQueryPooledConnection) pooledConnection; + assertEquals(LISTENER_POOL_SIZE, bqPooledConnection.getListenerPoolSize()); + assertNotNull(pooledDataSource.getConnectionPoolManager()); + assertEquals( + CONNECTION_POOL_SIZE, pooledDataSource.getConnectionPoolManager().getConnectionPoolSize()); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionListenerTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionListenerTest.java new file mode 100644 index 0000000000..2d1553f655 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/PooledConnectionListenerTest.java @@ -0,0 +1,176 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertTrue; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; + +import java.io.IOException; +import java.sql.Connection; +import java.sql.SQLException; +import org.junit.Before; +import org.junit.Test; + +public class PooledConnectionListenerTest { + private BigQueryConnection bigQueryConnection; + private static final Long LISTENER_POOL_SIZE = 10L; + private static final Long CONNECTION_POOL_SIZE = 10L; + + @Before + public void setUp() throws IOException, SQLException { + bigQueryConnection = mock(BigQueryConnection.class); + // Stub the listener pool size + doReturn(LISTENER_POOL_SIZE).when(bigQueryConnection).getListenerPoolSize(); + doReturn(CONNECTION_POOL_SIZE).when(bigQueryConnection).getConnectionPoolSize(); + } + + @Test + public void testAddConnectionListener() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + assertNull(listener.getPooledConnection()); + } + + @Test + public void testRemoveConnectionListener() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + assertTrue(pooledConnection.isListenerPooled(listener)); + pooledConnection.removeConnectionEventListener(listener); + assertFalse(pooledConnection.isListenerPooled(listener)); + } + + @Test + public void testConnectionHandleClosedByConnection() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + connection.close(); + assertFalse(pooledConnection.inUse()); + assertFalse(listener.isConnectionPoolEmpty()); + assertEquals(1, listener.getConnectionPoolCurrentCapacity()); + assertTrue(pooledConnection.isListenerPooled(listener)); + } + + @Test + public void testConnectionHandleClosedByPooledConnection() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + Connection connection = 
pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + pooledConnection.close(); + assertFalse(pooledConnection.inUse()); + assertFalse(listener.isConnectionPoolEmpty()); + assertEquals(1, listener.getConnectionPoolCurrentCapacity()); + assertTrue(pooledConnection.isListenerPooled(listener)); + } + + @Test + public void testFireConnectionError() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + pooledConnection.fireConnectionError(new SQLException("test")); + assertFalse(pooledConnection.inUse()); + assertTrue(listener.isConnectionPoolEmpty()); + assertFalse(pooledConnection.isListenerPooled(listener)); + } + + @Test + public void testGetConnectionWhenPoolEmpty() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + assertNull(listener.getPooledConnection()); + } + + @Test + public void testGetConnectionWhenPoolNonEmpty() throws SQLException { + BigQueryPooledConnection pooledConnection = new BigQueryPooledConnection(bigQueryConnection); + assertFalse(pooledConnection.inUse()); + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertTrue(pooledConnection.inUse()); + + PooledConnectionListener listener = new PooledConnectionListener(LISTENER_POOL_SIZE); + assertEquals(CONNECTION_POOL_SIZE, listener.getConnectionPoolSize()); + assertEquals(LISTENER_POOL_SIZE, pooledConnection.getListenerPoolSize()); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + connection.close(); + assertFalse(pooledConnection.inUse()); + assertFalse(listener.isConnectionPoolEmpty()); + assertTrue(pooledConnection.isListenerPooled(listener)); + + assertNotNull(listener.getPooledConnection()); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/TestType.java 
b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/TestType.java new file mode 100644 index 0000000000..3222525c5e --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/TestType.java @@ -0,0 +1,31 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc; + +public class TestType { + public static class Text { + private final byte[] bytes; + + public Text(byte[] bytes) { + this.bytes = bytes; + } + + public byte[] getBytes() { + return bytes; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBase.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBase.java new file mode 100644 index 0000000000..5aa41b2975 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBase.java @@ -0,0 +1,21 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc.it; + +import com.google.cloud.bigquery.jdbc.BigQueryJdbcBaseTest; + +public class ITBase extends BigQueryJdbcBaseTest {} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBigQueryJDBCTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBigQueryJDBCTest.java new file mode 100644 index 0000000000..22f5c1c763 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITBigQueryJDBCTest.java @@ -0,0 +1,4439 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc.it; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryError; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcRuntimeException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlFeatureNotSupportedException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlSyntaxErrorException; +import com.google.cloud.bigquery.jdbc.BigQueryConnection; +import com.google.cloud.bigquery.jdbc.BigQueryDriver; +import com.google.cloud.bigquery.jdbc.DataSource; +import com.google.cloud.bigquery.jdbc.PooledConnectionDataSource; +import com.google.cloud.bigquery.jdbc.PooledConnectionListener; +import com.google.cloud.bigquery.jdbc.utils.TestUtilities.TestConnectionListener; +import com.google.common.collect.ImmutableMap; +import com.google.gson.JsonObject; +import com.google.gson.JsonParser; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.math.BigDecimal; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.sql.CallableStatement; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.Date; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.time.Instant; +import java.time.LocalTime; +import java.util.Arrays; +import java.util.Calendar; +import java.util.HashSet; +import java.util.Properties; +import java.util.Random; +import java.util.Set; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.function.BiFunction; +import javax.sql.PooledConnection; +import org.junit.AfterClass; +import org.junit.Assert; +import org.junit.BeforeClass; +import org.junit.Ignore; +import org.junit.Test; + +public class ITBigQueryJDBCTest extends ITBase { + static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); + static Connection bigQueryConnection; + static BigQuery bigQuery; + static Statement bigQueryStatement; + static Connection bigQueryConnectionNoReadApi; + static Statement bigQueryStatementNoReadApi; + static final String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3"; + static final String session_enabled_connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;EnableSession=1"; + private static final String BASE_QUERY = + "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 order by" + + " trip_distance asc LIMIT %s"; + private static final Random random 
= new Random(); + private static final int randomNumber = random.nextInt(9999); + private static final String DATASET = "JDBC_PRESUBMIT_INTEGRATION_DATASET"; + private static final String DATASET2 = "JDBC_PRESUBMIT_INTEGRATION_DATASET_2"; + private static final String CONSTRAINTS_DATASET = "JDBC_CONSTRAINTS_TEST_DATASET"; + private static final String CONSTRAINTS_TABLE_NAME = "JDBC_CONSTRAINTS_TEST_TABLE"; + private static final String CONSTRAINTS_TABLE_NAME2 = "JDBC_CONSTRAINTS_TEST_TABLE2"; + private static final String CONSTRAINTS_TABLE_NAME3 = "JDBC_CONSTRAINTS_TEST_TABLE3"; + private static final String CALLABLE_STMT_PROC_NAME = "IT_CALLABLE_STMT_PROC_TEST"; + private static final String CALLABLE_STMT_TABLE_NAME = "IT_CALLABLE_STMT_PROC_TABLE"; + private static final String CALLABLE_STMT_PARAM_KEY = "CALL_STMT_PARAM_KEY"; + private static final String CALLABLE_STMT_DML_INSERT_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_INSERT_TEST"; + private static final String CALLABLE_STMT_DML_UPDATE_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_UPDATE_TEST"; + private static final String CALLABLE_STMT_DML_DELETE_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_DELETE_TEST"; + private static final String CALLABLE_STMT_DML_TABLE_NAME = "IT_CALLABLE_STMT_PROC_DML_TABLE"; + private static final Long DEFAULT_CONN_POOL_SIZE = 10L; + private static final Long CUSTOM_CONN_POOL_SIZE = 5L; + private static final Object EXCEPTION_REPLACEMENT = "EXCEPTION-WAS-RAISED"; + + private static String requireEnvVar(String varName) { + String value = System.getenv(varName); + assertNotNull( + "Environment variable " + varName + " is required to perform these tests.", + value); + return value; + } + + private JsonObject getAuthJson() throws IOException { + final String secret = requireEnvVar("SA_SECRET"); + JsonObject authJson; + // Supporting both formats of SA_SECRET: + // - Local runs can point to a JSON key file + // - Cloud Build supplies the JSON value directly + try (InputStream stream = Files.newInputStream(Paths.get(secret)); + InputStreamReader reader = new InputStreamReader(stream)) { + authJson = JsonParser.parseReader(reader).getAsJsonObject(); + } catch (IOException e) { + authJson = JsonParser.parseString(secret).getAsJsonObject(); + } + assertTrue(authJson.has("client_email")); + assertTrue(authJson.has("private_key")); + assertTrue(authJson.has("project_id")); + return authJson; + } + + private void validateConnection(String connection_uri) throws SQLException { + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 850"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @BeforeClass + public static void beforeClass() throws SQLException { + bigQueryConnection = DriverManager.getConnection(connection_uri, new Properties()); + bigQueryStatement = bigQueryConnection.createStatement(); + + Properties noReadApi = new Properties(); + noReadApi.setProperty("EnableHighThroughputAPI", "0"); + bigQueryConnectionNoReadApi = DriverManager.getConnection(connection_uri, noReadApi); + bigQueryStatementNoReadApi =
bigQueryConnectionNoReadApi.createStatement(); + bigQuery = BigQueryOptions.newBuilder().build().getService(); + } + + @AfterClass + public static void afterClass() throws SQLException { + bigQueryStatement.close(); + bigQueryConnection.close(); + bigQueryStatementNoReadApi.close(); + bigQueryConnectionNoReadApi.close(); + } + + @Test + public void testValidServiceAccountAuthentication() throws SQLException, IOException { + final JsonObject authJson = getAuthJson(); + File tempFile = File.createTempFile("auth", ".json"); + tempFile.deleteOnExit(); + Files.write(tempFile.toPath(), authJson.toString().getBytes()); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + authJson.get("project_id").getAsString() + + ";OAuthType=0;" + + "OAuthPvtKeyPath=" + + tempFile.toPath() + + ";"; + + validateConnection(connection_uri); + } + + @Test + public void testServiceAccountAuthenticationMissingOAuthPvtKeyPath() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=0;"; + + try { + DriverManager.getConnection(connection_uri); + Assert.fail(); + } catch (BigQueryJdbcRuntimeException ex) { + assertTrue(ex.getMessage().contains("No valid credentials provided.")); + } + } + + @Test + public void testValidServiceAccountAuthenticationOAuthPvtKeyAsPath() + throws SQLException, IOException { + final JsonObject authJson = getAuthJson(); + File tempFile = File.createTempFile("auth", ".json"); + tempFile.deleteOnExit(); + Files.write(tempFile.toPath(), authJson.toString().getBytes()); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + authJson.get("project_id").getAsString() + + ";OAuthType=0;" + + "OAuthServiceAcctEmail=;" + + ";OAuthPvtKey=" + + tempFile.toPath() + + ";"; + validateConnection(connection_uri); + } + + @Test + public void testValidServiceAccountAuthenticationViaEmailAndPkcs8Key() + throws SQLException, IOException { + final JsonObject authJson = getAuthJson(); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + authJson.get("project_id").getAsString() + + ";OAuthType=0;" + + "OAuthServiceAcctEmail=" + + authJson.get("client_email").getAsString() + + ";OAuthPvtKey=" + + authJson.get("private_key").getAsString() + + ";"; + validateConnection(connection_uri); + } + + @Test + public void testValidServiceAccountAuthenticationOAuthPvtKeyAsJson() + throws SQLException, IOException { + final JsonObject authJson = getAuthJson(); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + authJson.get("project_id").getAsString() + + ";OAuthType=0;" + + "OAuthServiceAcctEmail=;" + + ";OAuthPvtKey=" + + authJson.toString() + + ";"; + validateConnection(connection_uri); + } + + // TODO(kirl): Enable this test when pipeline has p12 secret available. 
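+ // The P12 variant below exercises the same OAuthType=0 service-account flow as the JSON-key tests above; only the key file format supplied via OAuthPvtKeyPath differs.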
+ @Test + @Ignore + public void testValidServiceAccountAuthenticationP12() throws SQLException, IOException { + final JsonObject authJson = getAuthJson(); + final String p12_file = requireEnvVar("SA_SECRET_P12"); + + final String connectionUri = + getBaseUri(0, authJson.get("project_id").getAsString()) + .append("OAuthServiceAcctEmail", authJson.get("client_email").getAsString()) + .append("OAuthPvtKeyPath", p12_file) + .toString(); + validateConnection(connectionUri); + } + + @Test + @Ignore + public void testValidGoogleUserAccountAuthentication() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAuthType=1;OAuthClientId=client_id;OAuthClientSecret=client_secret;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_USER_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + @Test + @Ignore + public void testValidExternalAccountAuthentication() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=4;" + + "BYOID_AudienceUri=//iam.googleapis.com/projects//locations//workloadIdentityPools//providers/;" + + "BYOID_SubjectTokenType=;BYOID_CredentialSource={\"file\":\"/path/to/file\"};" + + "BYOID_SA_Impersonation_Uri=;BYOID_TokenUri=;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "EXTERNAL_ACCOUNT_AUTH", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + @Test + @Ignore + public void testValidExternalAccountAuthenticationFromFile() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=4;" + + "OAuthPvtKeyPath=/path/to/file;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "EXTERNAL_ACCOUNT_AUTH", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + @Test + @Ignore + public void testValidExternalAccountAuthenticationRawJson() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=4;OAuthPvtKey={\n" + + " \"universe_domain\": \"googleapis.com\",\n" + + " \"type\": \"external_account\",\n" + + " \"audience\":" + + " 
\"//iam.googleapis.com/projects//locations//workloadIdentityPools//providers/\",\n" + + " \"subject_token_type\": \"\",\n" + + " \"token_url\": \"\",\n" + + " \"credential_source\": {\n" + + " \"file\": \"/path/to/file\"\n" + + " },\n" + + " \"service_account_impersonation_url\": \"\"\n" + + "};"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "EXTERNAL_ACCOUNT_AUTH", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + // TODO(farhan): figure out how to programmatically generate an access token and test + @Test + @Ignore + public void testValidPreGeneratedAccessTokenAuthentication() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=2;OAuthAccessToken=access_token;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "PRE_GENERATED_TOKEN", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + // TODO(obada): figure out how to programmatically generate a refresh token and test + @Test + @Ignore + public void testValidRefreshTokenAuthentication() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=2;OAuthRefreshToken=refresh_token;" + + ";OAuthClientId=client;OAuthClientSecret=secret;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "PRE_GENERATED_TOKEN", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertEquals(50, resultSetRowCount(resultSet)); + connection.close(); + } + + @Test + public void testValidApplicationDefaultCredentialsAuthentication() throws SQLException { + String connection_uri = getBaseUri(3, PROJECT_ID).toString(); + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "APPLICATION_DEFAULT_CREDENTIALS", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + connection.close(); + } + + // This test is useing the same client email as a main authorization & impersonation. 
+ // It requires account to have 'tokenCreator' permission, see + // https://cloud.google.com/docs/authentication/use-service-account-impersonation#required-roles + @Test + public void testServiceAccountAuthenticationWithImpersonation() throws IOException, SQLException { + final JsonObject authJson = getAuthJson(); + + String connection_uri = + getBaseUri(0, authJson.get("project_id").getAsString()) + .append("OAuthServiceAcctEmail", authJson.get("client_email").getAsString()) + .append("OAuthPvtKey", authJson.get("private_key").getAsString()) + .append("ServiceAccountImpersonationEmail", authJson.get("client_email").getAsString()) + .toString(); + validateConnection(connection_uri); + } + + // This test uses the same client email for the main authorization and a chain of impersonations. + // It requires the account to have 'tokenCreator' permission on itself. + @Test + public void testServiceAccountAuthenticationWithChainedImpersonation() + throws IOException, SQLException { + final JsonObject authJson = getAuthJson(); + String clientEmail = authJson.get("client_email").getAsString(); + + String connection_uri = + getBaseUri(0, authJson.get("project_id").getAsString()) + .append("OAuthServiceAcctEmail", clientEmail) + .append("OAuthPvtKey", authJson.get("private_key").getAsString()) + .append("ServiceAccountImpersonationEmail", clientEmail) + .append("ServiceAccountImpersonationChain", clientEmail + "," + clientEmail) + .toString(); + validateConnection(connection_uri); + } + + @Test + public void testFastQueryPathSmall() throws SQLException { + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 850"; + ResultSet jsonResultSet = bigQueryStatement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(850, resultSetRowCount(jsonResultSet)); + } + + @Test + public void testSmallSelectAndVerifyResults() throws SQLException { + String query = + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` WHERE" + + " repository_name LIKE 'X%' LIMIT 10"; + + ResultSet resultSet = bigQueryStatement.executeQuery(query); + int rowCount = 0; + while (resultSet.next()) { + assertTrue(resultSet.getString(1).startsWith("X")); + rowCount++; + } + assertEquals(10, rowCount); + } + + @Test + // reads without using ReadAPI and makes sure that they are in order, which implies threads worked + // correctly + public void testIterateOrderJsonMultiThread_NoReadApi() throws SQLException { + int expectedCnt = 10000; + String query = String.format(BASE_QUERY, expectedCnt); + ResultSet rs = bigQueryStatementNoReadApi.executeQuery(query); + int cnt = 0; + double oldTriDis = 0.0d; + while (rs.next()) { + double tripDis = rs.getDouble("trip_distance"); + ++cnt; + assertTrue(oldTriDis <= tripDis); + oldTriDis = tripDis; + } + assertEquals(expectedCnt, cnt); // all the records were retrieved + } + + @Test + public void testInvalidQuery() throws SQLException { + String query = "SELECT *"; + + try { + bigQueryStatement.executeQuery(query); + Assert.fail(); + } catch (BigQueryJdbcException e) { + assertTrue(e.getMessage().contains("SELECT * must have a FROM clause")); + } + } + + @Test + public void testDriver() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3"; + + Driver driver = BigQueryDriver.getRegisteredDriver(); + assertTrue(driver.acceptsURL(connection_uri)); + + 
Connection connection = driver.connect(connection_uri, new Properties()); + assertNotNull(connection); + Statement st = connection.createStatement(); + boolean rs = + st.execute("Select * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"); + assertTrue(rs); + connection.close(); + } + + @Test + public void testDefaultDataset() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;DEFAULTDATASET=testDataset"; + + Driver driver = BigQueryDriver.getRegisteredDriver(); + assertTrue(driver.acceptsURL(connection_uri)); + + Connection connection = driver.connect(connection_uri, new Properties()); + assertNotNull(connection); + assertEquals( + DatasetId.of("testDataset"), ((BigQueryConnection) connection).getDefaultDataset()); + + String connection_uri_null_default_dataset = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3"; + + assertTrue(driver.acceptsURL(connection_uri_null_default_dataset)); + + Connection connection2 = driver.connect(connection_uri_null_default_dataset, new Properties()); + assertNotNull(connection2); + assertNull(((BigQueryConnection) connection2).getDefaultDataset()); + connection.close(); + connection2.close(); + } + + @Test + public void testDefaultDatasetWithProject() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;DEFAULTDATASET=" + + PROJECT_ID + + ".testDataset"; + + Driver driver = BigQueryDriver.getRegisteredDriver(); + assertTrue(driver.acceptsURL(connection_uri)); + + Connection connection = driver.connect(connection_uri, new Properties()); + assertNotNull(connection); + assertEquals( + DatasetId.of(PROJECT_ID, "testDataset"), + ((BigQueryConnection) connection).getDefaultDataset()); + connection.close(); + } + + @Test + public void testLocation() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;LOCATION=EU"; + + Driver driver = BigQueryDriver.getRegisteredDriver(); + assertTrue(driver.acceptsURL(connection_uri)); + + Connection connection = driver.connect(connection_uri, new Properties()); + assertEquals(((BigQueryConnection) connection).getLocation(), "EU"); + + Statement statement = connection.createStatement(); + + // Query a dataset in the EU + String query = + "SELECT name FROM `bigquery-public-data.covid19_italy_eu.data_by_province` LIMIT 100"; + ResultSet resultSet = statement.executeQuery(query); + assertEquals(100, resultSetRowCount(resultSet)); + + String connection_uri_null_location = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3"; + + assertTrue(driver.acceptsURL(connection_uri_null_location)); + + Connection connection2 = driver.connect(connection_uri_null_location, new Properties()); + assertNotNull(connection2); + assertNull(((BigQueryConnection) connection2).getLocation()); + connection.close(); + connection2.close(); + } + + @Test + public void testIncorrectLocation() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;LOCATION=europe-west3"; + + Driver driver = BigQueryDriver.getRegisteredDriver(); + + Connection connection = driver.connect(connection_uri, new Properties()); + 
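+ // The connection pins europe-west3 while the public dataset queried below lives in the US, so BigQuery is expected to reject the job.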
assertEquals(((BigQueryConnection) connection).getLocation(), "europe-west3"); + + // Query a dataset in the US + Statement statement = connection.createStatement(); + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"; + BigQueryJdbcException ex = + assertThrows(BigQueryJdbcException.class, () -> statement.executeQuery(query)); + BigQueryError error = ex.getBigQueryException().getError(); + assertNotNull(error); + assertEquals("accessDenied", error.getReason()); + connection.close(); + } + + @Test + public void testCreateStatementWithResultSetHoldabilityUnsupportedTypeForwardOnly() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.createStatement(ResultSet.TYPE_FORWARD_ONLY, 1, 1)); + } + + @Test + public void testCreateStatementWithResultSetHoldabilityUnsupportedConcurReadOnly() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.createStatement(1, ResultSet.CONCUR_READ_ONLY, 1)); + } + + @Test + public void testCreateStatementWithResultSetHoldabilityUnsupportedCloseCursorsAtCommit() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.createStatement(1, 1, ResultSet.CLOSE_CURSORS_AT_COMMIT)); + } + + @Test + public void testCreateStatementWithResultSetConcurrencyUnsupportedTypeForwardOnly() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.createStatement(ResultSet.TYPE_FORWARD_ONLY, 1)); + } + + @Test + public void testCreateStatementWithResultSetConcurrencyUnsupportedConcurReadOnly() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.createStatement(1, ResultSet.CONCUR_READ_ONLY)); + } + + @Test + public void testSetTransactionIsolationToNotSerializableThrowsNotSupported() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryConnection.setTransactionIsolation(Connection.TRANSACTION_SERIALIZABLE + 1)); + } + + @Test + public void testSetHoldabilityForNonCloseCursorsThrowsNotSupported() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> connection.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT + 1)); + connection.close(); + } + + @Test + public void testCreateStatementWhenConnectionClosedThrows() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows(IllegalStateException.class, connection::createStatement); + } + + @Test + public void testCreateStatementWithResultSetHoldabilityWhenConnectionClosedThrows() + throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows(IllegalStateException.class, () -> connection.createStatement(1, 1, 1)); + } + + @Test + public void testCreateStatementWithResultSetConcurrencyWhenConnectionClosedThrows() + throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows(IllegalStateException.class, () -> connection.createStatement(1, 1)); + } + + @Test + public void 
testSetAutoCommitWithClosedConnectionThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.close(); + assertThrows(IllegalStateException.class, () -> connection.setAutoCommit(true)); + } + + @Test + public void testSetCommitToFalseWithoutSessionEnabledThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(connection_uri); + assertThrows(IllegalStateException.class, () -> connection.setAutoCommit(false)); + connection.close(); + } + + @Test + public void testCommitWithConnectionClosedThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.close(); + assertThrows(IllegalStateException.class, connection::commit); + } + + @Test + public void testCommitToFalseWithoutSessionEnabledThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(connection_uri); + assertThrows(IllegalStateException.class, connection::commit); + connection.close(); + } + + @Test + public void testCommitWithNoTransactionStartedThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + assertThrows(IllegalStateException.class, connection::commit); + connection.close(); + } + + @Test + public void testRollbackWithConnectionClosedThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.close(); + assertThrows(IllegalStateException.class, connection::rollback); + } + + @Test + public void testRollbackToFalseWithoutSessionEnabledThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(connection_uri); + assertThrows(IllegalStateException.class, connection::rollback); + connection.close(); + } + + @Test + public void testRollbackWithoutTransactionStartedThrowsIllegalState() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + assertThrows(IllegalStateException.class, connection::rollback); + connection.close(); + } + + @Test + public void testGetLocationWhenConnectionClosedThrows() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows( + IllegalStateException.class, () -> ((BigQueryConnection) connection).getLocation()); + connection.close(); + } + + @Test + public void testGetDefaultDatasetWhenConnectionClosedThrows() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows( + IllegalStateException.class, () -> ((BigQueryConnection) connection).getDefaultDataset()); + } + + @Test + public void testGetAutocommitWhenConnectionClosedThrows() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows(IllegalStateException.class, connection::getAutoCommit); + } + + @Test + public 
void testSetAutocommitWhenConnectionClosedThrows() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + + connection.close(); + assertThrows(IllegalStateException.class, () -> connection.setAutoCommit(true)); + } + + @Test + public void testExecuteUpdate() throws SQLException { + String TABLE_NAME = "JDBC_EXECUTE_UPDATE_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING, `IntegerField` INTEGER);", + DATASET, TABLE_NAME); + String insertQuery = + String.format( + "INSERT INTO %s.%s (StringField, IntegerField) " + + "VALUES ('string1',111 ), ('string2',111 ), ('string3',222 ), ('string4',333 );", + DATASET, TABLE_NAME); + String updateQuery = + String.format( + "UPDATE %s.%s SET StringField='Jane Doe' WHERE IntegerField=111", DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT * FROM %s.%s", DATASET, TABLE_NAME); + + int createStatus = bigQueryStatement.executeUpdate(createQuery); + assertEquals(0, createStatus); + + int insertStatus = bigQueryStatement.executeUpdate(insertQuery); + assertEquals(4, insertStatus); + + bigQueryStatement.executeQuery(selectQuery); + int selectStatus = bigQueryStatement.getUpdateCount(); + assertEquals(-1, selectStatus); + + int updateStatus = bigQueryStatement.executeUpdate(updateQuery); + assertEquals(2, updateStatus); + + int dropStatus = bigQueryStatement.executeUpdate(dropQuery); + assertEquals(0, dropStatus); + + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TABLE_NAME)); + } + + @Test + public void testExecuteQueryWithInsert() throws SQLException { + String TABLE_NAME = "JDBC_EXECUTE_UPDATE_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING, `IntegerField` INTEGER);", + DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + + assertEquals(0, bigQueryStatement.executeUpdate(createQuery)); + assertThrows(BigQueryJdbcException.class, () -> bigQueryStatement.executeQuery(dropQuery)); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TABLE_NAME)); + } + + @Test + public void testExecuteQueryWithMultipleReturns() throws SQLException { + String query = + String.format("SELECT * FROM bigquery-public-data.samples.github_timeline LIMIT 1;"); + + assertThrows(BigQueryJdbcException.class, () -> bigQueryStatement.executeQuery(query + query)); + } + + @Test + public void testExecuteUpdateWithSelect() throws SQLException { + String selectQuery = + String.format("SELECT * FROM bigquery-public-data.samples.github_timeline LIMIT 1;"); + + assertThrows(BigQueryJdbcException.class, () -> bigQueryStatement.executeUpdate(selectQuery)); + } + + @Test + public void testExecuteMethod() throws SQLException { + + String TABLE_NAME = "JDBC_EXECUTE_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING, `IntegerField` INTEGER);", + DATASET, TABLE_NAME); + String insertQuery = + String.format( + "INSERT INTO %s.%s (StringField, IntegerField) " + + "VALUES ('string1',111 ), ('string2',111 ), ('string3',222 ), ('string4',333 );", + DATASET, TABLE_NAME); + String updateQuery = + String.format( + "UPDATE %s.%s SET StringField='Jane Doe' WHERE IntegerField=111", DATASET, TABLE_NAME); + String 
dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT * FROM %s.%s", DATASET, TABLE_NAME); + + boolean createStatus = bigQueryStatement.execute(createQuery); + assertFalse(createStatus); + + boolean insertStatus = bigQueryStatement.execute(insertQuery); + assertFalse(insertStatus); + + boolean selectStatus = bigQueryStatement.execute(selectQuery); + assertTrue(selectStatus); + int selectCount = bigQueryStatement.getUpdateCount(); + assertEquals(-1, selectCount); + ResultSet resultSet = bigQueryStatement.getResultSet(); + assertNotNull(resultSet); + + boolean updateStatus = bigQueryStatement.execute(updateQuery); + assertFalse(updateStatus); + + boolean dropStatus = bigQueryStatement.execute(dropQuery); + assertFalse(dropStatus); + } + + @Test + public void testPreparedExecuteMethod() throws SQLException { + + String TABLE_NAME = "JDBC_PREPARED_EXECUTE_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING, `IntegerField` INTEGER);", + DATASET, TABLE_NAME); + String insertQuery = + String.format( + "INSERT INTO %s.%s (StringField, IntegerField) VALUES (?,?), (?,?), (?,?), (?,?);", + DATASET, TABLE_NAME); + String updateQuery = + String.format("UPDATE %s.%s SET StringField=? WHERE IntegerField=?", DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT ? FROM %s.%s", DATASET, TABLE_NAME); + + boolean createStatus = bigQueryStatement.execute(createQuery); + assertFalse(createStatus); + + PreparedStatement insertStmt = bigQueryConnection.prepareStatement(insertQuery); + insertStmt.setString(1, "String1"); + insertStmt.setInt(2, 111); + insertStmt.setString(3, "String2"); + insertStmt.setInt(4, 222); + insertStmt.setString(5, "String3"); + insertStmt.setInt(6, 333); + insertStmt.setString(7, "String4"); + insertStmt.setInt(8, 444); + + boolean insertStatus = insertStmt.execute(); + assertFalse(insertStatus); + + PreparedStatement selectStmt = bigQueryConnection.prepareStatement(selectQuery); + selectStmt.setString(1, "StringField"); + boolean selectStatus = selectStmt.execute(); + assertTrue(selectStatus); + + int selectCount = selectStmt.getUpdateCount(); + assertEquals(-1, selectCount); + ResultSet resultSet = selectStmt.getResultSet(); + assertNotNull(resultSet); + + PreparedStatement updateStmt = bigQueryConnection.prepareStatement(updateQuery); + updateStmt.setString(1, "Jane Doe"); + updateStmt.setInt(2, 222); + boolean updateStatus = updateStmt.execute(); + assertFalse(updateStatus); + + boolean dropStatus = bigQueryStatement.execute(dropQuery); + assertFalse(dropStatus); + } + + @Test + public void testPreparedStatementThrowsSyntaxError() throws SQLException { + String TABLE_NAME = "JDBC_PREPARED_SYNTAX_ERR_TABLE_" + randomNumber; + String createQuery = + String.format("CREATE OR REPLACE TABLE %s.%s (? STRING, ? 
INTEGER);", DATASET, TABLE_NAME); + + PreparedStatement preparedStatement = bigQueryConnection.prepareStatement(createQuery); + preparedStatement.setString(1, "StringField"); + preparedStatement.setString(2, "IntegerField"); + assertThrows(BigQueryJdbcSqlSyntaxErrorException.class, preparedStatement::execute); + + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TABLE_NAME)); + } + + @Test + public void testPreparedStatementThrowsJdbcException() throws SQLException { + String TABLE_NAME = "JDBC_PREPARED_MISSING_PARAM_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (StringField STRING, IntegerField INTEGER);", + DATASET, TABLE_NAME); + boolean createStatus = bigQueryStatement.execute(createQuery); + assertFalse(createStatus); + + String insertQuery = + String.format( + "INSERT INTO %s.%s (StringField, IntegerField) " + "VALUES (?,?), (?,?);", + DATASET, TABLE_NAME); + PreparedStatement insertStmt = bigQueryConnection.prepareStatement(insertQuery); + insertStmt.setString(1, "String1"); + insertStmt.setInt(2, 111); + assertThrows(BigQueryJdbcException.class, insertStmt::execute); + + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TABLE_NAME)); + } + + @Test + public void testSetFetchDirectionFetchReverseThrowsUnsupported() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryStatement.setFetchDirection(ResultSet.FETCH_REVERSE)); + } + + @Test + public void testSetFetchDirectionFetchUnknownThrowsUnsupported() { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> bigQueryStatement.setFetchDirection(ResultSet.FETCH_UNKNOWN)); + } + + @Test + public void testExecuteBatchQueryTypeSelectThrowsUnsupported() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + String query = + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` WHERE" + + " repository_name LIKE 'X%' LIMIT 10"; + Statement statement = connection.createStatement(); + + assertThrows(IllegalArgumentException.class, () -> statement.addBatch(query)); + connection.close(); + } + + @Test + public void testValidExecuteBatch() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 concurrent limit + int[] results; + for (int i = 0; i < 3; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." 
+ + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + results = statement.executeBatch(); + + // assertions + assertEquals(3, results.length); + for (int updateCount : results) { + assertEquals(1, updateCount); + } + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testAddBatchWithoutSemicolon() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_MISSING_SEMICOLON_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 concurrent limit + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', 4)"; + statement.addBatch(insertQuery); + statement.addBatch(insertQuery); + int[] results = statement.executeBatch(); + + // assertions + assertEquals(2, results.length); + for (int updateCount : results) { + assertEquals(1, updateCount); + } + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testEmptySqlToAddBatch() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EMPTY_EXECUTE_BATCH_TABLE_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 concurrent limit + String emptySql = ""; + statement.addBatch(emptySql); + int[] results = statement.executeBatch(); + + // assertions + assertEquals(0, results.length); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testEmptyExecuteBatch() throws SQLException { + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + int[] result = statement.executeBatch(); + + assertEquals(0, result.length); + connection.close(); + } + + @Test + public void testNonValidStatementTypeForAddBatchThrows() { + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_NON_VALID_TYPE_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + assertThrows( + IllegalArgumentException.class, () -> bigQueryStatement.addBatch(createBatchTable)); + } + + @Test + public void testAllValidStatementTypesForAddBatch() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_ALL_VALID_TYPES_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + 
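+ // The DDL runs on the shared statement outside the batch; addBatch accepts only DML (see testNonValidStatementTypeForAddBatchThrows).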
bigQueryStatement.execute(createBatchTable); + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + "); "; + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 13 WHERE age = %s;", DATASET, BATCH_TABLE, randomNumber); + String deleteQuery = + String.format("DELETE FROM %s.%s WHERE name='Farhan';", DATASET, BATCH_TABLE); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + statement.addBatch(insertQuery); + statement.addBatch(updateQuery); + statement.addBatch(deleteQuery); + int[] results = statement.executeBatch(); + + // assertion + for (int updateCount : results) { + assertEquals(1, updateCount); + } + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testUnsupportedHTAPIFallbacksToStandardQueriesWithRange() throws SQLException { + String selectQuery = "select * from `DATATYPERANGETEST.RangeIntervalTestTable` LIMIT 5000;"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";MaxResults=500;HighThroughputActivationRatio=1;" + + "HighThroughputMinTableSize=100;" + + "EnableHighThroughputAPI=1;UnsupportedHTAPIFallback=1;JobCreationMode=1;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertNotNull(resultSet); + + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + resultSet.next(); + assertEquals(3, resultSetMetaData.getColumnCount()); + connection.close(); + } + + @Test + public void testIntervalDataTypeWithArrowResultSet() throws SQLException { + String selectQuery = + "select * from `DATATYPERANGETEST.RangeIntervalTestTable` order by intColumn limit 5000;"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";MaxResults=500;HighThroughputActivationRatio=1;" + + "HighThroughputMinTableSize=100;" + + "EnableHighThroughputAPI=1;JobCreationMode=1;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + + ResultSet resultSet = statement.executeQuery(selectQuery); + assertTrue(resultSet.getClass().getName().contains("BigQueryArrowResultSet")); + resultSet.next(); + assertEquals("0-0 10 -12:30:0.0", resultSet.getString("intervalField")); + + // cleanup + connection.close(); + } + + @Test + public void testIntervalDataTypeWithJsonResultSet() throws SQLException { + String selectQuery = + "select * from `DATATYPERANGETEST.RangeIntervalTestTable` order by intColumn limit 10 ;"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";MaxResults=500;HighThroughputActivationRatio=1;" + + "HighThroughputMinTableSize=100;" + + "EnableHighThroughputAPI=0;JobCreationMode=1;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + + ResultSet resultSet = statement.executeQuery(selectQuery); + 
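+ // With EnableHighThroughputAPI=0 the driver should serve results over the REST path, hence the JSON-backed result set asserted below.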
assertTrue(resultSet.getClass().getName().contains("BigQueryJsonResultSet")); + resultSet.next(); + assertEquals("0-0 10 -12:30:0", resultSet.getString("intervalField")); + + // cleanup + connection.close(); + } + + @Test + public void testValidLEPEndpointQuery() throws SQLException { + String DATASET = "JDBC_REGIONAL_DATASET"; + String TABLE_NAME = "REGIONAL_TABLE"; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";" + + "EndpointOverrides=BIGQUERY=https://us-east4-bigquery.googleapis.com;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertNotNull(resultSet.getMetaData()); + connection.close(); + } + + @Test + public void testValidEndpointWithInvalidBQPortThrows() throws SQLException { + String TABLE_NAME = "JDBC_REGIONAL_TABLE_" + randomNumber; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";" + + "EndpointOverrides=BIGQUERY=https://us-east4-bigquery.googleapis.com:12312312;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.executeQuery(selectQuery)); + connection.close(); + } + + @Test + public void testLEPEndpointDataNotFoundThrows() throws SQLException { + String DATASET = "JDBC_REGIONAL_DATASET"; + String TABLE_NAME = "REGIONAL_TABLE"; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";" + + "EndpointOverrides=BIGQUERY=https://us-east5-bigquery.googleapis.com;"; + + // Attempting read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.executeQuery(selectQuery)); + connection.close(); + } + + @Test + public void testValidREPEndpointQuery() throws SQLException { + String DATASET = "JDBC_REGIONAL_DATASET"; + String TABLE_NAME = "REGIONAL_TABLE"; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";" + + "EndpointOverrides=BIGQUERY=https://bigquery.us-east4.rep.googleapis.com;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertNotNull(resultSet.getMetaData()); + connection.close(); + } + + @Test + public void testREPEndpointDataNotFoundThrows() throws SQLException { + String DATASET = "JDBC_REGIONAL_DATASET"; + String TABLE_NAME = "REGIONAL_TABLE"; + String selectQuery = "select * from " + DATASET + "." 
+ TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";" + + "EndpointOverrides=BIGQUERY=https://bigquery.us-east7.rep.googleapis.com;"; + + // Attempting read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.executeQuery(selectQuery)); + connection.close(); + } + + @Test + public void testCloseStatement() throws SQLException { + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 10"; + Statement statement = bigQueryConnection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertEquals(10, resultSetRowCount(jsonResultSet)); + statement.close(); + assertTrue(statement.isClosed()); + } + + @Test + public void testCloseableStatementSingleResult() throws SQLException { + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 10"; + Statement statement = bigQueryConnection.createStatement(); + statement.closeOnCompletion(); + assertTrue(statement.isCloseOnCompletion()); + ResultSet jsonResultSet = statement.executeQuery(query); + assertFalse(statement.isClosed()); + jsonResultSet.close(); + assertTrue(statement.isClosed()); + } + + @Test + public void testCloseableStatementMultiResult() throws SQLException { + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 10;"; + Statement statement = bigQueryConnection.createStatement(); + statement.closeOnCompletion(); + assertTrue(statement.isCloseOnCompletion()); + statement.execute(query + query); + assertNotNull(statement.getResultSet()); + assertFalse(statement.isClosed()); + + assertTrue(statement.getMoreResults()); + assertNotNull(statement.getResultSet()); + assertFalse(statement.isClosed()); + + assertFalse(statement.getMoreResults()); + assertTrue(statement.isClosed()); + } + + @Test + public void testCloseableStatementMultiResultExplicitClose() throws SQLException { + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 10;"; + Statement statement = bigQueryConnection.createStatement(); + statement.closeOnCompletion(); + assertTrue(statement.isCloseOnCompletion()); + statement.execute(query + query); + ResultSet result = statement.getResultSet(); + result.close(); + assertFalse(statement.isClosed()); + + assertTrue(statement.getMoreResults()); + result = statement.getResultSet(); + result.close(); + assertTrue(statement.isClosed()); + } + + @Test + public void testConnectionIsValid() throws SQLException { + assertTrue(bigQueryConnection.isValid(10)); + assertTrue(bigQueryConnectionNoReadApi.isValid(10)); + } + + @Test + public void testDataSource() throws SQLException { + DataSource ds = new DataSource(); + ds.setURL("jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"); + ds.setOAuthType(3); + + try (Connection connection = ds.getConnection()) { + assertFalse(connection.isClosed()); + } + } + + @Test + public void testDataSourceOAuthPvtKeyPath() throws SQLException, IOException { + File tempFile = File.createTempFile("auth", ".json"); + tempFile.deleteOnExit(); + DataSource ds = new DataSource(); + ds.setURL("jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;"); + ds.setOAuthType(0); + ds.setOAuthPvtKeyPath(tempFile.toPath().toString()); + assertEquals(0, ds.getOAuthType().intValue()); + 
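+ // Setter/getter round-trip only: the placeholder key file is never used to open a connection.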
assertEquals(tempFile.toPath().toString(), ds.getOAuthPvtKeyPath()); + } + + @Test + public void testPreparedStatementSmallSelect() throws SQLException { + String query = + "SELECT * FROM `bigquery-public-data.samples.github_timeline` where repository_language=?" + + " LIMIT 1000"; + PreparedStatement preparedStatement = bigQueryConnection.prepareStatement(query); + preparedStatement.setString(1, "Java"); + + ResultSet jsonResultSet = preparedStatement.executeQuery(); + + int rowCount = resultSetRowCount(jsonResultSet); + assertEquals(1000, rowCount); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + } + + @Test + public void testPreparedStatementExecuteUpdate() throws SQLException { + Random random = new Random(); + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME1 = "Inventory" + random.nextInt(9999); + String TABLE_NAME2 = "DetailedInventory" + random.nextInt(9999); + + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER);", + DATASET, TABLE_NAME1); + + String createQuery2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER," + + " `supply_constrained` BOOLEAN, `comment` STRING);", + DATASET, TABLE_NAME2); + + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (product, quantity, supply_constrained, comment) " + + "VALUES ('countertop microwave', 20, NULL,'[]' )," + + " ('front load washer', 20, false,'[]' ), " + + " ('microwave', 20, false,'[]' ), " + + " ('refrigerator', 10, false,'[]' );", + DATASET, TABLE_NAME2); + + bigQueryStatement.execute(createQuery); + bigQueryStatement.execute(createQuery2); + bigQueryStatement.execute(insertQuery2); + + String insertQuery = + String.format( + "INSERT INTO %s.%s (product, quantity) " + "VALUES (?,? ), (?,? );", + DATASET, TABLE_NAME1); + PreparedStatement insertPs = bigQueryConnection.prepareStatement(insertQuery); + insertPs.setString(1, "dishwasher"); + insertPs.setInt(2, 30); + insertPs.setString(3, "dryer"); + insertPs.setInt(4, 30); + + int insertStatus = insertPs.executeUpdate(); + assertEquals(2, insertStatus); + + String updateQuery = + String.format("UPDATE %s.%s SET quantity=? WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement updatePs = bigQueryConnection.prepareStatement(updateQuery); + updatePs.setString(2, "dryer"); + updatePs.setInt(1, 35); + + int updateStatus = updatePs.executeUpdate(); + assertEquals(1, updateStatus); + + String deleteQuery = String.format("DELETE FROM %s.%s WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement deletePs = bigQueryConnection.prepareStatement(deleteQuery); + deletePs.setString(1, "dishwasher"); + + int deleteStatus = deletePs.executeUpdate(); + assertEquals(1, deleteStatus); + + String mergeQuery = + String.format( + "MERGE %s.%s T\n" + + "USING %s.%s S\n" + + "ON T.product = S.product\n" + + "WHEN NOT MATCHED AND quantity < ? 
THEN\n" + + " INSERT(product, quantity, supply_constrained, comment)\n" + + " VALUES(product, quantity, true, ?)\n" + + "WHEN NOT MATCHED THEN\n" + + " INSERT(product, quantity, supply_constrained)\n" + + " VALUES(product, quantity, false)", + DATASET, TABLE_NAME2, DATASET, TABLE_NAME1); + PreparedStatement mergePs = bigQueryConnection.prepareStatement(mergeQuery); + mergePs.setInt(1, 20); + mergePs.setString(2, "comment" + random.nextInt(999)); + + int mergeStatus = mergePs.executeUpdate(); + assertEquals(1, mergeStatus); + + ResultSet rs = + bigQueryStatement.executeQuery( + String.format("SELECT COUNT(*) AS row_count\n" + "FROM %s.%s", DATASET, TABLE_NAME2)); + rs.next(); + assertEquals(5, rs.getInt(1)); + + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME1); + int dropStatus = bigQueryStatement.executeUpdate(dropQuery); + assertEquals(0, dropStatus); + bigQueryStatement.execute(String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME2)); + } + + @Test + public void testPreparedStatementDateTimeValues() throws SQLException { + Random random = new Random(); + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME1 = "DateTimeTestTable" + random.nextInt(9999); + + final String createTableQuery = + "CREATE OR REPLACE TABLE " + + " `%s.%s` " + + " (\n" + + "`StringField` STRING,\n" + + "`IntegerField` INTEGER,\n" + + "`TimestampField` TIMESTAMP,\n" + + "`TimeField` TIME,\n" + + "`DateField` DATE\n" + + ");"; + + String insertQuery = + String.format("INSERT INTO %s.%s VALUES (?,?,?,?,? );", DATASET, TABLE_NAME1); + + bigQueryStatement.execute(String.format(createTableQuery, DATASET, TABLE_NAME1)); + + PreparedStatement insertPs = bigQueryConnection.prepareStatement(insertQuery); + insertPs.setString(1, "dishwasher"); + insertPs.setInt(2, 1); + insertPs.setTimestamp(3, Timestamp.from(Instant.now())); + insertPs.setTime(4, Time.valueOf(LocalTime.NOON)); + insertPs.setDate(5, Date.valueOf("2025-12-3")); + + int insertStatus = insertPs.executeUpdate(); + assertEquals(1, insertStatus); + + ResultSet rs = + bigQueryStatement.executeQuery( + String.format("SELECT COUNT(*) AS row_count\n" + "FROM %s.%s", DATASET, TABLE_NAME1)); + rs.next(); + assertEquals(1, rs.getInt(1)); + + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME1); + int dropStatus = bigQueryStatement.executeUpdate(dropQuery); + assertEquals(0, dropStatus); + } + + @Test + public void testValidDestinationTableSavesQueriesWithLegacySQL() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;" + + "AllowLargeResults=1;" + + "LargeResultTable=destination_table_test_legacy;" + + "LargeResultDataset=INTEGRATION_TESTS;"; + String selectLegacyQuery = + "SELECT * FROM [bigquery-public-data.deepmind_alphafold.metadata] LIMIT 200;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String selectQuery = "SELECT * FROM `INTEGRATION_TESTS.destination_table_test_legacy`;"; + ResultSet actualResultSet = bigQueryStatement.executeQuery(selectQuery); + assertTrue(0 < resultSetRowCount(actualResultSet)); + + // clean up + String deleteRows = "DELETE FROM 
`INTEGRATION_TESTS.destination_table_test_legacy` WHERE 1=1;"; + bigQueryStatement.execute(deleteRows); + connection.close(); + } + + @Test + public void testValidDestinationTableSavesQueriesWithStandardSQL() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=SQL;" + + "LargeResultTable=destination_table_test;" + + "LargeResultDataset=INTEGRATION_TESTS;"; + String selectLegacyQuery = + "SELECT * FROM `bigquery-public-data.deepmind_alphafold.metadata` LIMIT 200;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String selectQuery = "SELECT * FROM INTEGRATION_TESTS.destination_table_test;"; + ResultSet actualResultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(200, resultSetRowCount(actualResultSet)); + + // clean up + String deleteRows = "DELETE FROM `INTEGRATION_TESTS.destination_table_test` WHERE 1=1;"; + bigQueryStatement.execute(deleteRows); + connection.close(); + } + + @Test + public void testDestinationTableAndDestinationDatasetThatDoesNotExistsCreates() + throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;" + + "AllowLargeResults=1;" + + "LargeResultTable=FakeTable;" + + "LargeResultDataset=FakeDataset;"; + String selectLegacyQuery = + "SELECT * FROM [bigquery-public-data.deepmind_alphafold.metadata] LIMIT 200;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String separateQuery = "SELECT * FROM FakeDataset.FakeTable;"; + boolean result = bigQueryStatement.execute(separateQuery); + assertTrue(result); + + // clean up + bigQueryStatement.execute("DROP SCHEMA FakeDataset CASCADE;"); + connection.close(); + } + + @Test + public void testDestinationTableWithMissingDestinationDatasetDefaults() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;" + + "AllowLargeResults=1;" + + "LargeResultTable=FakeTable;"; + String selectLegacyQuery = + "SELECT * FROM [bigquery-public-data.deepmind_alphafold.metadata] LIMIT 200;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String separateQuery = "SELECT * FROM _google_jdbc.FakeTable;"; + boolean result = bigQueryStatement.execute(separateQuery); + assertTrue(result); + connection.close(); + } + + @Test + public void testNonSelectForLegacyDestinationTableThrows() throws SQLException { + // setup + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + 
String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;" + + "AllowLargeResults=1;" + + "LargeResultTable=destination_table_test;" + + "LargeResultDataset=INTEGRATION_TESTS;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act & assertion + assertThrows(BigQueryJdbcException.class, () -> statement.execute(createTransactionTable)); + connection.close(); + } + + @Test + public void testNonSelectForStandardDestinationTableDoesNotThrow() throws SQLException { + // setup + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=SQL;" + + "AllowLargeResults=1;" + + "LargeResultTable=destination_table_test;" + + "LargeResultDataset=INTEGRATION_TESTS;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act & assertion + statement.execute(createTransactionTable); + connection.close(); + } + + @Test + public void testTableConstraints() throws SQLException { + ResultSet primaryKey1 = + bigQueryConnection + .getMetaData() + .getPrimaryKeys(PROJECT_ID, CONSTRAINTS_DATASET, CONSTRAINTS_TABLE_NAME); + primaryKey1.next(); + assertEquals("id", primaryKey1.getString(4)); + assertFalse(primaryKey1.next()); + + ResultSet primaryKey2 = + bigQueryConnection + .getMetaData() + .getPrimaryKeys(PROJECT_ID, CONSTRAINTS_DATASET, CONSTRAINTS_TABLE_NAME2); + primaryKey2.next(); + assertEquals("first_name", primaryKey2.getString(4)); + primaryKey2.next(); + assertEquals("last_name", primaryKey2.getString(4)); + assertFalse(primaryKey2.next()); + + ResultSet foreignKeys = + bigQueryConnection + .getMetaData() + .getImportedKeys(PROJECT_ID, CONSTRAINTS_DATASET, CONSTRAINTS_TABLE_NAME); + foreignKeys.next(); + assertEquals(CONSTRAINTS_TABLE_NAME2, foreignKeys.getString(3)); + assertEquals("first_name", foreignKeys.getString(4)); + assertEquals("name", foreignKeys.getString(8)); + foreignKeys.next(); + assertEquals(CONSTRAINTS_TABLE_NAME2, foreignKeys.getString(3)); + assertEquals("last_name", foreignKeys.getString(4)); + assertEquals("second_name", foreignKeys.getString(8)); + foreignKeys.next(); + assertEquals(CONSTRAINTS_TABLE_NAME3, foreignKeys.getString(3)); + assertEquals("address", foreignKeys.getString(4)); + assertEquals("address", foreignKeys.getString(8)); + assertFalse(foreignKeys.next()); + + ResultSet crossReference = + bigQueryConnection + .getMetaData() + .getCrossReference( + PROJECT_ID, + CONSTRAINTS_DATASET, + CONSTRAINTS_TABLE_NAME2, + PROJECT_ID, + CONSTRAINTS_DATASET, + CONSTRAINTS_TABLE_NAME); + crossReference.next(); + assertEquals(CONSTRAINTS_TABLE_NAME2, crossReference.getString(3)); + assertEquals("first_name", crossReference.getString(4)); + assertEquals("name", crossReference.getString(8)); + crossReference.next(); + 
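+ // Second row of the cross reference: the composite key's last_name column maps to second_name.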
assertEquals("last_name", crossReference.getString(4)); + assertEquals("second_name", crossReference.getString(8)); + assertFalse(crossReference.next()); + } + + @Test + public void testDatabaseMetadataGetCatalogs() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + try (ResultSet rs = databaseMetaData.getCatalogs()) { + assertNotNull("ResultSet from getCatalogs() should not be null", rs); + + ResultSetMetaData rsmd = rs.getMetaData(); + assertNotNull("ResultSetMetaData should not be null", rsmd); + assertEquals("Should have one column", 1, rsmd.getColumnCount()); + assertEquals("Column name should be TABLE_CAT", "TABLE_CAT", rsmd.getColumnName(1)); + + assertTrue("ResultSet should have one row", rs.next()); + assertEquals("Catalog name should match Project ID", PROJECT_ID, rs.getString("TABLE_CAT")); + assertFalse("ResultSet should have no more rows", rs.next()); + } + } + + @Test + public void testDatabaseMetadataGetProcedures() throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String procedureName = "create_customer"; + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + ResultSet resultSet = databaseMetaData.getProcedures(PROJECT_ID, DATASET, procedureName); + while (resultSet.next()) { + assertEquals(PROJECT_ID, resultSet.getString("PROCEDURE_CAT")); + assertEquals(DATASET, resultSet.getString("PROCEDURE_SCHEM")); + assertEquals(procedureName, resultSet.getString("PROCEDURE_NAME")); + assertEquals(procedureName, resultSet.getString("SPECIFIC_NAME")); + assertEquals(DatabaseMetaData.procedureResultUnknown, resultSet.getInt("PROCEDURE_TYPE")); + } + } + + @Test + public void testDatabaseMetadataGetProcedureColumns() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + + // --- Test Case 1: Specific schema and procedure, null column name pattern --- + String specificSchema = "JDBC_INTEGRATION_DATASET"; + String specificProcedure = "create_customer"; + ResultSet resultSet = + databaseMetaData.getProcedureColumns(PROJECT_ID, specificSchema, specificProcedure, null); + int specificProcRows = 0; + boolean foundNameParam = false; + boolean foundIdParam = false; + while (resultSet.next()) { + specificProcRows++; + assertEquals(PROJECT_ID, resultSet.getString("PROCEDURE_CAT")); + assertEquals(specificSchema, resultSet.getString("PROCEDURE_SCHEM")); + assertEquals(specificProcedure, resultSet.getString("PROCEDURE_NAME")); + assertEquals(specificProcedure, resultSet.getString("SPECIFIC_NAME")); + if ("name".equals(resultSet.getString("COLUMN_NAME"))) { + foundNameParam = true; + assertEquals(1, resultSet.getInt("ORDINAL_POSITION")); + } + if ("id".equals(resultSet.getString("COLUMN_NAME"))) { + foundIdParam = true; + assertEquals(2, resultSet.getInt("ORDINAL_POSITION")); + } + } + assertEquals("Should find 2 parameters for " + specificProcedure, 2, specificProcRows); + assertTrue("Parameter 'name' should be found", foundNameParam); + assertTrue("Parameter 'id' should be found", foundIdParam); + resultSet.close(); + + // --- Test Case 2: Specific schema, procedure, and column name pattern --- + String specificColumn = "name"; + resultSet = + databaseMetaData.getProcedureColumns( + PROJECT_ID, specificSchema, specificProcedure, specificColumn); + assertTrue("Should find the specific column 'name'", resultSet.next()); + assertEquals(PROJECT_ID, resultSet.getString("PROCEDURE_CAT")); + assertEquals(specificSchema, resultSet.getString("PROCEDURE_SCHEM")); + 
assertEquals(specificProcedure, resultSet.getString("PROCEDURE_NAME")); + assertEquals(specificColumn, resultSet.getString("COLUMN_NAME")); + assertEquals(1, resultSet.getInt("ORDINAL_POSITION")); + assertEquals( + (short) DatabaseMetaData.procedureColumnUnknown, resultSet.getShort("COLUMN_TYPE")); + assertEquals(java.sql.Types.NVARCHAR, resultSet.getInt("DATA_TYPE")); + assertEquals("NVARCHAR", resultSet.getString("TYPE_NAME")); + assertFalse("Should only find one row for exact column match", resultSet.next()); + resultSet.close(); + + // --- Test Case 3: Non-existent procedure --- + resultSet = + databaseMetaData.getProcedureColumns( + PROJECT_ID, specificSchema, "non_existent_procedure_xyz", null); + assertFalse("Should not find columns for a non-existent procedure", resultSet.next()); + resultSet.close(); + } + + @Test + public void testDatabaseMetadataGetColumns() throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_DATATYPES_INTEGRATION_TEST_TABLE"; + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + + // --- Test Case 1: Specific Column (StringField) --- + ResultSet resultSet = + databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "StringField"); + + assertTrue(resultSet.next()); + assertEquals(PROJECT_ID, resultSet.getString("TABLE_CAT")); + assertEquals(DATASET, resultSet.getString("TABLE_SCHEM")); + assertEquals(TABLE_NAME, resultSet.getString("TABLE_NAME")); + assertEquals("StringField", resultSet.getString("COLUMN_NAME")); + assertEquals("NVARCHAR", resultSet.getString("TYPE_NAME")); + resultSet.getObject("COLUMN_SIZE"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(6, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 2: All Columns --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, null); + assertTrue(resultSet.next()); + int count = 0; + do { + count++; + assertEquals(PROJECT_ID, resultSet.getString("TABLE_CAT")); + assertEquals(DATASET, resultSet.getString("TABLE_SCHEM")); + assertEquals(TABLE_NAME, resultSet.getString("TABLE_NAME")); + assertNotNull(resultSet.getString("COLUMN_NAME")); + } while (resultSet.next()); + assertEquals(16, count); + + // --- Test Case 3: Column Name Pattern Matching (%Time%) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "%Time%"); + assertTrue(resultSet.next()); + count = 0; + do { + count++; + String columnName = resultSet.getString("COLUMN_NAME"); + assertTrue(columnName.contains("Time")); + } while (resultSet.next()); + assertEquals(3, count); + + // --- Test Case 4: Column Name Pattern Matching (Integer%) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "Integer%"); + assertTrue(resultSet.next()); + assertEquals("IntegerField", resultSet.getString("COLUMN_NAME")); + assertEquals("BIGINT", resultSet.getString("TYPE_NAME")); + assertEquals(19, resultSet.getInt("COLUMN_SIZE")); + assertEquals(0, resultSet.getInt("DECIMAL_DIGITS")); + assertEquals(10, resultSet.getInt("NUM_PREC_RADIX")); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(2, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 5: Specific Column (BooleanField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "BooleanField"); + assertTrue(resultSet.next()); +
assertEquals("BooleanField", resultSet.getString("COLUMN_NAME")); + assertEquals("BOOLEAN", resultSet.getString("TYPE_NAME")); + assertEquals(1, resultSet.getInt("COLUMN_SIZE")); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(1, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 6: Specific Column (NumericField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "NumericField"); + assertTrue(resultSet.next()); + assertEquals("NumericField", resultSet.getString("COLUMN_NAME")); + assertEquals("NUMERIC", resultSet.getString("TYPE_NAME")); + assertEquals(38, resultSet.getInt("COLUMN_SIZE")); + assertEquals(9, resultSet.getInt("DECIMAL_DIGITS")); + assertEquals(10, resultSet.getInt("NUM_PREC_RADIX")); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(4, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 7: Specific Column (BytesField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "BytesField"); + assertTrue(resultSet.next()); + assertEquals("BytesField", resultSet.getString("COLUMN_NAME")); + assertEquals("VARBINARY", resultSet.getString("TYPE_NAME")); + resultSet.getObject("COLUMN_SIZE"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(7, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 8: Specific Column (ArrayField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "ArrayField"); + assertTrue(resultSet.next()); + assertEquals("ArrayField", resultSet.getString("COLUMN_NAME")); + assertEquals("ARRAY", resultSet.getString("TYPE_NAME")); + resultSet.getObject("COLUMN_SIZE"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(9, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 9: Specific Column (TimestampField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "TimestampField"); + assertTrue(resultSet.next()); + assertEquals("TimestampField", resultSet.getString("COLUMN_NAME")); + assertEquals("TIMESTAMP", resultSet.getString("TYPE_NAME")); + assertEquals(29, resultSet.getInt("COLUMN_SIZE")); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(10, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 10: Specific Column (DateField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "DateField"); + assertTrue(resultSet.next()); + assertEquals("DateField", resultSet.getString("COLUMN_NAME")); + assertEquals("DATE", resultSet.getString("TYPE_NAME")); + assertEquals(10, resultSet.getInt("COLUMN_SIZE")); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + 
resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(11, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 11: Specific Column (TimeField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "TimeField"); + assertTrue(resultSet.next()); + assertEquals("TimeField", resultSet.getString("COLUMN_NAME")); + assertEquals("TIME", resultSet.getString("TYPE_NAME")); + assertEquals(15, resultSet.getInt("COLUMN_SIZE")); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(12, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 12: Specific Column (DateTimeField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "DateTimeField"); + assertTrue(resultSet.next()); + assertEquals("DateTimeField", resultSet.getString("COLUMN_NAME")); + assertEquals("TIMESTAMP", resultSet.getString("TYPE_NAME")); + assertEquals(29, resultSet.getInt("COLUMN_SIZE")); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(13, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + + // --- Test Case 13: Specific Column (GeographyField) --- + resultSet = databaseMetaData.getColumns(PROJECT_ID, DATASET, TABLE_NAME, "GeographyField"); + assertTrue(resultSet.next()); + assertEquals("GeographyField", resultSet.getString("COLUMN_NAME")); + assertEquals("VARCHAR", resultSet.getString("TYPE_NAME")); + resultSet.getObject("COLUMN_SIZE"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("DECIMAL_DIGITS"); + assertTrue(resultSet.wasNull()); + resultSet.getObject("NUM_PREC_RADIX"); + assertTrue(resultSet.wasNull()); + assertEquals(1, resultSet.getInt("NULLABLE")); + assertEquals(14, resultSet.getInt("ORDINAL_POSITION")); + assertFalse(resultSet.next()); + } + + @Test + public void testDatabaseMetadataGetTables() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + String DATASET = "JDBC_TABLE_TYPES_TEST"; + + // --- Test Case 1: Get all tables (types = null) --- + ResultSet rsAll = databaseMetaData.getTables(PROJECT_ID, DATASET, null, null); + Set allTableNames = new HashSet<>(); + while (rsAll.next()) { + allTableNames.add(rsAll.getString("TABLE_NAME")); + } + assertTrue(allTableNames.contains("base_table")); + assertTrue(allTableNames.contains("my_view")); + assertTrue(allTableNames.contains("external_table")); + assertTrue(allTableNames.contains("my_materialized_view")); + assertTrue(allTableNames.contains("base_table_clone")); + assertTrue(allTableNames.contains("base_table_snapshot")); + assertEquals(6, allTableNames.size()); + + // --- Test Case 2: Get only "TABLE" type --- + ResultSet rsTable = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"TABLE"}); + Set tableNames = new HashSet<>(); + while (rsTable.next()) { + tableNames.add(rsTable.getString("TABLE_NAME")); + } + assertTrue(tableNames.contains("base_table")); + assertTrue(tableNames.contains("base_table_clone")); + assertEquals(2, tableNames.size()); + + // --- Test Case 3: Get "VIEW" type --- + ResultSet rsView = 
databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"VIEW"}); + assertTrue(rsView.next()); + assertEquals("my_view", rsView.getString("TABLE_NAME")); + assertEquals("VIEW", rsView.getString("TABLE_TYPE")); + assertFalse(rsView.next()); + + // --- Test Case 4: Get "EXTERNAL" type --- + ResultSet rsExternal = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"EXTERNAL"}); + assertTrue(rsExternal.next()); + assertEquals("external_table", rsExternal.getString("TABLE_NAME")); + assertEquals("EXTERNAL", rsExternal.getString("TABLE_TYPE")); + assertFalse(rsExternal.next()); + + // --- Test Case 5: Get "MATERIALIZED_VIEW" type --- + ResultSet rsMaterialized = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"MATERIALIZED_VIEW"}); + assertTrue(rsMaterialized.next()); + assertEquals("my_materialized_view", rsMaterialized.getString("TABLE_NAME")); + assertEquals("MATERIALIZED_VIEW", rsMaterialized.getString("TABLE_TYPE")); + assertFalse(rsMaterialized.next()); + + // --- Test Case 6: Get "SNAPSHOT" type --- + ResultSet rsSnapshot = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"SNAPSHOT"}); + assertTrue(rsSnapshot.next()); + assertEquals("base_table_snapshot", rsSnapshot.getString("TABLE_NAME")); + assertEquals("SNAPSHOT", rsSnapshot.getString("TABLE_TYPE")); + assertFalse(rsSnapshot.next()); + + // --- Test Case 7: Get multiple types ("TABLE" and "VIEW") --- + ResultSet rsMulti = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {"TABLE", "VIEW"}); + Set<String> multiTableNames = new HashSet<>(); + while (rsMulti.next()) { + multiTableNames.add(rsMulti.getString("TABLE_NAME")); + } + assertTrue(multiTableNames.contains("base_table")); + assertTrue(multiTableNames.contains("base_table_clone")); + assertTrue(multiTableNames.contains("my_view")); + assertEquals(3, multiTableNames.size()); + + // --- Test Case 8: tableNamePattern --- + ResultSet rsNamePattern = databaseMetaData.getTables(PROJECT_ID, DATASET, "base%", null); + Set<String> baseTableNames = new HashSet<>(); + while (rsNamePattern.next()) { + baseTableNames.add(rsNamePattern.getString("TABLE_NAME")); + } + assertTrue(baseTableNames.contains("base_table")); + assertTrue(baseTableNames.contains("base_table_clone")); + assertTrue(baseTableNames.contains("base_table_snapshot")); + assertEquals(3, baseTableNames.size()); + + // --- Test Case 9: No matching table --- + ResultSet rsNoMatch = + databaseMetaData.getTables(PROJECT_ID, DATASET, "nonexistent_table", null); + assertFalse(rsNoMatch.next()); + + // --- Test Case 10: Null type in array --- + ResultSet rsNullType = + databaseMetaData.getTables(PROJECT_ID, DATASET, null, new String[] {null, "VIEW"}); + assertTrue(rsNullType.next()); + assertEquals("VIEW", rsNullType.getString("TABLE_TYPE")); + assertEquals("my_view", rsNullType.getString("TABLE_NAME")); + assertFalse(rsNullType.next()); + } + + @Test + public void testDatabaseMetadataGetSchemas() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + + // Test case 1: Get all schemas with catalog and check for the presence of specific schemas + ResultSet rsAll = databaseMetaData.getSchemas(PROJECT_ID, null); + Set<String> actualSchemas = new HashSet<>(); + while (rsAll.next()) { + assertEquals(PROJECT_ID, rsAll.getString("TABLE_CATALOG")); + actualSchemas.add(rsAll.getString("TABLE_SCHEM")); + } + assertTrue(actualSchemas.contains("JDBC_INTEGRATION_DATASET")); +
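+ // Other datasets may exist in the project, so these checks assert membership of known test datasets rather than an exact count.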
assertTrue(actualSchemas.contains("JDBC_TABLE_TYPES_TEST")); + assertTrue(actualSchemas.contains("ODBC_TEST_DATASET")); + + // Test case 2: Get schemas with catalog and schemaPattern matching "JDBC_NIGHTLY_IT_DATASET" + ResultSet rsPattern = databaseMetaData.getSchemas(PROJECT_ID, "JDBC_NIGHTLY_IT_DATASET"); + Set actualSchemasPattern = new HashSet<>(); + while (rsPattern.next()) { + assertEquals(PROJECT_ID, rsPattern.getString("TABLE_CATALOG")); + actualSchemasPattern.add(rsPattern.getString("TABLE_SCHEM")); + } + assertTrue(actualSchemasPattern.contains("JDBC_NIGHTLY_IT_DATASET")); + assertEquals(1, actualSchemasPattern.size()); + + // Test case 3: Get schemas with catalog and schemaPattern matching "nonexistent" + ResultSet rsNoMatch = databaseMetaData.getSchemas(PROJECT_ID, "nonexistent"); + assertFalse(rsNoMatch.next()); + + // Test case 4: Get schemas with non-existent catalog + rsNoMatch = databaseMetaData.getSchemas("invalid-catalog", null); + assertFalse(rsNoMatch.next()); + } + + @Test + public void testDatabaseMetadataGetSchemasNoArgs() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + String expectedCatalog = bigQueryConnection.getCatalog(); + assertNotNull("Project ID (catalog) from connection should not be null", expectedCatalog); + + // Test case: Get all schemas (datasets) for the current project + try (ResultSet rsAll = databaseMetaData.getSchemas()) { + assertNotNull("ResultSet from getSchemas() should not be null", rsAll); + boolean foundTestDataset = false; + int rowCount = 0; + while (rsAll.next()) { + rowCount++; + assertEquals( + "TABLE_CATALOG should match the connection's project ID", + expectedCatalog, + rsAll.getString("TABLE_CATALOG")); + String schemaName = rsAll.getString("TABLE_SCHEM"); + assertNotNull("TABLE_SCHEM should not be null", schemaName); + if (DATASET.equals(schemaName) + || DATASET2.equals(schemaName) + || CONSTRAINTS_DATASET.equals(schemaName) + || "JDBC_TABLE_TYPES_TEST".equals(schemaName) + || "JDBC_INTEGRATION_DATASET".equals(schemaName)) { + foundTestDataset = true; + } + } + assertTrue("At least one of the known test datasets should be found", foundTestDataset); + assertTrue("Should retrieve at least one schema/dataset", rowCount > 0); + } + } + + @Test + public void testDatabaseMetaDataGetFunctions() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + String testSchema = "JDBC_TABLE_TYPES_TEST"; + String testCatalog = PROJECT_ID; + + Set expectedFunctionNames = + new HashSet<>( + Arrays.asList( + "complex_scalar_sql_udf", + "persistent_sql_udf_named_params", + "scalar_js_udf", + "scalar_sql_udf")); + + // Test 1: Get all functions from a specific schema + ResultSet rsAll = databaseMetaData.getFunctions(testCatalog, testSchema, null); + Set foundFunctionNames = new HashSet<>(); + int countAll = 0; + while (rsAll.next()) { + countAll++; + assertEquals(testCatalog, rsAll.getString("FUNCTION_CAT")); + assertEquals(testSchema, rsAll.getString("FUNCTION_SCHEM")); + String funcName = rsAll.getString("FUNCTION_NAME"); + foundFunctionNames.add(funcName); + assertNull(rsAll.getString("REMARKS")); + assertEquals(DatabaseMetaData.functionResultUnknown, rsAll.getShort("FUNCTION_TYPE")); + assertEquals(funcName, rsAll.getString("SPECIFIC_NAME")); + } + assertEquals( + "Should find all " + expectedFunctionNames.size() + " functions in " + testSchema, + expectedFunctionNames.size(), + countAll); + assertEquals(expectedFunctionNames, foundFunctionNames); + 
rsAll.close(); + + // Test 2: Get a specific function using functionNamePattern + String specificFunctionName = "scalar_sql_udf"; + ResultSet rsSpecific = + databaseMetaData.getFunctions(testCatalog, testSchema, specificFunctionName); + assertTrue("Should find the specific function " + specificFunctionName, rsSpecific.next()); + assertEquals(testCatalog, rsSpecific.getString("FUNCTION_CAT")); + assertEquals(testSchema, rsSpecific.getString("FUNCTION_SCHEM")); + assertEquals(specificFunctionName, rsSpecific.getString("FUNCTION_NAME")); + assertNull(rsSpecific.getString("REMARKS")); + assertEquals(DatabaseMetaData.functionResultUnknown, rsSpecific.getShort("FUNCTION_TYPE")); + assertEquals(specificFunctionName, rsSpecific.getString("SPECIFIC_NAME")); + assertFalse("Should only find one row for exact function match", rsSpecific.next()); + rsSpecific.close(); + + // Test 3: Get functions using a wildcard functionNamePattern "scalar%" + // Expected order due to sorting: scalar_js_udf, scalar_sql_udf + ResultSet rsWildcard = databaseMetaData.getFunctions(testCatalog, testSchema, "scalar%"); + assertTrue("Should find functions matching 'scalar%'", rsWildcard.next()); + assertEquals("scalar_js_udf", rsWildcard.getString("FUNCTION_NAME")); + assertEquals(DatabaseMetaData.functionResultUnknown, rsWildcard.getShort("FUNCTION_TYPE")); + + assertTrue("Should find the second function matching 'scalar%'", rsWildcard.next()); + assertEquals("scalar_sql_udf", rsWildcard.getString("FUNCTION_NAME")); + assertEquals(DatabaseMetaData.functionResultUnknown, rsWildcard.getShort("FUNCTION_TYPE")); + assertFalse("Should be no more functions matching 'scalar%'", rsWildcard.next()); + rsWildcard.close(); + + // Test 4: Schema pattern with wildcard + ResultSet rsSchemaWildcard = + databaseMetaData.getFunctions(testCatalog, "JDBC_TABLE_TYPES_T%", "complex_scalar_sql_udf"); + assertTrue("Should find function with schema wildcard", rsSchemaWildcard.next()); + assertEquals(testSchema, rsSchemaWildcard.getString("FUNCTION_SCHEM")); + assertEquals("complex_scalar_sql_udf", rsSchemaWildcard.getString("FUNCTION_NAME")); + assertFalse( + "Should only find one row for this schema wildcard and specific function", + rsSchemaWildcard.next()); + rsSchemaWildcard.close(); + + // Test 5: Non-existent function + ResultSet rsNonExistentFunc = + databaseMetaData.getFunctions(testCatalog, testSchema, "non_existent_function_xyz123"); + assertFalse("Should not find a non-existent function", rsNonExistentFunc.next()); + rsNonExistentFunc.close(); + + // Test 6: Non-existent schema + ResultSet rsNonExistentSchema = + databaseMetaData.getFunctions(testCatalog, "NON_EXISTENT_SCHEMA_XYZ123", null); + assertFalse("Should not find functions in a non-existent schema", rsNonExistentSchema.next()); + rsNonExistentSchema.close(); + + // Test 7: Empty schema pattern + ResultSet rsEmptySchema = databaseMetaData.getFunctions(testCatalog, "", null); + assertFalse("Empty schema pattern should return no results", rsEmptySchema.next()); + rsEmptySchema.close(); + + // Test 8: Empty function name pattern + ResultSet rsEmptyFunction = databaseMetaData.getFunctions(testCatalog, testSchema, ""); + assertFalse("Empty function name pattern should return no results", rsEmptyFunction.next()); + rsEmptyFunction.close(); + + // Test 9: Null catalog + ResultSet rsNullCatalog = databaseMetaData.getFunctions(null, testSchema, null); + assertFalse("Null catalog should return no results", rsNullCatalog.next()); + rsNullCatalog.close(); + } + + @Test + public void 
testDatabaseMetadataGetFunctionColumns() throws SQLException { + DatabaseMetaData databaseMetaData = bigQueryConnection.getMetaData(); + String testCatalog = PROJECT_ID; + String testSchema = "JDBC_TABLE_TYPES_TEST"; + + // Test Case 1: Specific function 'scalar_sql_udf', specific column 'x' + String specificFunction1 = "scalar_sql_udf"; + String specificColumn1 = "x"; + ResultSet rs = + databaseMetaData.getFunctionColumns( + testCatalog, testSchema, specificFunction1, specificColumn1); + + assertTrue("Should find column 'x' for function 'scalar_sql_udf'", rs.next()); + assertEquals(testCatalog, rs.getString("FUNCTION_CAT")); + assertEquals(testSchema, rs.getString("FUNCTION_SCHEM")); + assertEquals(specificFunction1, rs.getString("FUNCTION_NAME")); + assertEquals(specificColumn1, rs.getString("COLUMN_NAME")); + assertEquals(DatabaseMetaData.functionColumnUnknown, rs.getShort("COLUMN_TYPE")); + assertEquals(Types.BIGINT, rs.getInt("DATA_TYPE")); + assertEquals("BIGINT", rs.getString("TYPE_NAME")); + assertEquals(19, rs.getInt("PRECISION")); + assertEquals(null, rs.getObject("LENGTH")); + assertTrue(rs.wasNull()); + assertEquals(0, rs.getShort("SCALE")); + assertEquals(10, rs.getShort("RADIX")); + assertEquals(DatabaseMetaData.functionNullableUnknown, rs.getShort("NULLABLE")); + assertNull(rs.getString("REMARKS")); + assertEquals(null, rs.getObject("CHAR_OCTET_LENGTH")); + assertTrue(rs.wasNull()); + assertEquals(1, rs.getInt("ORDINAL_POSITION")); + assertEquals("", rs.getString("IS_NULLABLE")); + assertEquals(specificFunction1, rs.getString("SPECIFIC_NAME")); + assertFalse("Should only find one row for exact column match", rs.next()); + rs.close(); + + // Test Case 2: Specific function 'complex_scalar_sql_udf', specific column 'arr' + String specificFunction2 = "complex_scalar_sql_udf"; + String specificColumn2 = "arr"; + rs = + databaseMetaData.getFunctionColumns( + testCatalog, testSchema, specificFunction2, specificColumn2); + assertTrue("Should find column 'arr' for function 'complex_scalar_sql_udf'", rs.next()); + assertEquals(testCatalog, rs.getString("FUNCTION_CAT")); + assertEquals(testSchema, rs.getString("FUNCTION_SCHEM")); + assertEquals(specificFunction2, rs.getString("FUNCTION_NAME")); + assertEquals(specificColumn2, rs.getString("COLUMN_NAME")); + assertEquals(DatabaseMetaData.functionColumnUnknown, rs.getShort("COLUMN_TYPE")); + assertEquals(Types.ARRAY, rs.getInt("DATA_TYPE")); + assertEquals("ARRAY", rs.getString("TYPE_NAME")); + assertEquals(null, rs.getObject("PRECISION")); + assertTrue(rs.wasNull()); + assertEquals(null, rs.getObject("LENGTH")); + assertTrue(rs.wasNull()); + assertEquals(null, rs.getObject("SCALE")); + assertTrue(rs.wasNull()); + assertEquals(null, rs.getObject("RADIX")); + assertTrue(rs.wasNull()); + assertEquals(DatabaseMetaData.functionNullableUnknown, rs.getShort("NULLABLE")); + assertNull(rs.getString("REMARKS")); + assertEquals(null, rs.getObject("CHAR_OCTET_LENGTH")); + assertTrue(rs.wasNull()); + assertEquals(1, rs.getInt("ORDINAL_POSITION")); + assertEquals("", rs.getString("IS_NULLABLE")); + assertEquals(specificFunction2, rs.getString("SPECIFIC_NAME")); + assertFalse("Should only find one row for exact column match", rs.next()); + rs.close(); + + // Test Case 3: All columns for 'persistent_sql_udf_named_params' (sorted by ordinal position) + String specificFunction3 = "persistent_sql_udf_named_params"; + rs = databaseMetaData.getFunctionColumns(testCatalog, testSchema, specificFunction3, null); + assertTrue("Should find columns for " + 
specificFunction3, rs.next()); + assertEquals(specificFunction3, rs.getString("FUNCTION_NAME")); + assertEquals("value1", rs.getString("COLUMN_NAME")); // Ordinal Position 1 + assertEquals(DatabaseMetaData.functionColumnUnknown, rs.getShort("COLUMN_TYPE")); + assertEquals(Types.BIGINT, rs.getInt("DATA_TYPE")); + assertEquals("BIGINT", rs.getString("TYPE_NAME")); + assertEquals(1, rs.getInt("ORDINAL_POSITION")); + + assertTrue("Should find second column for " + specificFunction3, rs.next()); + assertEquals(specificFunction3, rs.getString("FUNCTION_NAME")); + assertEquals("value-two", rs.getString("COLUMN_NAME")); // Ordinal Position 2 + assertEquals(DatabaseMetaData.functionColumnUnknown, rs.getShort("COLUMN_TYPE")); + assertEquals(Types.NVARCHAR, rs.getInt("DATA_TYPE")); + assertEquals("NVARCHAR", rs.getString("TYPE_NAME")); + assertEquals(2, rs.getInt("ORDINAL_POSITION")); + assertFalse("Should be no more columns for " + specificFunction3, rs.next()); + rs.close(); + + // Test Case 4: Wildcard for function name "scalar%", specific column name "x" + rs = databaseMetaData.getFunctionColumns(testCatalog, testSchema, "scalar%", "x"); + assertTrue("Should find column 'x' for functions matching 'scalar%'", rs.next()); + assertEquals("scalar_sql_udf", rs.getString("FUNCTION_NAME")); + assertEquals("x", rs.getString("COLUMN_NAME")); + assertEquals(1, rs.getInt("ORDINAL_POSITION")); + assertFalse("Should be no more columns named 'x' for functions matching 'scalar%'", rs.next()); + rs.close(); + + // Test Case 5: Wildcard for column name "%" for 'scalar_js_udf' + String specificFunction4 = "scalar_js_udf"; + rs = databaseMetaData.getFunctionColumns(testCatalog, testSchema, specificFunction4, "%"); + assertTrue("Should find columns for " + specificFunction4 + " with wildcard", rs.next()); + assertEquals(specificFunction4, rs.getString("FUNCTION_NAME")); + assertEquals("name", rs.getString("COLUMN_NAME")); // Ordinal Position 1 + assertEquals(1, rs.getInt("ORDINAL_POSITION")); + + assertTrue("Should find second column for " + specificFunction4 + " with wildcard", rs.next()); + assertEquals(specificFunction4, rs.getString("FUNCTION_NAME")); + assertEquals("age", rs.getString("COLUMN_NAME")); // Ordinal Position 2 + assertEquals(2, rs.getInt("ORDINAL_POSITION")); + assertFalse("Should be no more columns for " + specificFunction4 + " with wildcard", rs.next()); + rs.close(); + + // Test Case 6: Non-existent function + rs = + databaseMetaData.getFunctionColumns( + testCatalog, testSchema, "non_existent_function_xyz", null); + assertFalse("Should not find columns for a non-existent function", rs.next()); + rs.close(); + } + + @Test + public void testRangeDataTypeWithJsonResultSet() throws SQLException { + String RANGE_DATA_TABLE = "JDBC_RANGE_DATA_TEST_TABLE_" + random.nextInt(99); + String range_date_literal = "RANGE<DATE> '[2020-01-01, 2020-01-31)'"; + String range_datetime_literal = "RANGE<DATETIME> '[2020-01-01 12:00:00, 2020-01-31 12:00:00)'"; + String range_timestamp_literal = + "RANGE<TIMESTAMP> '[2020-01-01 12:00:00+08, 2020-01-31 12:00:00+08)'"; + + String createRangeTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `range_date` RANGE<DATE>," + + " `range_date_time` RANGE<DATETIME>, `range_timestamp` RANGE<TIMESTAMP>);", + DATASET, RANGE_DATA_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, range_date, range_date_time, range_timestamp) VALUES (1, %s," + + " %s, %s);", + DATASET, + RANGE_DATA_TABLE, + range_date_literal, + range_datetime_literal, + range_timestamp_literal); + String selectQuery = +
String.format( + "SELECT id, range_date, range_date_time, range_timestamp FROM %s.%s WHERE id = 1;", + DATASET, RANGE_DATA_TABLE); + + boolean status = bigQueryStatement.execute(createRangeTable); + assertFalse(status); + + status = bigQueryStatement.execute(insertQuery); + assertFalse(status); + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + + Integer numRows = 0; + String actual_range_date = ""; + String actual_range_datetime = ""; + String actual_range_timestamp = ""; + + while (resultSet.next()) { + numRows++; + actual_range_date = resultSet.getString("range_date"); + actual_range_datetime = resultSet.getString("range_date_time"); + actual_range_timestamp = resultSet.getString("range_timestamp"); + } + + String expected_range_date = "[2020-01-01, 2020-01-31)"; + String expected_range_datetime = "[2020-01-01T12:00:00, 2020-01-31T12:00:00)"; + String expected_range_timestamp = "[1577851200.000000, 1580443200.000000)"; + + assertThat(numRows).isEqualTo(1); + assertThat(actual_range_date).isEqualTo(expected_range_date); + assertThat(actual_range_datetime).isEqualTo(expected_range_datetime); + assertThat(actual_range_timestamp).isEqualTo(expected_range_timestamp); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %s.%s", DATASET, RANGE_DATA_TABLE)); + } + + @Test + public void testRangeDataTypeWithArrowResultSet() throws SQLException { + String selectQuery = + "select * from `DATATYPERANGETEST.RangeIntervalTestTable` order by intColumn limit 5000;"; + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";MaxResults=500;HighThroughputActivationRatio=1;" + + "HighThroughputMinTableSize=100;" + + "EnableHighThroughputAPI=1;JobCreationMode=1;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertTrue(resultSet.getClass().getName().contains("BigQueryArrowResultSet")); + resultSet.next(); + assertEquals("[2024-07-14, 2024-09-23)", resultSet.getString("rangeField")); + connection.close(); + } + + @Test + public void testPrepareCallSql() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc"); + assertNotNull(callableStatement); + callableStatement.close(); + } + + @Test + public void testRegisterOutParamIndex() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testRegisterOutParamName() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testRegisterOutParamIndexScale() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testRegisterOutParamNameScale() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); +
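+ // The driver should accept a by-name NUMERIC registration with an explicit scale, as exercised below.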
assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testPrepareCallSqlResultSetTypeConcurrency() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + assertNotNull(callableStatement); + callableStatement.close(); + } + + @Test + public void testPrepareCallConcurrencyRegisterOutParamIndex() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testPrepareCallConcurrencyRegisterOutParamName() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testPrepareCallConcurrencyRegisterOutParamIndexScale() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testPrepareCallConcurrencyRegisterOutParamNameScale() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testPrepareCallSqlResultSetTypeConcurrencyHoldability() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + ResultSet.CLOSE_CURSORS_AT_COMMIT); + assertNotNull(callableStatement); + callableStatement.close(); + } + + @Test + public void testPrepareCallHoldabilityRegisterOutParamIndex() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + ResultSet.CLOSE_CURSORS_AT_COMMIT); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testPrepareCallHoldabilityRegisterOutParamName() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + ResultSet.CLOSE_CURSORS_AT_COMMIT); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.VARCHAR); + callableStatement.close(); + } + + @Test + public void testPrepareCallHoldabilityRegisterOutParamIndexScale() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + 
ResultSet.CLOSE_CURSORS_AT_COMMIT); + assertNotNull(callableStatement); + callableStatement.registerOutParameter(1, Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testPrepareCallHoldabilityRegisterOutParamNameScale() throws SQLException { + CallableStatement callableStatement = + this.bigQueryConnection.prepareCall( + "call testProc('?')", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + ResultSet.CLOSE_CURSORS_AT_COMMIT); + assertNotNull(callableStatement); + callableStatement.registerOutParameter("ParamKey", Types.NUMERIC, 2); + callableStatement.close(); + } + + @Test + public void testPrepareCallFailureResultSetType() throws SQLException { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> + this.bigQueryConnection.prepareCall( + "call testProc", ResultSet.TYPE_SCROLL_SENSITIVE, ResultSet.CONCUR_READ_ONLY)); + } + + @Test + public void testPrepareCallFailureResultSetConcurrency() throws SQLException { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> + this.bigQueryConnection.prepareCall( + "call testProc", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_UPDATABLE)); + } + + @Test + public void testPrepareCallFailureResultSetHoldability() throws SQLException { + assertThrows( + BigQueryJdbcSqlFeatureNotSupportedException.class, + () -> + this.bigQueryConnection.prepareCall( + "call testProc", + ResultSet.TYPE_FORWARD_ONLY, + ResultSet.CONCUR_READ_ONLY, + ResultSet.HOLD_CURSORS_OVER_COMMIT)); + } + + // Integration tests for CallableStatement Setters and Getters + @Test + public void testSetterGetterBigDecimal() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + BigDecimal expected = new BigDecimal(12344); + callableStatement.setBigDecimal(CALLABLE_STMT_PARAM_KEY, expected); + BigDecimal actual = callableStatement.getBigDecimal(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterBoolean() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Boolean expected = true; + callableStatement.setBoolean(CALLABLE_STMT_PARAM_KEY, expected); + Boolean actual = callableStatement.getBoolean(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterByte() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Byte expected = "hello".getBytes()[0]; + callableStatement.setByte(CALLABLE_STMT_PARAM_KEY, expected); + Byte actual = callableStatement.getByte(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterBytes() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + byte[] expected = "hello".getBytes(); + callableStatement.setBytes(CALLABLE_STMT_PARAM_KEY, expected); + byte[] actual = callableStatement.getBytes(CALLABLE_STMT_PARAM_KEY); + assertTrue(Arrays.equals(expected, actual)); + } + + @Test + public void testSetterGetterDate() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Date expected = new Date(1234567); + callableStatement.setDate(CALLABLE_STMT_PARAM_KEY, expected); + Date actual = 
callableStatement.getDate(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterDateCal() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Date expected = new Date(1L); + Calendar cal = Calendar.getInstance(); + callableStatement.setDate(CALLABLE_STMT_PARAM_KEY, expected, cal); + Date actual = callableStatement.getDate(CALLABLE_STMT_PARAM_KEY, cal); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterDouble() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Double expected = 123.2345; + callableStatement.setDouble(CALLABLE_STMT_PARAM_KEY, expected); + Double actual = callableStatement.getDouble(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterFloat() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Float expected = 123.2345F; + callableStatement.setFloat(CALLABLE_STMT_PARAM_KEY, expected); + Float actual = callableStatement.getFloat(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterInt() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Integer expected = 123; + callableStatement.setInt(CALLABLE_STMT_PARAM_KEY, expected); + Integer actual = callableStatement.getInt(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterLong() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Long expected = 123L; + callableStatement.setLong(CALLABLE_STMT_PARAM_KEY, expected); + Long actual = callableStatement.getLong(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterNString() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + String expected = "heelo"; + callableStatement.setNString(CALLABLE_STMT_PARAM_KEY, expected); + String actual = callableStatement.getNString(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterObject() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + String expected = "heelo"; + callableStatement.setObject(CALLABLE_STMT_PARAM_KEY, expected); + Object actual = callableStatement.getObject(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterObjectWithSQLType() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + String expected = "heelo"; + callableStatement.setObject(CALLABLE_STMT_PARAM_KEY, expected, Types.NVARCHAR); + Object actual = callableStatement.getObject(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterObjectWithSqlTypeAndScale() throws SQLException { + CallableStatement 
callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + String expected = "heelo"; + callableStatement.setObject(CALLABLE_STMT_PARAM_KEY, expected, Types.NVARCHAR, 0); + Object actual = callableStatement.getObject(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterString() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + String expected = "123"; + callableStatement.setString(CALLABLE_STMT_PARAM_KEY, expected); + String actual = callableStatement.getString(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterTime() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Time expected = new Time(1234567); + callableStatement.setTime(CALLABLE_STMT_PARAM_KEY, expected); + Time actual = callableStatement.getTime(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterTimeCal() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Time expected = new Time(1L); + Calendar cal = Calendar.getInstance(); + callableStatement.setTime(CALLABLE_STMT_PARAM_KEY, expected, cal); + Time actual = callableStatement.getTime(CALLABLE_STMT_PARAM_KEY, cal); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterTimestamp() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Timestamp expected = new Timestamp(1234567); + callableStatement.setTimestamp(CALLABLE_STMT_PARAM_KEY, expected); + Timestamp actual = callableStatement.getTimestamp(CALLABLE_STMT_PARAM_KEY); + assertEquals(expected, actual); + } + + @Test + public void testSetterGetterTimestampCal() throws SQLException { + CallableStatement callableStatement = this.bigQueryConnection.prepareCall("call testProc('?')"); + assertNotNull(callableStatement); + Timestamp expected = new Timestamp(1L); + Calendar cal = Calendar.getInstance(); + callableStatement.setTimestamp(CALLABLE_STMT_PARAM_KEY, expected, cal); + Timestamp actual = callableStatement.getTimestamp(CALLABLE_STMT_PARAM_KEY, cal); + assertEquals(expected, actual); + } + + @Test + public void testPooledConnectionDataSourceSuccess() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + } + + @Test + public void testPooledConnectionDataSourceFailNoConnectionURl() throws SQLException { + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + + assertThrows(BigQueryJdbcException.class, () -> pooledDataSource.getPooledConnection()); + } + + @Test + public void testPooledConnectionDataSourceFailInvalidConnectionURl() { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;" + + 
"ListenerPoolSize=invalid"; + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + assertThrows(NumberFormatException.class, () -> pooledDataSource.getPooledConnection()); + } + + @Test + public void testPooledConnectionAddConnectionListener() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + TestConnectionListener listener = new TestConnectionListener(); + pooledConnection.addConnectionEventListener(listener); + assertEquals(0, listener.getConnectionClosedCount()); + assertEquals(0, listener.getConnectionErrorCount()); + } + + @Test + public void testPooledConnectionRemoveConnectionListener() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + TestConnectionListener listener = new TestConnectionListener(); + pooledConnection.removeConnectionEventListener(listener); + assertEquals(0, listener.getConnectionClosedCount()); + assertEquals(0, listener.getConnectionErrorCount()); + } + + @Test + public void testPooledConnectionConnectionClosed() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + TestConnectionListener listener = new TestConnectionListener(); + pooledConnection.addConnectionEventListener(listener); + assertEquals(0, listener.getConnectionClosedCount()); + assertEquals(0, listener.getConnectionErrorCount()); + + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + + connection.close(); + assertEquals(1, listener.getConnectionClosedCount()); + assertEquals(0, listener.getConnectionErrorCount()); + } + + @Test + public void testPooledConnectionClose() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + TestConnectionListener listener = new TestConnectionListener(); + pooledConnection.addConnectionEventListener(listener); + assertEquals(0, listener.getConnectionClosedCount()); + assertEquals(0, listener.getConnectionErrorCount()); + + pooledConnection.close(); + assertEquals(1, listener.getConnectionClosedCount()); + assertEquals(0, 
listener.getConnectionErrorCount()); + } + + @Test + public void testPooledConnectionConnectionError() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + TestConnectionListener listener = new TestConnectionListener(); + pooledConnection.addConnectionEventListener(listener); + assertEquals(0, listener.getConnectionClosedCount()); + assertEquals(0, listener.getConnectionErrorCount()); + + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + + ExecutorService executor = Executors.newFixedThreadPool(3); + connection.abort(executor); + assertEquals(0, listener.getConnectionClosedCount()); + assertEquals(1, listener.getConnectionErrorCount()); + + executor.shutdown(); + connection.close(); + pooledConnection.close(); + } + + @Test + public void testPooledConnectionListenerAddListener() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + PooledConnectionListener listener = new PooledConnectionListener(DEFAULT_CONN_POOL_SIZE); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + pooledConnection.close(); + } + + @Test + public void testPooledConnectionListenerRemoveListener() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + PooledConnectionListener listener = new PooledConnectionListener(DEFAULT_CONN_POOL_SIZE); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + pooledConnection.removeConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + pooledConnection.close(); + } + + @Test + public void testPooledConnectionListenerCloseConnection() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + PooledConnectionListener listener = new PooledConnectionListener(DEFAULT_CONN_POOL_SIZE); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + 
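+ // getConnection() hands back a logical handle; closing the handle should return the physical connection to the pool rather than tear it down.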
assertFalse(connection.isClosed()); + + connection.close(); + assertFalse(listener.isConnectionPoolEmpty()); + pooledConnection.close(); + } + + @Test + public void testPooledConnectionListenerClosePooledConnection() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + PooledConnectionListener listener = new PooledConnectionListener(DEFAULT_CONN_POOL_SIZE); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + pooledConnection.close(); + assertFalse(listener.isConnectionPoolEmpty()); + } + + @Test + public void testPooledConnectionListenerConnectionError() throws SQLException { + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;OAuthType=3;ProjectId=testProject;ConnectionPoolSize=20;ListenerPoolSize=20;"; + + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionUrl); + + PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + PooledConnectionListener listener = new PooledConnectionListener(DEFAULT_CONN_POOL_SIZE); + pooledConnection.addConnectionEventListener(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + + ExecutorService executor = Executors.newFixedThreadPool(3); + connection.abort(executor); + assertTrue(listener.isConnectionPoolEmpty()); + + executor.shutdown(); + connection.close(); + pooledConnection.close(); + } + + @Test + public void testExecuteQueryWithConnectionPoolingEnabledDefaultPoolSize() throws SQLException { + String connectionURL = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";"; + assertConnectionPoolingResults(connectionURL, DEFAULT_CONN_POOL_SIZE); + } + + @Test + public void testExecuteQueryWithConnectionPoolingEnabledCustomPoolSize() throws SQLException { + String connectionURL = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";" + + "ConnectionPoolSize=" + + CUSTOM_CONN_POOL_SIZE + + ";"; + assertConnectionPoolingResults(connectionURL, CUSTOM_CONN_POOL_SIZE); + } + + private void assertConnectionPoolingResults(String connectionURL, Long connectionPoolSize) + throws SQLException { + // Create Pooled Connection Datasource + PooledConnectionDataSource pooledDataSource = new PooledConnectionDataSource(); + pooledDataSource.setURL(connectionURL); + + // Get pooled connection and ensure listener was added with default connection pool size. 
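+ // The pool manager starts empty; a physical connection is only added to the pool once its logical handle is closed, which the capacity checks below rely on.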
+ PooledConnection pooledConnection = pooledDataSource.getPooledConnection(); + assertNotNull(pooledConnection); + PooledConnectionListener listener = pooledDataSource.getConnectionPoolManager(); + assertNotNull(listener); + assertTrue(listener.isConnectionPoolEmpty()); + + // Get Underlying physical connection + Connection connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + + // Execute query with physical connection + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 850"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + + // Close physical connection + connection.close(); + assertFalse(listener.isConnectionPoolEmpty()); + assertEquals(1, listener.getConnectionPoolCurrentCapacity()); + assertEquals(connectionPoolSize, listener.getConnectionPoolSize()); + + // Reuse same physical connection. + connection = pooledConnection.getConnection(); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertFalse(listener.isConnectionPoolEmpty()); + assertEquals(1, listener.getConnectionPoolCurrentCapacity()); + assertEquals(connectionPoolSize, listener.getConnectionPoolSize()); + + // Execute query with reusable physical connection + jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + + // Return connection back to the pool. + connection.close(); + assertFalse(listener.isConnectionPoolEmpty()); + assertEquals(1, listener.getConnectionPoolCurrentCapacity()); + assertEquals(connectionPoolSize, listener.getConnectionPoolSize()); + pooledConnection.close(); + } + + @Test + public void testAdditionalProjectsInMetadata() throws SQLException { + String additionalProjectsValue = "bigquery-public-data"; + String datasetInAdditionalProject = "baseball"; + + String urlWithAdditionalProjects = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;ProjectId=" + + PROJECT_ID + + ";OAuthType=3" + + ";AdditionalProjects=" + + additionalProjectsValue; + + try (Connection conn = DriverManager.getConnection(urlWithAdditionalProjects)) { + DatabaseMetaData dbMetaData = conn.getMetaData(); + + // 1. Test getCatalogs() + Set<String> foundCatalogs = new HashSet<>(); + try (ResultSet catalogsRs = dbMetaData.getCatalogs()) { + while (catalogsRs.next()) { + foundCatalogs.add(catalogsRs.getString("TABLE_CAT")); + } + } + assertTrue( + "getCatalogs() should contain the primary project ID", + foundCatalogs.contains(PROJECT_ID)); + assertTrue( + "getCatalogs() should contain the additional project ID", + foundCatalogs.contains(additionalProjectsValue)); + + // 2. 
Test getSchemas() + Set<String> catalogsForSchemasFromAll = new HashSet<>(); + boolean foundAdditionalDataset = false; + try (ResultSet schemasRs = dbMetaData.getSchemas()) { + while (schemasRs.next()) { + String schemaName = schemasRs.getString("TABLE_SCHEM"); + String catalogName = schemasRs.getString("TABLE_CATALOG"); + catalogsForSchemasFromAll.add(catalogName); + if (additionalProjectsValue.equals(catalogName) + && datasetInAdditionalProject.equals(schemaName)) { + foundAdditionalDataset = true; + } + } + } + assertTrue( + "getSchemas() should list datasets from the primary project", + catalogsForSchemasFromAll.contains(PROJECT_ID)); + assertTrue( + "getSchemas() should list datasets from the additional project", + catalogsForSchemasFromAll.contains(additionalProjectsValue)); + assertTrue( + "Known dataset from additional project not found in getSchemas()", + foundAdditionalDataset); + + } catch (SQLException e) { + System.err.println("SQL Error during AdditionalProjects test: " + e.getMessage()); + throw e; + } + } + + @Test + public void testFilterTablesOnDefaultDataset_getTables() throws SQLException { + String defaultDatasetValue = CONSTRAINTS_DATASET; + String table1InDefaultDataset = CONSTRAINTS_TABLE_NAME; + String table2InDefaultDataset = CONSTRAINTS_TABLE_NAME2; + + String specificDatasetValue = "JDBC_TABLE_TYPES_TEST"; + String table1InSpecificDataset = "base_table"; + String table2InSpecificDataset = "external_table"; + + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;ProjectId=" + + PROJECT_ID + + ";OAuthType=3" + + ";DefaultDataset=" + + defaultDatasetValue + + ";FilterTablesOnDefaultDataset=1"; + try (Connection conn = DriverManager.getConnection(connectionUrl)) { + DatabaseMetaData dbMetaData = conn.getMetaData(); + + // Case 1: Catalog and schemaPattern are null/wildcard, should use DefaultDataset + try (ResultSet rs = dbMetaData.getTables(null, null, null, null)) { + Set<String> tableNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(defaultDatasetValue, rs.getString("TABLE_SCHEM")); + tableNames.add(rs.getString("TABLE_NAME")); + } + assertTrue(tableNames.contains(table1InDefaultDataset)); + assertTrue(tableNames.contains(table2InDefaultDataset)); + } + + // Case 2: Explicit schemaPattern overrides DefaultDataset + try (ResultSet rs = dbMetaData.getTables(null, specificDatasetValue, null, null)) { + Set<String> tableNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(specificDatasetValue, rs.getString("TABLE_SCHEM")); + tableNames.add(rs.getString("TABLE_NAME")); + } + assertTrue(tableNames.contains(table1InSpecificDataset)); + assertTrue(tableNames.contains(table2InSpecificDataset)); + } + + // Case 3: Explicit catalog, schemaPattern is null/wildcard, should use DefaultDataset within + // that catalog + try (ResultSet rs = dbMetaData.getTables(PROJECT_ID, null, null, null)) { + Set<String> tableNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(defaultDatasetValue, rs.getString("TABLE_SCHEM")); + tableNames.add(rs.getString("TABLE_NAME")); + } + assertTrue(tableNames.contains(table1InDefaultDataset)); + assertTrue(tableNames.contains(table2InDefaultDataset)); + } + + // Case 4: Explicit catalog and schemaPattern override DefaultDataset + try (ResultSet rs = dbMetaData.getTables(PROJECT_ID, specificDatasetValue, null, null)) { + Set<String> tableNames = new HashSet<>(); + 
while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(specificDatasetValue, rs.getString("TABLE_SCHEM")); + tableNames.add(rs.getString("TABLE_NAME")); + } + assertTrue(tableNames.contains(table1InSpecificDataset)); + assertTrue(tableNames.contains(table2InSpecificDataset)); + } + } + } + + @Test + public void testFilterTablesOnDefaultDataset_getColumns() throws SQLException { + String defaultDatasetValue = CONSTRAINTS_DATASET; + String tableInDefaultDataset = CONSTRAINTS_TABLE_NAME; + String[] columnsInDefaultTable = {"id", "name", "second_name", "address"}; + + String specificDatasetValue = "JDBC_TABLE_TYPES_TEST"; + String tableInSpecificDataset = "base_table"; + String[] columnsInSpecificTable = {"id", "name", "created_at"}; + + String connectionUrl = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;ProjectId=" + + PROJECT_ID + + ";OAuthType=3" + + ";DefaultDataset=" + + defaultDatasetValue + + ";FilterTablesOnDefaultDataset=1"; + + try (Connection conn = DriverManager.getConnection(connectionUrl)) { + DatabaseMetaData dbMetaData = conn.getMetaData(); + + // Case 1: Catalog and schemaPattern are null/wildcard, should use DefaultDataset + try (ResultSet rs = dbMetaData.getColumns(null, null, tableInDefaultDataset, null)) { + Set<String> columnNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(defaultDatasetValue, rs.getString("TABLE_SCHEM")); + assertEquals(tableInDefaultDataset, rs.getString("TABLE_NAME")); + columnNames.add(rs.getString("COLUMN_NAME")); + } + for (String expectedCol : columnsInDefaultTable) { + assertTrue(columnNames.contains(expectedCol)); + } + assertEquals(columnsInDefaultTable.length, columnNames.size()); + } + + // Case 2: Explicit schemaPattern overrides DefaultDataset + try (ResultSet rs = + dbMetaData.getColumns(null, specificDatasetValue, tableInSpecificDataset, null)) { + Set<String> columnNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(specificDatasetValue, rs.getString("TABLE_SCHEM")); + assertEquals(tableInSpecificDataset, rs.getString("TABLE_NAME")); + columnNames.add(rs.getString("COLUMN_NAME")); + } + for (String expectedCol : columnsInSpecificTable) { + assertTrue(columnNames.contains(expectedCol)); + } + assertEquals(columnsInSpecificTable.length, columnNames.size()); + } + + // Case 3: Explicit catalog, schemaPattern is null/wildcard, should use DefaultDataset within + // that catalog + try (ResultSet rs = dbMetaData.getColumns(PROJECT_ID, null, tableInDefaultDataset, null)) { + Set<String> columnNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(defaultDatasetValue, rs.getString("TABLE_SCHEM")); + assertEquals(tableInDefaultDataset, rs.getString("TABLE_NAME")); + columnNames.add(rs.getString("COLUMN_NAME")); + } + for (String expectedCol : columnsInDefaultTable) { + assertTrue(columnNames.contains(expectedCol)); + } + assertEquals(columnsInDefaultTable.length, columnNames.size()); + } + + // Case 4: Explicit catalog and schemaPattern override DefaultDataset + try (ResultSet rs = + dbMetaData.getColumns(PROJECT_ID, specificDatasetValue, tableInSpecificDataset, null)) { + Set<String> columnNames = new HashSet<>(); + while (rs.next()) { + assertEquals(PROJECT_ID, rs.getString("TABLE_CAT")); + assertEquals(specificDatasetValue, rs.getString("TABLE_SCHEM")); + assertEquals(tableInSpecificDataset, rs.getString("TABLE_NAME")); + 
columnNames.add(rs.getString("COLUMN_NAME")); + } + for (String expectedCol : columnsInSpecificTable) { + assertTrue(columnNames.contains(expectedCol)); + } + assertEquals(columnsInSpecificTable.length, columnNames.size()); + } + } + } + + @Test + public void testAlterTable() throws SQLException { + String TABLE_NAME = "JDBC_ALTER_TABLE_" + randomNumber; + String createQuery = + String.format("CREATE OR REPLACE TABLE %s.%s (`StringField` STRING);", DATASET, TABLE_NAME); + String addColumnQuery = + String.format("ALTER TABLE %s.%s ADD COLUMN `IntegerField` INTEGER;", DATASET, TABLE_NAME); + String updateRowQuery = + String.format( + "UPDATE %s.%s SET StringField='Jane Doe' WHERE IntegerField=111", DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT * FROM %s.%s", DATASET, TABLE_NAME); + + int createStatus = bigQueryStatement.executeUpdate(createQuery); + assertEquals(0, createStatus); + + int addColumnStatus = bigQueryStatement.executeUpdate(addColumnQuery); + assertEquals(0, addColumnStatus); + + bigQueryStatement.executeQuery(selectQuery); + int selectStatus = bigQueryStatement.getUpdateCount(); + assertEquals(-1, selectStatus); + + // The table is empty, so the UPDATE against the new column should report zero affected rows. + int updateRowStatus = bigQueryStatement.executeUpdate(updateRowQuery); + assertEquals(0, updateRowStatus); + + int dropStatus = bigQueryStatement.executeUpdate(dropQuery); + assertEquals(0, dropStatus); + + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %s.%s", DATASET, TABLE_NAME)); + } + + @Test + public void testQueryPropertyDataSetProjectIdQueriesToCorrectDataset() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryProperties=dataset_project_id=" + + PROJECT_ID + + ";"; + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (15, 'Farhan', 25);", + "INTEGRATION_TESTS", "Test_Table"); + String selectQuery = + "SELECT * FROM `bigquery-devtools-drivers.INTEGRATION_TESTS.Test_Table` WHERE age=25;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + statement.execute(insertQuery); + + // assertions + boolean result = statement.execute(selectQuery); + assertTrue(result); + + // clean up + String deleteQuery = + String.format("DELETE FROM %s.%s WHERE age=25", "INTEGRATION_TESTS", "Test_Table"); + statement.execute(deleteQuery); + connection.close(); + } + + @Test + public void testQueryPropertyDataSetProjectIdQueriesToIncorrectDatasetThrows() + throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryProperties=dataset_project_id=bigquerytestdefault" + + ";"; + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (15, 'Farhan', 25);", + "INTEGRATION_TESTS", "Test_Table"); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act & assertion + assertThrows(BigQueryJdbcException.class, () -> statement.execute(insertQuery)); + connection.close(); + } + + @Test + public void testQueryPropertyTimeZoneQueries() throws SQLException { + String connection_uri = + 
"jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryProperties=time_zone=America/New_York;"; + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(query); + + // assertions + assertNotNull(resultSet); + assertTrue(resultSet.next()); + connection.close(); + } + + @Test + public void testQueryPropertySessionIdSetsStatementSession() + throws SQLException, InterruptedException { + String sessionId = getSessionId(); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryProperties=session_id=" + + sessionId + + ";"; + String selectQuery = + "INSERT INTO `bigquery-devtools-drivers.JDBC_INTEGRATION_DATASET.No_KMS_Test_table` (id," + + " name, age) VALUES (132, 'Batman', 531);"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + boolean resultSet = statement.execute(selectQuery); + + // assertions + assertFalse(resultSet); + + // clean up + String deleteQuery = + String.format("DELETE FROM %s.%s WHERE age=25", "INTEGRATION_TESTS", "Test_Table"); + statement.execute(deleteQuery); + connection.close(); + } + + @Test + public void testEncryptedTableWithKmsQueries() throws SQLException { + // setup + String KMSKeyName = requireEnvVar("KMS_RESOURCE_PATH"); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";KMSKeyName=" + + KMSKeyName + + ";"; + String selectQuery = "SELECT * FROM `JDBC_INTEGRATION_DATASET.KMS_Test_table`;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectQuery); + + // assertions for data not encrypted + assertNotNull(resultSet); + assertTrue(resultSet.next()); + assertEquals("Farhan", resultSet.getString("name")); + connection.close(); + } + + @Test + public void testIncorrectKmsThrows() throws SQLException { + String KMSKeyName = requireEnvVar("KMS_RESOURCE_PATH"); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";KMSKeyName=" + + KMSKeyName + + ";"; + String selectQuery = + "INSERT INTO `bigquery-devtools-drivers.JDBC_INTEGRATION_DATASET.No_KMS_Test_table` (id," + + " name, age) VALUES (132, 'Batman', 531);"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act & assertion + assertThrows(BigQueryJdbcException.class, () -> statement.execute(selectQuery)); + connection.close(); + } + + @Test + public void testQueryPropertyServiceAccountFollowsIamPermission() throws SQLException { + final String SERVICE_ACCOUNT_EMAIL = requireEnvVar("SA_EMAIL"); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + 
+ PROJECT_ID + + ";QueryProperties=service_account=" + + SERVICE_ACCOUNT_EMAIL + + ";"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(String.format(BASE_QUERY, 100)); + + // assertions + assertNotNull(resultSet); + assertTrue(resultSet.next()); + connection.close(); + } + + @Test + public void testValidLegacySQLStatement() throws SQLException { + String legacyJoinQuery = + "SELECT\n" + + " repo_name\n" + + "FROM\n" + + " [bigquery-public-data.github_repos.commits],\n" + + " [bigquery-public-data.github_repos.sample_commits] LIMIT 10"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;"; + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + + boolean result = statement.execute(legacyJoinQuery); + assertTrue(result); + connection.close(); + } + + @Test + public void testMultipleTransactionsThrowsUnsupported() throws SQLException { + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.execute("BEGIN TRANSACTION;")); + connection.close(); + } + + @Test + public void testConnectionWithMultipleTransactionCommits() throws SQLException { + String TRANSACTION_TABLE = "JDBC_MULTI_COMMIT_TABLE" + randomNumber; + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'DwightShrute', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + + Connection connection = DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + + Statement statement = connection.createStatement(); + statement.execute(insertQuery); + statement.execute(updateQuery); + connection.commit(); // First transaction + + // After commit, a new transaction should have started. + // Executing another query and then rolling it back. + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (15, 'MichaelScott', 25);", + DATASET, TRANSACTION_TABLE); + statement.execute(insertQuery2); + connection.rollback(); // Second transaction + + // Verify state with the static bigQueryStatement + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + int count = 0; + while (resultSet.next()) { + count++; + assertEquals(14, resultSet.getInt("age")); + } + assertEquals(1, count); // Only first transaction should be committed. 
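+ // Note: committing and rolling back distinct transactions on one connection relies on session support (EnableSession=1 in session_enabled_connection_uri).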
+ + // Verify the second insert was rolled back + ResultSet rs2 = + bigQueryStatement.executeQuery( + String.format("SELECT * FROM %s.%s WHERE id=15", DATASET, TRANSACTION_TABLE)); + assertFalse(rs2.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %s.%s", DATASET, TRANSACTION_TABLE)); + + statement.close(); + connection.close(); + } + + // Private Helper functions + private String getSessionId() throws InterruptedException { + QueryJobConfiguration stubJobConfig = + QueryJobConfiguration.newBuilder("Select 1;").setCreateSession(true).build(); + Job job = bigQuery.create(JobInfo.of(stubJobConfig)); + job = job.waitFor(); + Job stubJob = bigQuery.getJob(job.getJobId()); + return stubJob.getStatistics().getSessionInfo().getSessionId(); + } + + @Test + public void testCallableStatementScriptExecuteUpdate() throws SQLException { + int randomNum = java.util.UUID.randomUUID().hashCode(); + String insertName = "callable-statement-dml-insert-test"; + String insertResult = String.format("%s-%d", insertName, randomNum); + String updateName = "callable-statement-dml-update-test"; + String updateResult = String.format("%s-%d", updateName, randomNum); + String selectStmtQuery = + String.format("SELECT * FROM %s.%s WHERE id = ?", DATASET, CALLABLE_STMT_DML_TABLE_NAME); + String insertCallStmtQuery = + String.format("CALL %s.%s(?,?,?);", DATASET, CALLABLE_STMT_DML_INSERT_PROC_NAME); + String updateCallStmtQuery = + String.format("CALL %s.%s(?,?,?);", DATASET, CALLABLE_STMT_DML_UPDATE_PROC_NAME); + String deleteCallStmtQuery = + String.format("CALL %s.%s(?);", DATASET, CALLABLE_STMT_DML_DELETE_PROC_NAME); + + // DML INSERT + CallableStatement callableStatement = bigQueryConnection.prepareCall(insertCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setString(1, insertName); + callableStatement.setInt(2, randomNum); + callableStatement.setString(3, insertResult); + int rowsInserted = callableStatement.executeUpdate(); + assertEquals(1, rowsInserted); + + PreparedStatement preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + ResultSet rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertTrue(rs.next()); + + assertEquals(insertName, rs.getString(1)); + assertEquals(randomNum, rs.getInt(2)); + assertEquals(insertResult, rs.getString(3)); + + // DML UPDATE + callableStatement = bigQueryConnection.prepareCall(updateCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setString(1, updateName); + callableStatement.setInt(2, randomNum); + callableStatement.setString(3, updateResult); + int rowsUpdated = callableStatement.executeUpdate(); + assertEquals(1, rowsUpdated); + + preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertTrue(rs.next()); + + assertEquals(updateName, rs.getString(1)); + assertEquals(randomNum, rs.getInt(2)); + assertEquals(updateResult, rs.getString(3)); + + // DML DELETE + callableStatement = bigQueryConnection.prepareCall(deleteCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setInt(1, randomNum); + int rowsDeleted = callableStatement.executeUpdate(); + assertEquals(1, rowsDeleted); + + preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, 
randomNum); + rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertFalse(rs.next()); + + callableStatement.close(); + } + + @Test + public void testCallableStatementScriptExecuteLargeUpdate() throws SQLException { + int randomNum = java.util.UUID.randomUUID().hashCode(); + String insertName = "callable-statement-dml-insert-test"; + String insertResult = String.format("%s-%d", insertName, randomNum); + String updateName = "callable-statement-dml-update-test"; + String updateResult = String.format("%s-%d", updateName, randomNum); + String selectStmtQuery = + String.format("SELECT * FROM %s.%s WHERE id = ?", DATASET, CALLABLE_STMT_DML_TABLE_NAME); + String insertCallStmtQuery = + String.format("CALL %s.%s(?,?,?);", DATASET, CALLABLE_STMT_DML_INSERT_PROC_NAME); + String updateCallStmtQuery = + String.format("CALL %s.%s(?,?,?);", DATASET, CALLABLE_STMT_DML_UPDATE_PROC_NAME); + String deleteCallStmtQuery = + String.format("CALL %s.%s(?);", DATASET, CALLABLE_STMT_DML_DELETE_PROC_NAME); + + // DML INSERT + CallableStatement callableStatement = bigQueryConnection.prepareCall(insertCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setString(1, insertName); + callableStatement.setInt(2, randomNum); + callableStatement.setString(3, insertResult); + long rowsInserted = callableStatement.executeLargeUpdate(); + assertEquals(1L, rowsInserted); + + PreparedStatement preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + ResultSet rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertTrue(rs.next()); + + assertEquals(insertName, rs.getString(1)); + assertEquals(randomNum, rs.getInt(2)); + assertEquals(insertResult, rs.getString(3)); + + // DML UPDATE + callableStatement = bigQueryConnection.prepareCall(updateCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setString(1, updateName); + callableStatement.setInt(2, randomNum); + callableStatement.setString(3, updateResult); + long rowsUpdated = callableStatement.executeLargeUpdate(); + assertEquals(1L, rowsUpdated); + + preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertTrue(rs.next()); + + assertEquals(updateName, rs.getString(1)); + assertEquals(randomNum, rs.getInt(2)); + assertEquals(updateResult, rs.getString(3)); + + // DML DELETE + callableStatement = bigQueryConnection.prepareCall(deleteCallStmtQuery); + assertNotNull(callableStatement); + callableStatement.setInt(1, randomNum); + long rowsDeleted = callableStatement.executeLargeUpdate(); + assertEquals(1L, rowsDeleted); + + preparedStatement = bigQueryConnection.prepareStatement(selectStmtQuery); + assertNotNull(preparedStatement); + preparedStatement.setInt(1, randomNum); + rs = preparedStatement.executeQuery(); + assertNotNull(rs); + assertFalse(rs.next()); + + callableStatement.close(); + } + + @Test + public void testScript() throws SQLException { + String BASE_QUERY = + "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 order by" + + " trip_distance asc LIMIT %s;"; + String query1 = String.format(BASE_QUERY, 5000); + String query2 = String.format(BASE_QUERY, 7000); + String query3 = String.format(BASE_QUERY, 9000); + + bigQueryStatement.execute(query1 + query2 + query3); + ResultSet resultSet = bigQueryStatement.getResultSet(); + 
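+ // Each statement in the script produces its own result set; getMoreResults() advances through them in order.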
assertEquals(5000, resultSetRowCount(resultSet)); + + boolean hasMoreResult = bigQueryStatement.getMoreResults(); + assertTrue(hasMoreResult); + resultSet = bigQueryStatement.getResultSet(); + assertEquals(7000, resultSetRowCount(resultSet)); + + hasMoreResult = bigQueryStatement.getMoreResults(); + assertTrue(hasMoreResult); + resultSet = bigQueryStatement.getResultSet(); + assertEquals(9000, resultSetRowCount(resultSet)); + } + + @Test + public void testCallableStatementScriptExecute() throws SQLException { + int randomNum = random.nextInt(99); + String callableStmtQuery = + String.format( + "DECLARE call_result STRING;" + + "CALL %s.%s(?,?,call_result);" + + "SELECT * FROM %s.%s WHERE result = call_result;", + DATASET, CALLABLE_STMT_PROC_NAME, DATASET, CALLABLE_STMT_TABLE_NAME); + CallableStatement callableStatement = bigQueryConnection.prepareCall(callableStmtQuery); + callableStatement.setString(1, "callable-stmt-test"); + callableStatement.setInt(2, randomNum); + + assertFalse(callableStatement.execute()); + assertEquals(1, callableStatement.getUpdateCount()); + + // The next result is the SELECT that follows the CALL above + assertTrue(callableStatement.getMoreResults()); + ResultSet resultSet = callableStatement.getResultSet(); + ResultSetMetaData rsMetadata = resultSet.getMetaData(); + assertEquals(3, rsMetadata.getColumnCount()); + + assertTrue(resultSet.next()); + + String expected = String.format("callable-stmt-test-%d", randomNum); + String actual = resultSet.getString(3); + + assertEquals(expected, actual); + + // Validate there are no more results + assertFalse(callableStatement.getMoreResults()); + assertEquals(-1, callableStatement.getUpdateCount()); + callableStatement.close(); + } + + @Test + public void testExecuteScriptWithExpression() throws SQLException { + int randomNum = random.nextInt(99); + String query = String.format("DECLARE x INT64; SET x = (SELECT %s); SELECT x;", randomNum); + + assertTrue(bigQueryStatement.execute(query)); + ResultSet rs = bigQueryStatement.getResultSet(); + assertTrue(rs.next()); + assertEquals(randomNum, rs.getInt(1)); + assertFalse(rs.next()); + assertFalse(bigQueryStatement.getMoreResults()); + assertEquals(-1, bigQueryStatement.getUpdateCount()); + } + + @Test + public void testInformationSchemaTables() throws SQLException { + String query = String.format("SELECT * FROM %s.INFORMATION_SCHEMA.TABLES", DATASET); + try (Statement statement = bigQueryConnection.createStatement(); + ResultSet resultSet = statement.executeQuery(query)) { + ResultSetMetaData metaData = resultSet.getMetaData(); + int columnCount = metaData.getColumnCount(); + assertTrue(columnCount > 0); + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + for (int i = 1; i <= columnCount; i++) { + Object obj = resultSet.getObject(i); + if (obj != null) { + assertNotNull(obj.toString()); + } + } + } + assertTrue(rowCount > 0); + } + } + + private void validate( + String method, + BiFunction<ResultSet, Integer, Object> getter, + ImmutableMap<String, Object> expectedResult) + throws Exception { + + try (Connection connection = DriverManager.getConnection(connection_uri); + Connection connectionHTAPI = + DriverManager.getConnection( + connection_uri + + ";HighThroughputMinTableSize=0;HighThroughputActivationRatio=0;EnableHighThroughputAPI=1;"); + Statement statement = connection.createStatement(); + Statement statementHTAPI = connectionHTAPI.createStatement()) { + + String query = + "SELECT * FROM INTEGRATION_TEST_FORMAT.all_bq_types WHERE stringField is not null"; + ResultSet resultSetRegular = statement.executeQuery(query); 
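+ // The second result set reads the identical query through the high-throughput (Arrow) path so every cell can be compared against the regular JSON API.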
+ ResultSet resultSetArrow = statementHTAPI.executeQuery(query); + resultSetRegular.next(); + resultSetArrow.next(); + + for (int i = 1; i <= resultSetRegular.getMetaData().getColumnCount(); i++) { + String columnName = resultSetRegular.getMetaData().getColumnName(i); + + String regularApiLabel = + String.format("[Method: %s] [Column: %s] [API: Regular]", method, columnName); + String htapiApiLabel = + String.format("[Method: %s] [Column: %s] [API: HTAPI]", method, columnName); + + if (expectedResult.containsKey(columnName)) { + Object expectedValue = expectedResult.get(columnName); + + assertEquals(regularApiLabel, expectedValue, getter.apply(resultSetRegular, i)); + assertEquals(htapiApiLabel, expectedValue, getter.apply(resultSetArrow, i)); + + } else { + String regularMsg = "Expected exception but got a value. " + regularApiLabel; + assertEquals(regularMsg, EXCEPTION_REPLACEMENT, getter.apply(resultSetRegular, i)); + + String htapiMsg = "Expected exception but got a value. " + htapiApiLabel; + assertEquals(htapiMsg, EXCEPTION_REPLACEMENT, getter.apply(resultSetArrow, i)); + } + } + } + } + + @Test + public void validateGetString() throws Exception { + final ImmutableMap<String, Object> stringResults = + new ImmutableMap.Builder<String, Object>() + .put("stringField", "StringValue") + .put("bytesField", "Qnl0ZXNWYWx1ZQ==") + .put("intField", "123") + .put("floatField", "10.5") + .put("numericField", "12345.67") + .put("bigNumericField", "98765432109876543210.123456789") + .put("booleanField", "true") + .put("timestampFiled", "2023-07-28 12:30:00.000000") + .put("dateField", "2023-07-28") + .put("timeField", "12:30:00.000") + .put("dateTimeField", "2023-07-28 12:30:00.000000") + .put("geographyField", "POINT(-74.006 40.7128)") + .put( + "recordField", + "{\"name\":\"NameValue\",\"recordNested\":{\"lastName\":\"LastNameValue\"}}") + .put("rangeField", "[2023-01-01, 2023-12-01)") + .put("jsonField", "{\"key\":\"value\"}") + .put("arrayString", "[abc, def, ghi]") + .put("arrayRecord", "[{\"value\":\"rec_val1\"}, {\"value\":\"rec_val2\"}]") + .put("arrayBytes", "[Ynl0ZTE=, Ynl0ZTI=]") + .put("arrayInteger", "[10, 20]") + .put("arrayNumeric", "[10.5, 20.5]") + .put("arrayBignumeric", "[100.1, 200.2]") + .put("arrayBoolean", "[true, false]") + .put("arrayTimestamp", "[2023-01-01 01:00:00.0, 2023-01-01 02:00:00.0]") + .put("arrayDate", "[2023-01-01, 2023-01-02]") + .put("arrayTime", "[01:00:00, 02:00:00]") + .put("arrayDatetime", "[2023-01-01 01:00:00.0, 2023-01-01 02:00:00.0]") + .put("arrayGeography", "[POINT(1 1), POINT(2 2)]") + .put("arrayRange", "[[2023-01-01, 2023-01-03), [2023-01-04, 2023-01-06)]") + .put("arrayJson", "[{\"a\":1}, {\"b\":2}]") + .put("arrayFloat", "[1.1, 2.2]") + .build(); + BiFunction<ResultSet, Integer, Object> getter = + (s, i) -> { + try { + return s.getString(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getString", getter, stringResults); + } + + @Test + public void validateGetInt() throws Exception { + final ImmutableMap<String, Object> result = + new ImmutableMap.Builder<String, Object>() + .put("intField", 123) + .put("floatField", 10) + .put("numericField", 12345) + .put("booleanField", 1) + .build(); + BiFunction<ResultSet, Integer, Object> getter = + (s, i) -> { + try { + return s.getInt(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getInt", getter, result); + } + + @Test + public void validateGetLong() throws Exception { + final ImmutableMap<String, Object> result = + new ImmutableMap.Builder<String, Object>() + .put("intField", 123L) + .put("floatField", 10L) + .put("numericField", 12345L) + .put("booleanField", 1L) + .build(); + 
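+ // Columns absent from the expected map are assumed to be unconvertible for this getter; the lambda maps any thrown exception to EXCEPTION_REPLACEMENT so validate() can assert on it.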
BiFunction<ResultSet, Integer, Object> getter = + (s, i) -> { + try { + return s.getLong(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getLong", getter, result); + } + + @Test + public void validateGetBool() throws Exception { + final ImmutableMap<String, Object> result = + new ImmutableMap.Builder<String, Object>() + .put("intField", true) + .put("floatField", true) + .put("numericField", true) + .put("booleanField", true) + .put("bigNumericField", true) + .put("stringField", false) + .build(); + BiFunction<ResultSet, Integer, Object> getter = + (s, i) -> { + try { + return s.getBoolean(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getBool", getter, result); + } + + @Test + public void validateGetFloat() throws Exception { + final ImmutableMap<String, Object> result = + new ImmutableMap.Builder<String, Object>() + .put("intField", (float) 123.0) + .put("floatField", (float) 10.5) + .put("numericField", (float) 12345.67) + .put("bigNumericField", (float) 98765432109876543210.123456789) + .put("booleanField", (float) 1.0) + .build(); + BiFunction<ResultSet, Integer, Object> getter = + (s, i) -> { + try { + return s.getFloat(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getFloat", getter, result); + } + + @Test + public void validateGetDouble() throws Exception { + final ImmutableMap<String, Object> result = + new ImmutableMap.Builder<String, Object>() + .put("intField", (double) 123.0) + .put("floatField", (double) 10.5) + .put("numericField", (double) 12345.67) + .put("bigNumericField", (double) 98765432109876543210.123456789) + .put("booleanField", (double) 1.0) + .build(); + BiFunction<ResultSet, Integer, Object> getter = + (s, i) -> { + try { + return s.getDouble(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getDouble", getter, result); + } + + @Test + public void validateGetShort() throws Exception { + final ImmutableMap<String, Object> result = + new ImmutableMap.Builder<String, Object>() + .put("intField", (short) 123) + .put("floatField", (short) 10) + .put("numericField", (short) 12345) + .put("booleanField", (short) 1) + .build(); + BiFunction<ResultSet, Integer, Object> getter = + (s, i) -> { + try { + return s.getShort(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getShort", getter, result); + } + + @Test + public void validateGetTime() throws Exception { + final ImmutableMap<String, Object> result = + new ImmutableMap.Builder<String, Object>() + .put("timeField", Time.valueOf("12:30:00")) + .put("dateTimeField", Time.valueOf("12:30:00")) + .put("timestampFiled", Time.valueOf("12:30:00")) + .build(); + BiFunction<ResultSet, Integer, Object> getter = + (s, i) -> { + try { + return s.getTime(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getTime", getter, result); + } + + @Test + public void validateGetDate() throws Exception { + final ImmutableMap<String, Object> result = + new ImmutableMap.Builder<String, Object>() + .put("dateField", Date.valueOf("2023-07-28")) + .put("dateTimeField", Date.valueOf("2023-07-28")) + .put("timestampFiled", Date.valueOf("2023-07-28")) + .build(); + BiFunction<ResultSet, Integer, Object> getter = + (s, i) -> { + try { + return s.getDate(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getDate", getter, result); + } + + @Test + public void validateGetTimestamp() throws Exception { + final ImmutableMap<String, Object> result = + new ImmutableMap.Builder<String, Object>() + .put("timeField", Timestamp.valueOf("1970-01-01 12:30:00")) + .put("dateField", Timestamp.valueOf("2023-07-28 00:00:00")) + .put("dateTimeField", Timestamp.valueOf("2023-07-28 12:30:00")) + .put("timestampFiled", Timestamp.valueOf("2023-07-28 12:30:00")) + .build(); + BiFunction<ResultSet, Integer, Object> getter = + (s, i) -> { + try { + return s.getTimestamp(i); + } 
catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getTimestamp", getter, result); + } + + @Test + public void validateGetByte() throws Exception { + final ImmutableMap<String, Object> result = + new ImmutableMap.Builder<String, Object>() + .put("intField", (byte) 123) + .put("booleanField", (byte) 1) + .put("floatField", (byte) 10) + .build(); + BiFunction<ResultSet, Integer, Object> getter = + (s, i) -> { + try { + return s.getByte(i); + } catch (Exception e) { + return EXCEPTION_REPLACEMENT; + } + }; + validate("getByte", getter, result); + } + + @Test + public void validateGetObjectNullValues() throws Exception { + try (Connection connection = DriverManager.getConnection(connection_uri); + Connection connectionHTAPI = + DriverManager.getConnection( + connection_uri + + ";HighThroughputMinTableSize=0;HighThroughputActivationRatio=0;EnableHighThroughputAPI=1;"); + Statement statement = connection.createStatement(); + Statement statementHTAPI = connectionHTAPI.createStatement()) { + + String query = + "SELECT * FROM INTEGRATION_TEST_FORMAT.all_bq_types WHERE stringField is null;"; + ResultSet resultSetRegular = statement.executeQuery(query); + ResultSet resultSetArrow = statementHTAPI.executeQuery(query); + resultSetRegular.next(); + resultSetArrow.next(); + + for (int i = 1; i <= resultSetRegular.getMetaData().getColumnCount(); i++) { + String columnName = resultSetRegular.getMetaData().getColumnName(i); + if (!columnName.contains("array")) { + assertNull(resultSetRegular.getObject(i)); + assertNull(resultSetArrow.getObject(i)); + } else { + assertEquals("[]", resultSetRegular.getObject(i).toString()); + assertEquals("[]", resultSetArrow.getObject(i).toString()); + } + } + } + } + + private int resultSetRowCount(ResultSet resultSet) throws SQLException { + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + } + return rowCount; + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITNightlyBigQueryTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITNightlyBigQueryTest.java new file mode 100644 index 0000000000..30124b4a04 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITNightlyBigQueryTest.java @@ -0,0 +1,1713 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc.it; + +import static com.google.common.truth.Truth.assertThat; +import static java.util.Arrays.asList; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobInfo; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import com.google.cloud.bigquery.exception.BigQueryJdbcSqlSyntaxErrorException; +import com.google.cloud.bigquery.jdbc.BigQueryConnection; +import com.google.cloud.bigquery.jdbc.BigQueryDriver; +import java.nio.charset.StandardCharsets; +import java.sql.Connection; +import java.sql.Date; +import java.sql.Driver; +import java.sql.DriverManager; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Arrays; +import java.util.Properties; +import java.util.Random; +import java.util.concurrent.atomic.AtomicBoolean; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.Test; + +public class ITNightlyBigQueryTest { + static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); + static Connection bigQueryConnection; + static Statement bigQueryStatement; + static BigQuery bigQuery; + private static final Random random = new Random(); + private static final int randomNumber = random.nextInt(9999); + private static final String BASE_QUERY = + "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 order by" + + " trip_distance asc LIMIT %s"; + private static final String CONSTRAINTS_DATASET = "JDBC_CONSTRAINTS_TEST_DATASET"; + private static final String CONSTRAINTS_TABLE_NAME = "JDBC_CONSTRAINTS_TEST_TABLE"; + private static final String CONSTRAINTS_TABLE_NAME2 = "JDBC_CONSTRAINTS_TEST_TABLE2"; + private static final String CALLABLE_STMT_PROC_NAME = "IT_CALLABLE_STMT_PROC_TEST"; + private static final String CALLABLE_STMT_TABLE_NAME = "IT_CALLABLE_STMT_PROC_TABLE"; + private static final String CALLABLE_STMT_PARAM_KEY = "CALL_STMT_PARAM_KEY"; + private static final String CALLABLE_STMT_DML_INSERT_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_INSERT_TEST"; + private static final String CALLABLE_STMT_DML_UPDATE_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_UPDATE_TEST"; + private static final String CALLABLE_STMT_DML_DELETE_PROC_NAME = + "IT_CALLABLE_STMT_PROC_DML_DELETE_TEST"; + private static final String CALLABLE_STMT_DML_TABLE_NAME = "IT_CALLABLE_STMT_PROC_DML_TABLE"; + private static final String DATASET = "JDBC_NIGHTLY_IT_DATASET"; + private static final String DATASET2 = "JDBC_PRESUBMIT_INTEGRATION_DATASET_2"; + static final String session_enabled_connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;EnableSession=1"; + + static final String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3"; + + @BeforeClass + public static void beforeClass() throws 
SQLException { + bigQueryConnection = DriverManager.getConnection(connection_uri, new Properties()); + bigQueryStatement = bigQueryConnection.createStatement(); + bigQuery = BigQueryOptions.newBuilder().build().getService(); + } + + @AfterClass + public static void afterClass() throws SQLException { + bigQueryStatement.close(); + bigQueryConnection.close(); + } + + @Test + public void testMergeInExecuteBatch() throws SQLException { + Random random = new Random(); + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME1 = "Inventory" + random.nextInt(9999); + String TABLE_NAME2 = "DetailedInventory" + random.nextInt(9999); + + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER);", + DATASET, TABLE_NAME1); + + String createQuery2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER," + + " `supply_constrained` BOOLEAN, `comment` STRING);", + DATASET, TABLE_NAME2); + + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (product, quantity, supply_constrained, comment) " + + "VALUES ('countertop microwave', 20, NULL,'[]' )," + + " ('front load washer', 20, false,'[]' ), " + + " ('microwave', 20, false,'[]' ), " + + " ('refrigerator', 10, false,'[]' );", + DATASET, TABLE_NAME2); + + bigQueryStatement.execute(createQuery); + bigQueryStatement.execute(createQuery2); + bigQueryStatement.execute(insertQuery2); + + String insertQuery = + String.format( + "INSERT INTO %s.%s (product, quantity) " + + "VALUES (?,? ), (?,? ), (?,? ), (?,? ), (?,? ), (?,? );", + DATASET, TABLE_NAME1); + PreparedStatement insertPs = bigQueryConnection.prepareStatement(insertQuery); + insertPs.setString(1, "dishwasher"); + insertPs.setInt(2, 30); + insertPs.setString(3, "dryer"); + insertPs.setInt(4, 30); + insertPs.setString(5, "front load washer"); + insertPs.setInt(6, 20); + insertPs.setString(7, "microwave"); + insertPs.setInt(8, 20); + insertPs.setString(9, "oven"); + insertPs.setInt(10, 5); + insertPs.setString(11, "top load washer"); + insertPs.setInt(12, 10); + + int insertStatus = insertPs.executeUpdate(); + assertEquals(6, insertStatus); + + String updateQuery = + String.format("UPDATE %s.%s SET quantity=? 
WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement updatePs = bigQueryConnection.prepareStatement(updateQuery); + updatePs.setString(2, "dryer"); + updatePs.setInt(1, 35); + + int updateStatus = updatePs.executeUpdate(); + assertEquals(1, updateStatus); + + String deleteQuery = String.format("DELETE FROM %s.%s WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement deletePs = bigQueryConnection.prepareStatement(deleteQuery); + deletePs.setString(1, "dishwasher"); + + int deleteStatus = deletePs.executeUpdate(); + assertEquals(1, deleteStatus); + + Statement statement = bigQueryConnection.createStatement(); + String mergeQuery = + String.format( + "MERGE %s.%s T\n" + + "USING %s.%s S\n" + + "ON T.product = S.product\n" + + "WHEN NOT MATCHED AND quantity < 100 THEN\n" + + " INSERT(product, quantity, supply_constrained, comment)\n" + + " VALUES(product, quantity, true, '[]')\n", + DATASET, TABLE_NAME2, DATASET, TABLE_NAME1); + statement.addBatch(mergeQuery); + int[] result = statement.executeBatch(); + + assertEquals(1, result.length); + assertEquals(3, result[0]); + bigQueryStatement.execute(String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME1)); + bigQueryStatement.execute(String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME2)); + } + + @Test + public void testValidLongRunningQuery() throws SQLException { + // setup + String selectQuery = + "SELECT * FROM `bigquery-public-data.deepmind_alphafold.metadata` LIMIT 50000"; + + // Read data via JDBC + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + + for (int i = 0; i < 50000; i++) { + resultSet.next(); + assertFalse(resultSet.wasNull()); + assertNotNull(resultSet.getString(5)); + } + + // clean up + resultSet.close(); + } + + @Test + public void testQueryInterruptGracefullyStopsExplicitJob() + throws SQLException, InterruptedException { + AtomicBoolean threadException = new AtomicBoolean(true); + Connection bigQueryConnection = + DriverManager.getConnection(connection_uri + ";JobCreationMode=1", new Properties()); + Statement bigQueryStatement = bigQueryConnection.createStatement(); + + // This query takes 300 seconds to complete + String query300Seconds = + "DECLARE DELAY_TIME DATETIME; SET DELAY_TIME = DATETIME_ADD(CURRENT_DATETIME, INTERVAL 300" + + " SECOND); WHILE CURRENT_DATETIME < DELAY_TIME DO END WHILE;"; + + // Query will be started in the background thread & we will call cancel from current thread. 
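+ // The background thread asserts that the cancelled execute() surfaces an SQLException whose message reports the cancellation.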
+ Thread t = + new Thread( + () -> { + SQLException e = + assertThrows( + SQLException.class, () -> bigQueryStatement.execute(query300Seconds)); + assertTrue(e.getMessage().contains("User requested cancellation")); + threadException.set(false); + }); + t.start(); + // Allow thread to actually initiate the query + Thread.sleep(3000); + bigQueryStatement.cancel(); + // Wait until background thread is finished + t.join(); + assertFalse(threadException.get()); + // Ensure statement can be used again + assertFalse(bigQueryStatement.isClosed()); + bigQueryStatement.executeQuery("SELECT 1"); + } + + @Test + public void testQueryInterruptGracefullyStopsOptionalJob() + throws SQLException, InterruptedException { + AtomicBoolean threadException = new AtomicBoolean(true); + Connection bigQueryConnection = + DriverManager.getConnection(connection_uri + ";JobCreationMode=2", new Properties()); + Statement bigQueryStatement = bigQueryConnection.createStatement(); + + // This query takes 300 seconds to complete + String query300Seconds = + "DECLARE DELAY_TIME DATETIME; SET DELAY_TIME = DATETIME_ADD(CURRENT_DATETIME, INTERVAL 300" + + " SECOND); WHILE CURRENT_DATETIME < DELAY_TIME DO END WHILE;"; + + // Query will be started in the background thread & we will call cancel from current thread. + Thread t = + new Thread( + () -> { + SQLException e = + assertThrows( + SQLException.class, () -> bigQueryStatement.execute(query300Seconds)); + assertTrue(e.getMessage().contains("Query was cancelled.")); + threadException.set(false); + }); + t.start(); + // Allow thread to actually initiate the query + Thread.sleep(3000); + bigQueryStatement.cancel(); + // Wait until background thread is finished + t.join(); + assertFalse(threadException.get()); + // Ensure statement can be used again + assertFalse(bigQueryStatement.isClosed()); + bigQueryStatement.executeQuery("SELECT 1"); + } + + @Test + public void testWideColumnQueries() throws SQLException { + String selectQuery = + "SELECT * FROM `bigquery-public-data.covid19_open_data_eu.covid19_open_data` LIMIT 50000"; + + // Read data via JDBC + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + + for (int i = 0; i < 50000; i++) { + resultSet.next(); + assertFalse(resultSet.wasNull()); + } + + // clean up + resultSet.close(); + } + + @Test + public void testExecuteLargeUpdate() throws SQLException { + String tableName = "JDBC_LARGE_UPDATE_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE TABLE %s.%s (" + + " gbifid STRING, scientificname STRING, " + + " individualcount INTEGER, isReviewed BOOLEAN)", + DATASET, tableName); + + String insertQuery = + String.format( + "INSERT INTO %s.%s (gbifid, scientificname, individualcount) " + + "SELECT gbifid, scientificname, individualcount FROM " + + "bigquery-public-data.gbif.occurrences;", + DATASET, tableName); + String updateQuery = + String.format( + "UPDATE %s.%s SET isReviewed = false WHERE individualcount >= 0 OR individualcount IS" + + " NULL", + DATASET, tableName); + + String selectQuery = String.format("SELECT * FROM %s.%s LIMIT 10", DATASET, tableName); + + bigQueryStatement.execute(createQuery); + + long insertCount = bigQueryStatement.executeLargeUpdate(insertQuery); + assertTrue(insertCount > Integer.MAX_VALUE); + + long updateCount = bigQueryStatement.executeLargeUpdate(updateQuery); + assertTrue(updateCount > Integer.MAX_VALUE); + + ResultSet selectResult = bigQueryStatement.executeQuery(selectQuery); + assertTrue(selectResult.next()); + 
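+ // The large UPDATE above set isReviewed to false on every row, so any sampled row should read false.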
assertFalse(selectResult.getBoolean("isReviewed")); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %s.%s", DATASET, tableName)); + } + + @Test + public void testHTAPIWithValidDestinationTableSavesQueriesWithStandardSQL() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=SQL;" + + "LargeResultTable=destination_table_test;" + + "LargeResultDataset=INTEGRATION_TESTS;" + + "EnableHighThroughputAPI=1;"; + String selectStandardSqlQuery = + "SELECT * FROM `bigquery-public-data.deepmind_alphafold.metadata` LIMIT 200000;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectStandardSqlQuery); + + // assertion + assertNotNull(resultSet); + String selectQuery = "SELECT * FROM INTEGRATION_TESTS.destination_table_test;"; + ResultSet actualResultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(200000, resultSetRowCount(actualResultSet)); + + // clean up + String deleteRows = "DELETE FROM `INTEGRATION_TESTS.destination_table_test` WHERE 1=1;"; + bigQueryStatement.execute(deleteRows); + } + + @Test + public void testBigQueryConcurrentLimitWithExecuteBatch() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_CONCURRENT_LIMIT_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batching bypasses the 16-concurrent-query limit + int[] results; + for (int i = 0; i < 30; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "."
+ + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + results = statement.executeBatch(); + + // assertions + assertEquals(30, results.length); + for (int updateCount : results) { + assertEquals(1, updateCount); + } + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + } + + @Test + public void testValidExecuteBatchWithMultipleDatasets() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_MULTIPLE_DATASET_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + String createBatchTable2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET2, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + bigQueryStatement.execute(createBatchTable2); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + int[] results; + for (int i = 0; i < 15; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + for (int i = 0; i < 15; i++) { + String insertQuery = + "INSERT INTO " + + DATASET2 + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + results = statement.executeBatch(); + + // assertions + for (int updateCount : results) { + assertEquals(1, updateCount); + } + + // do a select to validate row count on each + String selectQuery = String.format("SELECT id, name, age FROM %s.%s ;", DATASET, BATCH_TABLE); + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(15, resultSetRowCount(resultSet)); + selectQuery = String.format("SELECT id, name, age FROM %s.%s ;", DATASET2, BATCH_TABLE); + resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(15, resultSetRowCount(resultSet)); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + } + + @Test + public void testValidExecuteBatchWithMultipleTables() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_MULTI_TABLES_" + random.nextInt(99); + String BATCH_TABLE_2 = "JDBC_EXECUTE_BATCH_TABLE_MULTI_TABLES_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + String createBatchTable2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE_2); + bigQueryStatement.execute(createBatchTable); + bigQueryStatement.execute(createBatchTable2); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + int[] results; + for (int i = 0; i < 5; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." 
+ + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + for (int i = 0; i < 5; i++) { + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE_2 + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "; + statement.addBatch(insertQuery); + } + results = statement.executeBatch(); + + // assertions + for (int updateCount : results) { + assertEquals(1, updateCount); + } + + // do a select to test row count on each + String selectQuery = String.format("SELECT id, name, age FROM %s.%s ;", DATASET, BATCH_TABLE); + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(5, resultSetRowCount(resultSet)); + selectQuery = String.format("SELECT id, name, age FROM %s.%s ;", DATASET, BATCH_TABLE_2); + resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(5, resultSetRowCount(resultSet)); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE_2)); + } + + @Test + public void testPreparedStatementExecuteUpdate() throws SQLException { + Random random = new Random(); + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME1 = "Inventory" + random.nextInt(9999); + String TABLE_NAME2 = "DetailedInventory" + random.nextInt(9999); + + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER);", + DATASET, TABLE_NAME1); + + String createQuery2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`product` STRING, `quantity` INTEGER," + + " `supply_constrained` BOOLEAN, `comment` STRING);", + DATASET, TABLE_NAME2); + + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (product, quantity, supply_constrained, comment) " + + "VALUES ('countertop microwave', 20, NULL,'[]' )," + + " ('front load washer', 20, false,'[]' ), " + + " ('microwave', 20, false,'[]' ), " + + " ('refrigerator', 10, false,'[]' );", + DATASET, TABLE_NAME2); + + bigQueryStatement.execute(createQuery); + bigQueryStatement.execute(createQuery2); + bigQueryStatement.execute(insertQuery2); + + String insertQuery = + String.format( + "INSERT INTO %s.%s (product, quantity) " + + "VALUES (?,? ), (?,? ), (?,? ), (?,? ), (?,? ), (?,? );", + DATASET, TABLE_NAME1); + PreparedStatement insertPs = bigQueryConnection.prepareStatement(insertQuery); + insertPs.setString(1, "dishwasher"); + insertPs.setInt(2, 30); + insertPs.setString(3, "dryer"); + insertPs.setInt(4, 30); + insertPs.setString(5, "front load washer"); + insertPs.setInt(6, 20); + insertPs.setString(7, "microwave"); + insertPs.setInt(8, 20); + insertPs.setString(9, "oven"); + insertPs.setInt(10, 5); + insertPs.setString(11, "top load washer"); + insertPs.setInt(12, 10); + + int insertStatus = insertPs.executeUpdate(); + assertEquals(6, insertStatus); + + String updateQuery = + String.format("UPDATE %s.%s SET quantity=? 
WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement updatePs = bigQueryConnection.prepareStatement(updateQuery); + updatePs.setString(2, "dryer"); + updatePs.setInt(1, 35); + + int updateStatus = updatePs.executeUpdate(); + assertEquals(1, updateStatus); + + String deleteQuery = String.format("DELETE FROM %s.%s WHERE product=?", DATASET, TABLE_NAME1); + PreparedStatement deletePs = bigQueryConnection.prepareStatement(deleteQuery); + deletePs.setString(1, "dishwasher"); + + int deleteStatus = deletePs.executeUpdate(); + assertEquals(1, deleteStatus); + + String mergeQuery = + String.format( + "MERGE %s.%s T\n" + + "USING %s.%s S\n" + + "ON T.product = S.product\n" + + "WHEN NOT MATCHED AND quantity < ? THEN\n" + + " INSERT(product, quantity, supply_constrained, comment)\n" + + " VALUES(product, quantity, true, ?)\n" + + "WHEN NOT MATCHED THEN\n" + + " INSERT(product, quantity, supply_constrained)\n" + + " VALUES(product, quantity, false)", + DATASET, TABLE_NAME2, DATASET, TABLE_NAME1); + PreparedStatement mergePs = bigQueryConnection.prepareStatement(mergeQuery); + mergePs.setInt(1, 20); + mergePs.setString(2, "comment" + random.nextInt(999)); + + int mergeStatus = mergePs.executeUpdate(); + assertEquals(3, mergeStatus); + + ResultSet rs = + bigQueryStatement.executeQuery( + String.format("SELECT COUNT(*) AS row_count\n" + "FROM %s.%s", DATASET, TABLE_NAME2)); + rs.next(); + assertEquals(7, rs.getInt(1)); + + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME1); + int dropStatus = bigQueryStatement.executeUpdate(dropQuery); + assertEquals(0, dropStatus); + bigQueryStatement.execute(String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME2)); + } + + @Test + public void testFailedStatementInTheMiddleOfExecuteBatchStopsExecuting() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_ERROR_IN_MIDDLE_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + String createBatchTable2 = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET2, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + bigQueryStatement.execute(createBatchTable2); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET2, BATCH_TABLE); + + // act + for (int i = 0; i < 20; i++) { + if (i == 10) { + statement.addBatch( + "INSERT INTO " + + DATASET2 + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "); + } else { + statement.addBatch( + "INSERT INTO " + + DATASET + + "." 
+ + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + i + + "); "); + } + } + bigQueryStatement.execute(dropQuery); + + // assertions + assertThrows(BigQueryJdbcException.class, statement::executeBatch); + String selectQuery = String.format("SELECT id, name, age FROM %s.%s ;", DATASET, BATCH_TABLE); + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(10, resultSetRowCount(resultSet)); + bigQueryStatement.execute(String.format("DROP TABLE %s.%s", DATASET, BATCH_TABLE)); + } + + @Test + public void testHTAPIWithValidDestinationTableSavesQueriesWithLegacy() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryDialect=BIG_QUERY;" + + "LargeResultTable=destination_table_test;" + + "LargeResultDataset=INTEGRATION_TESTS;" + + "EnableHighThroughputAPI=1;"; + String selectLegacyQuery = + "SELECT * FROM [bigquery-public-data.deepmind_alphafold.metadata] LIMIT 200000;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + ResultSet resultSet = statement.executeQuery(selectLegacyQuery); + + // assertion + assertNotNull(resultSet); + String selectQuery = "SELECT * FROM INTEGRATION_TESTS.destination_table_test;"; + ResultSet actualResultSet = bigQueryStatement.executeQuery(selectQuery); + assertTrue(0 < resultSetRowCount(actualResultSet)); + + // clean up + String deleteRows = "DELETE FROM `INTEGRATION_TESTS.destination_table_test` WHERE 1=1;"; + bigQueryStatement.execute(deleteRows); + } + + @Test + public void testMultiStatementTransactionRollbackByUser() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + + boolean status = statement.execute(insertQuery); + assertFalse(status); + int rows = statement.executeUpdate(updateQuery); + assertEquals(1, rows); + status = statement.execute(selectQuery); + assertTrue(status); + connection.rollback(); + assertTrue( + "After rollback() in manual commit mode, a new transaction should be started.", + connection.isTransactionStarted()); + + // Separate query to check if transaction rollback worked + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertFalse(resultSet.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + connection.close(); + } + + @Test + public void testMultiStatementTransactionDoesNotCommitWithoutCommit() throws 
SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + + boolean status = statement.execute(insertQuery); + assertFalse(status); + int rows = statement.executeUpdate(updateQuery); + assertEquals(1, rows); + status = statement.execute(selectQuery); + assertTrue(status); + + // Separate query to check nothing committed + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertFalse(resultSet.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + statement.close(); + connection.close(); + } + + @Test + public void testValidMultiStatementTransactionCommits() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + + boolean status = statement.execute(insertQuery); + assertFalse(status); + status = statement.execute(updateQuery); + assertFalse(status); + status = statement.execute(selectQuery); + assertTrue(status); + connection.commit(); + + // Separate query to check inserted and updated data committed + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertTrue(resultSet.next()); + assertEquals(14, resultSet.getInt(3)); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + statement.close(); + connection.close(); + } + + @Test + public void testConnectionWithMultipleTransactionCommits() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', 
%s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + boolean status = statement.execute(insertQuery); + assertFalse(status); + status = statement.execute(updateQuery); + assertFalse(status); + status = statement.execute(selectQuery); + assertTrue(status); + connection.commit(); + + connection.setAutoCommit(false); + assertTrue(connection.isTransactionStarted()); + statement.execute(insertQuery); + connection.rollback(); + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + int count = 0; + while (resultSet.next()) { + count++; + } + assertEquals(1, count); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + connection.close(); + } + + @Test + public void testTransactionRollbackOnError() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s ;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + String transactionOnError = + "BEGIN\n" + + "\n" + + " BEGIN TRANSACTION;\n" + + " INSERT INTO " + + DATASET + + "." 
+ + TRANSACTION_TABLE + + "\n" + + " VALUES (39, 'Drake', 123);\n" + + " SELECT 1/0;\n" + + " COMMIT TRANSACTION;\n" + + "\n" + + "EXCEPTION WHEN ERROR THEN\n" + + " SELECT @@error.message;\n" + + " ROLLBACK TRANSACTION;\n" + + "END;"; + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + Statement statement = connection.createStatement(); + statement.execute(transactionOnError); + + // do a check to see if no vals inserted + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertFalse(resultSet.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + connection.close(); + } + + @Test + public void testClearBatchClears() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_CLEAR_BATCH_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + bigQueryStatement.execute(createBatchTable); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 13 WHERE age = %s;", DATASET, BATCH_TABLE, randomNumber); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + statement.addBatch(updateQuery); + statement.clearBatch(); + int[] results = statement.executeBatch(); + + // assertion + assertEquals(0, results.length); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testMultipleExecuteBatches() throws SQLException { + // setup + String BATCH_TABLE = "JDBC_EXECUTE_BATCH_TABLE_MULTI_BATCHES_" + random.nextInt(99); + String createBatchTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, BATCH_TABLE); + String insertQuery = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', " + + randomNumber + + "); "; + String insertQuery2 = + "INSERT INTO " + + DATASET + + "." + + BATCH_TABLE + + " (id, name, age) " + + "VALUES (12, 'Farhan', 123), " + + " (12, 'Farhan', 123); "; + bigQueryStatement.execute(createBatchTable); + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act + // batch bypasses the 16 concurrent limit + statement.addBatch(insertQuery); + int[] firstResults = statement.executeBatch(); + statement.addBatch(insertQuery2); + int[] secondResults = statement.executeBatch(); + + // assertions + assertEquals(1, firstResults.length); + assertEquals(1, secondResults.length); + assertEquals(1, firstResults[0]); + assertEquals(2, secondResults[0]); + bigQueryStatement.execute(String.format("DROP TABLE IF EXISTS %S.%s", DATASET, BATCH_TABLE)); + connection.close(); + } + + @Test + public void testValidAllDataTypesSerializationFromSelectQuery() throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_DATATYPES_INTEGRATION_TEST_TABLE"; + String selectQuery = "select * from " + DATASET + "." 
+ TABLE_NAME; + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + resultSet.next(); + assertEquals(16, resultSetMetaData.getColumnCount()); + assertTrue(resultSet.getBoolean(1)); + assertEquals(33, resultSet.getInt(2)); + assertEquals(50.05f, resultSet.getFloat(3), 0.0); + assertEquals(123.456, resultSet.getDouble(4), 0.0); + assertEquals(123.456789, resultSet.getDouble(5), 0.0); + assertEquals("testString", resultSet.getString(6)); + assertEquals("Test String", new String(resultSet.getBytes(7), StandardCharsets.UTF_8)); + Struct expectedStruct = (Struct) resultSet.getObject(8); + assertThat(expectedStruct.getAttributes()).isEqualTo(asList("Eric", 10L).toArray()); + assertArrayEquals( + new String[] {"one", "two", "three"}, (String[]) resultSet.getArray(9).getArray()); + + assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356456"), resultSet.getObject(10)); + assertEquals(Date.valueOf("2019-1-12"), resultSet.getObject(11)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getObject(12)); + assertEquals(Timestamp.valueOf("2019-02-17 11:24:00"), resultSet.getObject(13)); + assertEquals("POINT(1 2)", resultSet.getString(14)); + assertEquals("{\"class\":{\"students\":[{\"name\":\"Jane\"}]}}", resultSet.getString(15)); + assertEquals("123-7 -19 0:24:12.000006", resultSet.getString(16)); + } + + @Test + public void testRepeatedStructFromSelectQuery() throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_REPEATED_STRUCT_INTEGRATION_TEST"; + String selectQuery = "select * from " + DATASET + "." + TABLE_NAME; + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + resultSet.next(); + + Struct[] repeatedStruct = (Struct[]) resultSet.getArray(1).getArray(); + assertEquals(3, Arrays.stream(repeatedStruct).count()); + + Object[] alice = repeatedStruct[0].getAttributes(); + Object[] bob = repeatedStruct[1].getAttributes(); + Object[] charlie = repeatedStruct[2].getAttributes(); + assertEquals("Alice", alice[0]); + assertEquals("30", alice[1]); + assertEquals("Bob", bob[0]); + assertEquals("25", bob[1]); + assertEquals("Charlie", charlie[0]); + assertEquals("35", charlie[1]); + } + + @Test + public void testValidAllDataTypesSerializationFromSelectQueryArrowDataset() throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_INTEGRATION_ARROW_TEST_TABLE"; + String selectQuery = "select * from " + DATASET + "." 
+ TABLE_NAME + " LIMIT 5000;"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;ProjectId=" + + PROJECT_ID + + ";EnableHighThroughputAPI=1;" + + "HighThroughputActivationRatio=2;" + + "HighThroughputMinTableSize=1000;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertNotNull(resultSet); + + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + resultSet.next(); + assertEquals(15, resultSetMetaData.getColumnCount()); + assertTrue(resultSet.getBoolean(1)); + assertEquals(33, resultSet.getInt(2)); + assertEquals(50.05f, resultSet.getFloat(3), 0.0); + assertEquals(123.456, resultSet.getDouble(4), 0.0); + assertEquals(123.456789, resultSet.getDouble(5), 0.0); + assertEquals("testString", resultSet.getString(6)); + assertEquals("Test String", new String(resultSet.getBytes(7), StandardCharsets.UTF_8)); + Struct expectedStruct = (Struct) resultSet.getObject(8); + assertThat(expectedStruct.getAttributes()).isEqualTo(asList("Eric", 10L).toArray()); + assertEquals("{\"name\":\"Eric\",\"age\":10}", expectedStruct.toString()); + assertArrayEquals( + new String[] {"one", "two", "three"}, (String[]) resultSet.getArray(9).getArray()); + assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356"), resultSet.getObject(10)); + assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356"), resultSet.getTimestamp(10)); + assertEquals(Date.valueOf("2019-1-12"), resultSet.getObject(11)); + assertEquals(Date.valueOf("2019-1-12"), resultSet.getDate(11)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getObject(12)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getTime(12)); + assertEquals(Timestamp.valueOf("2022-01-22 22:22:12.142265"), resultSet.getObject(13)); + assertEquals("POINT(1 2)", resultSet.getString(14)); + assertEquals("{\"class\":{\"students\":[{\"name\":\"Jane\"}]}}", resultSet.getString(15)); + connection.close(); + } + + @Test + public void testBulkInsertOperation() throws SQLException { + String TABLE_NAME = "JDBC_BULK_INSERT_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING,\n" + + " `IntegerField` INTEGER," + + " `FloatField` FLOAT64," + + " `NumericField` NUMERIC," + + " `BigNumericField` BIGNUMERIC," + + " `BooleanField` BOOLEAN" + + " );", + DATASET, TABLE_NAME); + String insertQuery = + String.format("INSERT INTO %s.%s VALUES(?, ?, ?, ?, ?, ?);", DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT * FROM %s.%s", DATASET, TABLE_NAME); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;" + + "EnableWriteAPI=1;SWA_ActivationRowCount=5;SWA_AppendRowCount=500"; + + try (Connection connection = DriverManager.getConnection(connection_uri)) { + bigQueryStatement.execute(createQuery); + PreparedStatement statement = connection.prepareStatement(insertQuery); + for (int i = 0; i < 20; ++i) { + statement.setString(1, i + "StringField"); + statement.setInt(2, i); + statement.setFloat(3, (float) (i + .6)); + statement.setInt(4, random.nextInt()); + statement.setInt(5, random.nextInt()); + statement.setBoolean(6, true); + + statement.addBatch(); + } + int[] result = statement.executeBatch(); +
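// with EnableWriteAPI=1 and SWA_ActivationRowCount=5, this 20-row batch should be
+ // routed through the Storage Write API path; the select below checks all rows landed
+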
+ ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(result.length, resultSetRowCount(resultSet)); + + bigQueryStatement.execute(dropQuery); + + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testBulkInsertOperationStandard() throws SQLException { + String TABLE_NAME = "JDBC_BULK_INSERT_STANDARD_TABLE_" + randomNumber; + String createQuery = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`StringField` STRING,\n" + + " `IntegerField` INTEGER," + + " `FloatField` FLOAT64," + + " `NumericField` NUMERIC," + + " `BigNumericField` BIGNUMERIC," + + " `BooleanField` BOOLEAN" + + " );", + DATASET, TABLE_NAME); + String insertQuery = + String.format("INSERT INTO %s.%s VALUES(?, ?, ?,?, ?, ?);", DATASET, TABLE_NAME); + String dropQuery = String.format("DROP TABLE %s.%s", DATASET, TABLE_NAME); + String selectQuery = String.format("SELECT * FROM %s.%s", DATASET, TABLE_NAME); + + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;" + + "EnableWriteAPI=0;SWA_ActivationRowCount=50;SWA_AppendRowCount=500"; + + try (Connection connection = DriverManager.getConnection(connection_uri)) { + bigQueryStatement.execute(createQuery); + PreparedStatement statement = connection.prepareStatement(insertQuery); + for (int i = 0; i < 20; ++i) { + statement.setString(1, i + "StringField"); + statement.setInt(2, i); + statement.setFloat(3, (float) (i + .6)); + statement.setInt(4, random.nextInt()); + statement.setInt(5, random.nextInt()); + statement.setBoolean(6, true); + + statement.addBatch(); + } + int[] result = statement.executeBatch(); + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertEquals(result.length, resultSetRowCount(resultSet)); + + bigQueryStatement.execute(dropQuery); + + } catch (SQLException e) { + throw new BigQueryJdbcException(e); + } + } + + @Test + public void testExecuteQueryWithSetMaxRows() throws SQLException { + String TEST_MAX_ROWS_TABLE = "JDBC_TEST_MAX_ROWS_TABLE" + random.nextInt(99); + int id1 = random.nextInt(99); + int id2 = random.nextInt(99); + String createMaxRowsTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING);", + DATASET, TEST_MAX_ROWS_TABLE); + String insertQuery1 = + String.format( + "INSERT INTO %s.%s (id, name) VALUES (%s, 'max-rows-test-1');", + DATASET, TEST_MAX_ROWS_TABLE, id1); + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (id, name) VALUES (%s, 'max-rows-test-2');", + DATASET, TEST_MAX_ROWS_TABLE, id2); + String selectQuery = String.format("SELECT id, name FROM %s.%s;", DATASET, TEST_MAX_ROWS_TABLE); + + boolean executeResult = bigQueryStatement.execute(createMaxRowsTable); + assertFalse(executeResult); + int rowsInserted = bigQueryStatement.executeUpdate(insertQuery1); + assertEquals(1, rowsInserted); + rowsInserted = bigQueryStatement.executeUpdate(insertQuery2); + assertEquals(1, rowsInserted); + + bigQueryStatement.setMaxRows(1); + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + assertEquals(1, resultSetRowCount(resultSet)); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TEST_MAX_ROWS_TABLE)); + } + + @Test + public void testExecuteQueryWithoutSetMaxRows() throws SQLException { + String TEST_MAX_ROWS_TABLE = "JDBC_TEST_MAX_ROWS_TABLE" + random.nextInt(99); + int id1 = random.nextInt(99); + int id2 = random.nextInt(99); + String 
createMaxRowsTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING);", + DATASET, TEST_MAX_ROWS_TABLE); + String insertQuery1 = + String.format( + "INSERT INTO %s.%s (id, name) VALUES (%s, 'max-rows-test-1');", + DATASET, TEST_MAX_ROWS_TABLE, id1); + String insertQuery2 = + String.format( + "INSERT INTO %s.%s (id, name) VALUES (%s, 'max-rows-test-2');", + DATASET, TEST_MAX_ROWS_TABLE, id2); + String selectQuery = String.format("SELECT id, name FROM %s.%s;", DATASET, TEST_MAX_ROWS_TABLE); + + boolean executeResult = bigQueryStatement.execute(createMaxRowsTable); + assertFalse(executeResult); + int rowsInserted = bigQueryStatement.executeUpdate(insertQuery1); + assertEquals(1, rowsInserted); + rowsInserted = bigQueryStatement.executeUpdate(insertQuery2); + assertEquals(1, rowsInserted); + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + assertEquals(2, resultSetRowCount(resultSet)); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TEST_MAX_ROWS_TABLE)); + } + + @Test + public void testQueryPropertySessionIdIsUsedWithTransaction() + throws SQLException, InterruptedException { + // setup + String sessionId = getSessionId(); + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String beginTransaction = "BEGIN TRANSACTION; "; + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + String commitTransaction = "COMMIT TRANSACTION;"; + + String transactionQuery = + beginTransaction + + insertQuery + + insertQuery + + updateQuery + + selectQuery + + commitTransaction; + + bigQueryStatement.execute(createTransactionTable); + + // Run the transaction + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";QueryProperties=session_id=" + + sessionId + + ";"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + statement.execute(transactionQuery); + + // Test each query's result with getMoreResults + int resultsCount = 0; + boolean hasMoreResult = statement.getMoreResults(); + while (hasMoreResult || statement.getUpdateCount() != -1) { + if (statement.getUpdateCount() == -1) { + ResultSet result = statement.getResultSet(); + assertTrue(result.next()); + assertEquals(-1, statement.getUpdateCount()); + } else { + assertTrue(statement.getUpdateCount() > -1); + } + hasMoreResult = statement.getMoreResults(); + resultsCount++; + } + assertEquals(5, resultsCount); + + // Check the transaction was actually committed. 
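+ // The transaction ran two INSERTs and one UPDATE, so two rows with age 14 are expected.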
+ ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + assertEquals(14, resultSet.getInt(3)); + } + assertEquals(2, rowCount); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %s.%s", DATASET, TRANSACTION_TABLE)); + connection.close(); + } + + @Test + public void testRollbackOnConnectionClosed() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 12 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + + boolean status = statement.execute(insertQuery); + assertFalse(status); + int rows = statement.executeUpdate(updateQuery); + assertEquals(1, rows); + status = statement.execute(selectQuery); + assertTrue(status); + connection.close(); + + // Separate query to check if transaction rollback worked + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertFalse(resultSet.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %s.%s", DATASET, TRANSACTION_TABLE)); + } + + @Test + public void testSingleStatementTransaction() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String beginTransaction = "BEGIN TRANSACTION; "; + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + String commitTransaction = "COMMIT TRANSACTION;"; + + String transactionQuery = + beginTransaction + + insertQuery + + insertQuery + + updateQuery + + selectQuery + + commitTransaction; + + bigQueryStatement.execute(createTransactionTable); + + // Run the transaction + Connection connection = DriverManager.getConnection(session_enabled_connection_uri); + Statement statement = connection.createStatement(); + statement.execute(transactionQuery); + + // Test each query's result with getMoreResults + int resultsCount = 0; + boolean hasMoreResult = statement.getMoreResults(); + while (hasMoreResult || statement.getUpdateCount() != -1) { + if (statement.getUpdateCount() == -1) { + ResultSet result = statement.getResultSet(); + assertTrue(result.next()); + assertEquals(-1, statement.getUpdateCount()); + } else { + assertTrue(statement.getUpdateCount() > -1); + } + hasMoreResult =
statement.getMoreResults(); + resultsCount++; + } + assertEquals(5, resultsCount); + + // Check the transaction was actually committed. + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + assertEquals(14, resultSet.getInt(3)); + } + assertEquals(2, rowCount); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + connection.close(); + } + + @Test + public void testConnectionClosedRollsBackStartedTransactions() throws SQLException { + String TRANSACTION_TABLE = "JDBC_TRANSACTION_TABLE" + random.nextInt(99); + String createTransactionTable = + String.format( + "CREATE OR REPLACE TABLE %s.%s (`id` INTEGER, `name` STRING, `age` INTEGER);", + DATASET, TRANSACTION_TABLE); + String insertQuery = + String.format( + "INSERT INTO %s.%s (id, name, age) VALUES (12, 'Farhan', %s);", + DATASET, TRANSACTION_TABLE, randomNumber); + String updateQuery = + String.format( + "UPDATE %s.%s SET age = 14 WHERE age = %s;", DATASET, TRANSACTION_TABLE, randomNumber); + String selectQuery = + String.format("SELECT id, name, age FROM %s.%s WHERE id = 12;", DATASET, TRANSACTION_TABLE); + + bigQueryStatement.execute(createTransactionTable); + + BigQueryConnection connection = + (BigQueryConnection) DriverManager.getConnection(session_enabled_connection_uri); + connection.setAutoCommit(false); + Statement statement = connection.createStatement(); + assertTrue(connection.isTransactionStarted()); + + boolean status = statement.execute(insertQuery); + assertFalse(status); + int rows = statement.executeUpdate(updateQuery); + assertEquals(1, rows); + status = statement.execute(selectQuery); + assertTrue(status); + connection.close(); + + // Separate query to check if transaction rollback worked + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertFalse(resultSet.next()); + + bigQueryStatement.execute( + String.format("DROP TABLE IF EXISTS %S.%s", DATASET, TRANSACTION_TABLE)); + } + + @Test + public void testStatelessQueryPathSmall() throws SQLException { + Properties jobCreationMode = new Properties(); + jobCreationMode.setProperty("JobCreationMode", "2"); + Connection bigQueryConnectionUseStateless = + DriverManager.getConnection(connection_uri, jobCreationMode); + + Statement statement = bigQueryConnectionUseStateless.createStatement(); + + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 850"; + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(850, resultSetRowCount(jsonResultSet)); + + String queryEmpty = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 0"; + ResultSet jsonResultSetEmpty = statement.executeQuery(queryEmpty); + assertTrue(jsonResultSetEmpty.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(0, resultSetRowCount(jsonResultSetEmpty)); + bigQueryConnectionUseStateless.close(); + } + + @Test + public void testFastQueryPathMedium() throws SQLException { + String query = + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 9000"; + ResultSet jsonResultSet = bigQueryStatement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(9000, resultSetRowCount(jsonResultSet)); + } + + @Test + public void 
testFastQueryPathLarge() throws SQLException { + String query = + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 18000"; + ResultSet jsonResultSet = bigQueryStatement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(18000, resultSetRowCount(jsonResultSet)); + } + + @Test + // reads using ReadAPI and makes sure that they are in order, which implies threads worked + // correctly + public void testIterateOrderArrowMultiThread() throws SQLException { + int expectedCnt = 200000; + String longQuery = String.format(BASE_QUERY, expectedCnt); + ResultSet rs = bigQueryStatement.executeQuery(longQuery); + int cnt = 0; + double oldTriDis = 0.0d; + while (rs.next()) { + double tripDis = rs.getDouble("trip_distance"); + ++cnt; + assertTrue(oldTriDis <= tripDis); + oldTriDis = tripDis; + } + assertEquals(expectedCnt, cnt); // all the records were retrieved + } + + @Test + public void testNonEnabledUseLegacySQLThrowsSyntaxError() throws SQLException { + // setup + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "OAuthType=3;" + + "ProjectId=" + + PROJECT_ID + + ";"; + String selectLegacyQuery = + "SELECT * FROM [bigquery-public-data.deepmind_alphafold.metadata] LIMIT 20000000;"; + Driver driver = BigQueryDriver.getRegisteredDriver(); + Connection connection = driver.connect(connection_uri, new Properties()); + Statement statement = connection.createStatement(); + + // act & assertion + assertThrows( + BigQueryJdbcSqlSyntaxErrorException.class, () -> statement.execute(selectLegacyQuery)); + connection.close(); + } + + @Test + public void testFastQueryPathEmpty() throws SQLException { + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT" + + " 0"; + ResultSet jsonResultSet = bigQueryStatement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(0, resultSetRowCount(jsonResultSet)); + } + + @Test + public void testReadAPIPathLarge() throws SQLException { + Properties withReadApi = new Properties(); + withReadApi.setProperty("EnableHighThroughputAPI", "1"); + withReadApi.setProperty("HighThroughputActivationRatio", "2"); + withReadApi.setProperty("HighThroughputMinTableSize", "1000"); + withReadApi.setProperty("MaxResults", "300"); + + Connection connection = DriverManager.getConnection(connection_uri, withReadApi); + Statement statement = connection.createStatement(); + int expectedCnt = 5000; + String longQuery = String.format(BASE_QUERY, expectedCnt); + ResultSet arrowResultSet = statement.executeQuery(longQuery); + assertTrue(arrowResultSet.getClass().getName().contains("BigQueryArrowResultSet")); + assertEquals(expectedCnt, resultSetRowCount(arrowResultSet)); + arrowResultSet.close(); + connection.close(); + } + + @Test + public void testReadAPIPathLargeWithThresholdParameters() throws SQLException { + String connectionUri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;MaxResults=300;HighThroughputActivationRatio=2;" + + "HighThroughputMinTableSize=100;EnableHighThroughputAPI=1"; + Connection connection = DriverManager.getConnection(connectionUri); + Statement statement = connection.createStatement(); + int expectedCnt = 1000; + String longQuery = String.format(BASE_QUERY, expectedCnt); + ResultSet arrowResultSet = statement.executeQuery(longQuery); + 
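// 1000 result rows against MaxResults=300 should cross HighThroughputActivationRatio=2,
+ // so an Arrow-backed result set from the Storage Read API is expected here
+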
assertTrue(arrowResultSet.getClass().getName().contains("BigQueryArrowResultSet")); + assertEquals(expectedCnt, resultSetRowCount(arrowResultSet)); + arrowResultSet.close(); + connection.close(); + } + + @Test + public void testReadAPIPathLargeWithThresholdNotMet() throws SQLException { + String connectionUri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=3;HighThroughputActivationRatio=4;" + + "HighThroughputMinTableSize=100;EnableHighThroughputAPI=1"; + Connection connection = DriverManager.getConnection(connectionUri); + Statement statement = connection.createStatement(); + int expectedCnt = 5000; + String longQuery = String.format(BASE_QUERY, expectedCnt); + ResultSet arrowResultSet = statement.executeQuery(longQuery); + assertTrue(arrowResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + assertEquals(expectedCnt, resultSetRowCount(arrowResultSet)); + arrowResultSet.close(); + connection.close(); + } + + private String getSessionId() throws InterruptedException { + QueryJobConfiguration stubJobConfig = + QueryJobConfiguration.newBuilder("Select 1;").setCreateSession(true).build(); + Job job = bigQuery.create(JobInfo.of(stubJobConfig)); + job = job.waitFor(); + Job stubJob = bigQuery.getJob(job.getJobId()); + return stubJob.getStatistics().getSessionInfo().getSessionId(); + } + + private int resultSetRowCount(ResultSet resultSet) throws SQLException { + int rowCount = 0; + while (resultSet.next()) { + rowCount++; + } + return rowCount; + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITPSCBigQueryTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITPSCBigQueryTest.java new file mode 100644 index 0000000000..1b73f84b66 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITPSCBigQueryTest.java @@ -0,0 +1,300 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc.it; + +import static com.google.common.truth.Truth.assertThat; +import static java.util.Arrays.asList; +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.jdbc.BigQueryConnection; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.sql.Connection; +import java.sql.Date; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.util.Properties; +import org.junit.Test; + +public class ITPSCBigQueryTest { + static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); + private static final String SERVICE_ACCOUNT_EMAIL = requireEnvVar("SA_EMAIL"); + + private static String requireEnvVar(String varName) { + String value = System.getenv(varName); + assertNotNull( + "Environment variable " + varName + " is required to perform these tests.", + System.getenv(varName)); + return value; + } + + @Test + public void testNoOverrideTimesOut() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "APPLICATION_DEFAULT_CREDENTIALS", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 850"; + Statement statement = connection.createStatement(); + assertThrows(BigQueryException.class, () -> statement.executeQuery(query)); + } + + @Test + public void testValidADCAuthenticationInPSC() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "APPLICATION_DEFAULT_CREDENTIALS", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 850"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @Test + public void testValidOAuthType2AuthenticationInPSC() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=2;" + + "OAuthAccessToken=RedactedToken;" // TODO(fahmz): see if there is a way to use SMS + + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com;"; + + Connection connection = DriverManager.getConnection(connection_uri); + 
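// with a pre-generated access token and the private endpoint override in place,
+ // the driver should report the PRE_GENERATED_TOKEN auth mode asserted below
+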
assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "PRE_GENERATED_TOKEN", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 850"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @Test + public void testValidServiceAccountAuthenticationKeyFileInPSC() throws SQLException, IOException { + final String SERVICE_ACCOUNT_KEY = requireEnvVar("SA_SECRET"); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=0;" + + "OAuthPvtKeyPath=" + + SERVICE_ACCOUNT_KEY + + ";EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com," + + "OAuth2=https://oauth2-privateendpoint.p.googleapis.com/token;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + connection.close(); + } + + @Test + public void testValidServiceAccountAuthenticationViaEmailInPSC() throws SQLException { + final String SERVICE_ACCOUNT_KEY = requireEnvVar("SA_SECRET"); + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=0;" + + "OAuthServiceAcctEmail=" + + SERVICE_ACCOUNT_EMAIL + + ";OAuthPvtKey=" + + SERVICE_ACCOUNT_KEY + + ";EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com," + + "OAuth2=https://oauth2-privateendpoint.p.googleapis.com/token;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = + "SELECT DISTINCT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 850"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @Test + public void testValidAllDataTypesSerializationFromSelectQueryInPSC() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=0;" + + "OAuthPvtKeyPath=.\\google-cloud-bigquery-jdbc\\secret.json;" + + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com," + + "OAuth2=https://oauth2-privateendpoint.p.googleapis.com/token;"; + Connection bigQueryConnection = DriverManager.getConnection(connection_uri, new Properties()); + Statement bigQueryStatement = bigQueryConnection.createStatement(); + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_DATATYPES_INTEGRATION_TEST_TABLE"; + String selectQuery = "select * from " + DATASET + "."
+ TABLE_NAME; + + ResultSet resultSet = bigQueryStatement.executeQuery(selectQuery); + assertNotNull(resultSet); + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + resultSet.next(); + assertEquals(16, resultSetMetaData.getColumnCount()); + assertTrue(resultSet.getBoolean(1)); + assertEquals(33, resultSet.getInt(2)); + assertEquals(50.05f, resultSet.getFloat(3), 0.0); + assertEquals(123.456, resultSet.getDouble(4), 0.0); + assertEquals(123.456789, resultSet.getDouble(5), 0.0); + assertEquals("testString", resultSet.getString(6)); + assertEquals("Test String", new String(resultSet.getBytes(7), StandardCharsets.UTF_8)); + Struct expectedStruct = (Struct) resultSet.getObject(8); + assertThat(expectedStruct.getAttributes()).isEqualTo(asList("Eric", 10).toArray()); + assertArrayEquals( + new String[] {"one", "two", "three"}, (String[]) resultSet.getArray(9).getArray()); + + assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356456"), resultSet.getObject(10)); + assertEquals(Date.valueOf("2019-1-12"), resultSet.getObject(11)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getObject(12)); + assertEquals(Timestamp.valueOf("2019-02-17 11:24:00"), resultSet.getObject(13)); + assertEquals("POINT(1 2)", resultSet.getString(14)); + assertEquals("{\"class\":{\"students\":[{\"name\":\"Jane\"}]}}", resultSet.getString(15)); + assertEquals("123-7 -19 0:24:12.000006", resultSet.getString(16)); + } + + @Test + public void testValidAllDataTypesSerializationFromSelectQueryArrowDatasetInPSC() + throws SQLException { + String DATASET = "JDBC_INTEGRATION_DATASET"; + String TABLE_NAME = "JDBC_INTEGRATION_ARROW_TEST_TABLE"; + String selectQuery = "SELECT * FROM " + DATASET + "." + TABLE_NAME; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=0;" + + "OAuthPvtKeyPath=.\\google-cloud-bigquery-jdbc\\secret.json;" + + "EnableHighThroughputAPI=1;" + + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com," + + "READ_API=bigquerystorage-privateendpoint.p.googleapis.com:443," + + "OAuth2=https://oauth2-privateendpoint.p.googleapis.com/token;"; + + // Read data via JDBC + Connection connection = DriverManager.getConnection(connection_uri); + Statement statement = connection.createStatement(); + ResultSet resultSet = statement.executeQuery(selectQuery); + assertNotNull(resultSet); + + ResultSetMetaData resultSetMetaData = resultSet.getMetaData(); + resultSet.next(); + assertEquals(15, resultSetMetaData.getColumnCount()); + assertTrue(resultSet.getBoolean(1)); + assertEquals(33, resultSet.getInt(2)); + assertEquals(50.05f, resultSet.getFloat(3), 0.0); + assertEquals(123.456, resultSet.getDouble(4), 0.0); + assertEquals(123.456789, resultSet.getDouble(5), 0.0); + assertEquals("testString", resultSet.getString(6)); + assertEquals("Test String", new String(resultSet.getBytes(7), StandardCharsets.UTF_8)); + Struct expectedStruct = (Struct) resultSet.getObject(8); + assertThat(expectedStruct.getAttributes()).isEqualTo(asList("Eric", 10).toArray()); + assertEquals("{\"v\":{\"f\":[{\"v\":\"Eric\"},{\"v\":\"10\"}]}}", expectedStruct.toString()); + assertArrayEquals( + new String[] {"one", "two", "three"}, (String[]) resultSet.getArray(9).getArray()); + assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356"), resultSet.getObject(10)); + assertEquals(Timestamp.valueOf("2020-04-27 18:07:25.356"), resultSet.getTimestamp(10)); + assertEquals(Date.valueOf("2019-1-12"), resultSet.getObject(11)); + 
assertEquals(Date.valueOf("2019-1-12"), resultSet.getDate(11)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getObject(12)); + assertEquals(Time.valueOf("14:00:00"), resultSet.getTime(12)); + assertEquals(Timestamp.valueOf("2022-01-22 22:22:12.142265"), resultSet.getObject(13)); + assertEquals("POINT(1 2)", resultSet.getString(14)); + assertEquals("{\"class\":{\"students\":[{\"name\":\"Jane\"}]}}", resultSet.getString(15)); + connection.close(); + } + + @Test + public void testValidExternalAccountAuthenticationInPSC() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;PROJECTID=" + + PROJECT_ID + + ";OAUTHTYPE=4;" + + "BYOID_AudienceUri=//iam.googleapis.com/projects//locations//workloadIdentityPools//providers/;" + + "BYOID_SubjectTokenType=;" + + "BYOID_CredentialSource={\"file\":\"/path/to/file\"};" + + "BYOID_SA_Impersonation_Uri=;" + + "BYOID_TokenUri=;" + + "EndpointOverrides=BIGQUERY=https://bigquery-privateendpoint.p.googleapis.com," + + "OAuth2=https://oauth2-privateendpoint.p.googleapis.com/token;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "EXTERNAL_ACCOUNT_AUTH", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + + Statement statement = connection.createStatement(); + ResultSet resultSet = + statement.executeQuery( + "SELECT repository_name FROM `bigquery-public-data.samples.github_timeline` LIMIT 50"); + + assertNotNull(resultSet); + connection.close(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITProxyBigQueryTest.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITProxyBigQueryTest.java new file mode 100644 index 0000000000..fbde5ecc67 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITProxyBigQueryTest.java @@ -0,0 +1,234 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc.it; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertThrows; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.exception.BigQueryJdbcException; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Arrays; +import java.util.List; +import org.junit.Ignore; +import org.junit.Test; +import org.junit.experimental.runners.Enclosed; +import org.junit.runner.RunWith; +import org.junit.runners.Parameterized; + +@RunWith(Enclosed.class) +public class ITProxyBigQueryTest { + static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); + static final String PROXY_HOST = "34.94.167.18"; + static final String PROXY_PORT = "3128"; + static final String PROXY_UID = "fahmz"; + static final String PROXY_PWD = "fahmz"; + + public static class NonParameterizedProxyTests { + @Test + public void testValidAuthenticatedProxy() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "ProxyHost=" + + PROXY_HOST + + ";ProxyPort=" + + PROXY_PORT + + ";ProxyUid=" + + PROXY_UID + + ";ProxyPwd=" + + PROXY_PWD + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + Statement statement = connection.createStatement(); + boolean result = + statement.execute( + "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"); + assertTrue(result); + connection.close(); + } + + @Test + public void testAuthenticatedProxyWithoutAuthDetailsThrows() throws SQLException { + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "ProxyHost=" + + PROXY_HOST + + ";ProxyPort=" + + PROXY_PORT + + ";"; + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.execute(query)); + } + + @Test + public void testNonExistingProxyTimesOut() throws SQLException { + String query = "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"; + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "ProxyHost=111.12.111.11;" // If this test fails, this IP address may actually + // have been assigned to a reachable host + + "ProxyPort=1111;"; + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + Statement statement = connection.createStatement(); + assertThrows(BigQueryJdbcException.class, () -> statement.execute(query)); + } + + @Test + @Ignore // Run this only when the proxy server has no authentication; otherwise you'll get a "407 Proxy + // Authentication Required". 
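+ // (HTTP 407 is the challenge a proxy returns when it requires credentials but the
+ // request carries none, so this test is only meaningful with proxy auth disabled.)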
+ public void testNonAuthenticatedProxy() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "ProxyHost=" + + PROXY_HOST + + ";ProxyPort=" + + PROXY_PORT + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + Statement statement = connection.createStatement(); + boolean result = + statement.execute( + "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"); + assertTrue(result); + connection.close(); + } + + @Test + public void testValidNonProxyConnectionQueries() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + Statement statement = connection.createStatement(); + boolean result = + statement.execute( + "SELECT * FROM `bigquery-public-data.samples.github_timeline` LIMIT 180"); + assertTrue(result); + connection.close(); + } + + @Test + public void testReadAPIEnabledWithProxySettings() throws SQLException { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + "ProxyHost=" + + PROXY_HOST + + ";ProxyPort=" + + PROXY_PORT + + ";EnableHighThroughputAPI=1;" + + "ProxyUid=" + + PROXY_UID + + ";ProxyPwd=" + + PROXY_PWD + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + Statement statement = connection.createStatement(); + boolean result = + statement.execute( + "SELECT * FROM `bigquery-public-data.covid19_open_data_eu.covid19_open_data` LIMIT 200000"); + assertTrue(result); + connection.close(); + } + } + + @RunWith(Parameterized.class) + public static class ParameterizedMissingPropertiesTest { + private final String proxyHost; + private final String proxyPort; + private final String proxyUid; + private final String proxyPwd; + + public ParameterizedMissingPropertiesTest( + String proxyHost, String proxyPort, String proxyUid, String proxyPwd) { + this.proxyHost = proxyHost; + this.proxyPort = proxyPort; + this.proxyUid = proxyUid; + this.proxyPwd = proxyPwd; + } + + @Parameterized.Parameters + public static List<String[]> proxyParameters() { + String proxyHost = "ProxyHost=" + PROXY_HOST + ";"; + String proxyPort = "ProxyPort=" + PROXY_PORT + ";"; + String proxyUid = "ProxyUid=" + PROXY_UID + ";"; + String proxyPwd = "ProxyPwd=" + PROXY_PWD + ";"; + return Arrays.asList( + new String[][] { + {"", proxyPort, proxyUid, proxyPwd}, + {proxyHost, "", proxyUid, proxyPwd}, + {proxyHost, proxyPort, "", proxyPwd}, + {proxyHost, proxyPort, proxyUid, ""}, + {"", "", proxyUid, proxyPwd} + }); + } + + @Test + public void testMissingProxyParameterThrowsIllegalArgument() { + String connection_uri = + "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443;" + + "ProjectId=" + + PROJECT_ID + + ";OAuthType=3;" + + proxyHost + + proxyPort + + proxyUid + + proxyPwd; + assertThrows( + IllegalArgumentException.class, () -> DriverManager.getConnection(connection_uri)); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITTPCBigQueryTest.java 
b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITTPCBigQueryTest.java new file mode 100644 index 0000000000..3fa2d7d7e4 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/it/ITTPCBigQueryTest.java @@ -0,0 +1,220 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc.it; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertNotNull; +import static org.junit.Assert.assertTrue; + +import com.google.cloud.bigquery.jdbc.BigQueryConnection; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import org.junit.Test; + +public class ITTPCBigQueryTest { + private static final String ENDPOINT_URL = System.getenv("ENDPOINT_URL"); + private static final String UNIVERSE_DOMAIN = System.getenv("UNIVERSE_DOMAIN"); + private static final String TPC_SERVICE_ACCOUNT = System.getenv("SERVICE_ACCOUNT"); + private static final String TPC_PVT_KEY = System.getenv("PRIVATE_KEY"); + private static final String TPC_ACCESS_TOKEN = System.getenv("ACCESS_TOKEN"); + private static final String TPC_PROJECT_ID = System.getenv("PROJECT_ID"); + + private static final String TPC_ENDPOINT = + (ENDPOINT_URL == null || ENDPOINT_URL.isEmpty()) + ? "jdbc:bigquery://https://www.googleapis.com/bigquery/v2:443" + : "jdbc:bigquery://" + ENDPOINT_URL; + private static final String TPC_UNIVERSE_DOMAIN = + (UNIVERSE_DOMAIN == null || UNIVERSE_DOMAIN.isEmpty()) ? "googleapis.com" : UNIVERSE_DOMAIN; + + // See go/bq-cli-tpc for the testing setup. + // Use the default test project. + // For the SA you will have to grant it BigQuery admin permissions (cl/627813300) and + // revert the grant after testing. + // Plug the values for the connection properties from the guide into the connection string. + @Test + public void testServiceAccountAuthenticationViaEmail() throws SQLException { + validateTPCEnvironment(); + String connection_uri = + TPC_ENDPOINT + + ";" + + "ProjectId=" + + TPC_PROJECT_ID + + ";" + + "OAuthType=0;" + + "universeDomain=" + + TPC_UNIVERSE_DOMAIN + + ";" + + "OAuthServiceAcctEmail=" + + TPC_SERVICE_ACCOUNT + + ";" + + "OAuthPvtKey=" + + TPC_PVT_KEY + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = "SELECT 1"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + // You will need to point the GOOGLE_APPLICATION_CREDENTIALS environment variable at + // the SA key file. 
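+ // For example (illustrative path only, not a checked-in file):
+ //   export GOOGLE_APPLICATION_CREDENTIALS=/path/to/tpc-sa-key.json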
+ @Test + public void testValidApplicationDefaultCredentialsAuthentication() throws SQLException { + validateTPCEnvironment(); + String connection_uri = + TPC_ENDPOINT + + ";" + + "OAuthType=3;" + + "ProjectId=" + + TPC_PROJECT_ID + + ";" + + "universeDomain=" + + TPC_UNIVERSE_DOMAIN + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "APPLICATION_DEFAULT_CREDENTIALS", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = "SELECT * FROM test.test;"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @Test + public void testValidPreGeneratedAccessTokenAuthentication() throws SQLException { + validateTPCEnvironment(); + String connection_uri = + TPC_ENDPOINT + + ";" + + "OAuthType=2;" + + "ProjectId=" + + TPC_PROJECT_ID + + ";" + + "OAuthAccessToken=" + + TPC_ACCESS_TOKEN + + ";" + + "universeDomain=" + + TPC_UNIVERSE_DOMAIN + + ";"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + String query = "SELECT * FROM test.test;"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @Test + public void testSimpleQueryReturns() throws SQLException { + validateTPCEnvironment(); + String connection_uri = + TPC_ENDPOINT + + ";" + + "ProjectId=" + + TPC_PROJECT_ID + + ";" + + "OAuthType=0;" + + "universeDomain=" + + TPC_UNIVERSE_DOMAIN + + ";" + + "OAuthServiceAcctEmail=" + + TPC_SERVICE_ACCOUNT + + ";" + + "OAuthPvtKey=" + + TPC_PVT_KEY + + ";"; // Plug in this value when testing from the key file + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = "SELECT * FROM test.test;"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + @Test + public void testServiceAccountKeyFileReturns() throws SQLException { + validateTPCEnvironment(); + String connection_uri = + TPC_ENDPOINT + + ";" + + "ProjectId=" + + TPC_PROJECT_ID + + ";" + + "OAuthType=0;" + + "universeDomain=" + + TPC_UNIVERSE_DOMAIN + + ";" + // Point the key path to wherever you downloaded the key file. 
+ + "OAuthPvtKeyPath=/Users/YourPathToSecretFile/SAKeyFile.json;"; + + Connection connection = DriverManager.getConnection(connection_uri); + assertNotNull(connection); + assertFalse(connection.isClosed()); + assertEquals( + "GOOGLE_SERVICE_ACCOUNT", + ((BigQueryConnection) connection).getAuthProperties().get("OAuthType")); + String query = "SELECT * FROM test.test;"; + Statement statement = connection.createStatement(); + ResultSet jsonResultSet = statement.executeQuery(query); + assertTrue(jsonResultSet.getClass().getName().contains("BigQueryJsonResultSet")); + connection.close(); + } + + private void validateTPCEnvironment() { + if (TPC_PROJECT_ID.isEmpty()) { + throw new IllegalArgumentException("TPC_PROJECT_ID is empty"); + } + if (TPC_SERVICE_ACCOUNT.isEmpty()) { + throw new IllegalArgumentException("TPC_SERVICE_ACCOUNT is empty"); + } + if (TPC_ENDPOINT.isEmpty()) { + throw new IllegalArgumentException("TPC_ENDPOINT is empty"); + } + if (TPC_PVT_KEY.isEmpty()) { + throw new IllegalArgumentException("TPC_PVT_KEY is empty"); + } + if (TPC_UNIVERSE_DOMAIN.isEmpty()) { + throw new IllegalArgumentException("TPC_UNIVERSE_DOMAIN is empty"); + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/rules/TimeZoneRule.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/rules/TimeZoneRule.java new file mode 100644 index 0000000000..ff5db108e8 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/rules/TimeZoneRule.java @@ -0,0 +1,57 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc.rules; + +import java.util.TimeZone; +import org.junit.rules.TestRule; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; + +public class TimeZoneRule implements TestRule { + + private final String timeZoneId; + private final TimeZone defaultTimeZone; + + public TimeZoneRule(String timeZoneId) { + this.timeZoneId = timeZoneId; + defaultTimeZone = TimeZone.getDefault(); + } + + @Override + public Statement apply(Statement base, Description description) { + return new Statement() { + @Override + public void evaluate() throws Throwable { + try { + TimeZone.setDefault(TimeZone.getTimeZone(timeZoneId)); + base.evaluate(); + } finally { + TimeZone.setDefault(defaultTimeZone); + } + } + }; + } + + /** + * Public method to enforce the rule from places like methods annotated with {@link + * org.junit.runners.Parameterized.Parameters} annotation which gets executed before this rule is + * applied. 
+ */ + public void enforce() { + TimeZone.setDefault(TimeZone.getTimeZone(timeZoneId)); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/ArrowUtilities.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/ArrowUtilities.java new file mode 100644 index 0000000000..13f3007667 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/ArrowUtilities.java @@ -0,0 +1,54 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc.utils; + +import com.google.api.core.InternalApi; +import com.google.protobuf.ByteString; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.nio.channels.Channels; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.VectorUnloader; +import org.apache.arrow.vector.ipc.WriteChannel; +import org.apache.arrow.vector.ipc.message.ArrowRecordBatch; +import org.apache.arrow.vector.ipc.message.MessageSerializer; +import org.apache.arrow.vector.types.pojo.Schema; + +@InternalApi("Used for testing purpose") +public class ArrowUtilities { + + public static ByteString serializeSchema(Schema schema) throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + MessageSerializer.serialize(new WriteChannel(Channels.newChannel(out)), schema); + return ByteString.readFrom(new ByteArrayInputStream(out.toByteArray())); + } + + public static ByteString serializeVectorSchemaRoot(VectorSchemaRoot root) throws IOException { + ByteArrayOutputStream out = new ByteArrayOutputStream(); + + ArrowRecordBatch recordBatch = new VectorUnloader(root).getRecordBatch(); + MessageSerializer.serialize(new WriteChannel(Channels.newChannel(out)), recordBatch); + return ByteString.readFrom(new ByteArrayInputStream(out.toByteArray())); + + // ArrowStreamWriter writer = new ArrowStreamWriter(root, null, Channels.newChannel(out)); + // writer.start(); + // writer.writeBatch(); + // writer.end(); + // return ByteString.readFrom(new ByteArrayInputStream(out.toByteArray())); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/TestUtilities.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/TestUtilities.java new file mode 100644 index 0000000000..419cb9b0bc --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/TestUtilities.java @@ -0,0 +1,143 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.jdbc.utils; + +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldList; +import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.StandardSQLTypeName; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.stream.Collectors; +import javax.sql.ConnectionEvent; +import javax.sql.ConnectionEventListener; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.JsonStringHashMap; + +public class TestUtilities { + + public static Tuple<Field, FieldValue> primitiveSchemaAndValue( + StandardSQLTypeName typeName, String value) { + Field schema = + Field.newBuilder(typeName.name() + "_column", typeName).setMode(Mode.NULLABLE).build(); + FieldValue primitiveValue = FieldValue.of(Attribute.PRIMITIVE, value); + + return Tuple.of(schema, primitiveValue); + } + + public static Tuple<Field, FieldValue> arraySchemaAndValue( + StandardSQLTypeName typeName, String... values) { + Field schema = + Field.newBuilder(typeName.name() + "_arr_column", typeName).setMode(Mode.REPEATED).build(); + + FieldValue arrayValues = + FieldValue.of( + Attribute.REPEATED, + FieldValueList.of( + Arrays.stream(values) + .map(value -> FieldValue.of(Attribute.PRIMITIVE, value)) + .collect(Collectors.toList()))); + + return Tuple.of(schema, arrayValues); + } + + @SuppressWarnings("unchecked") + public static <T> Tuple<ArrayList<Object>, ArrayList<T>> nestedResultSetToColumnLists( + ResultSet resultSet) throws SQLException { + ArrayList<Object> indexes = new ArrayList<>(); + ArrayList<T> columnValues = new ArrayList<>(); + while (resultSet.next()) { + indexes.add(resultSet.getObject(1)); + columnValues.add((T) resultSet.getObject(2)); + } + return Tuple.of(indexes, columnValues); + } + + @SafeVarargs + public static Tuple<FieldList, JsonStringHashMap<String, Object>> arrowStructOf( + Tuple<StandardSQLTypeName, Object>... tuples) { + ArrayList<Field> fields = new ArrayList<>(); + JsonStringHashMap<String, Object> values = new JsonStringHashMap<>(); + + for (Tuple<StandardSQLTypeName, Object> tuple : tuples) { + StandardSQLTypeName typeName = tuple.x(); + Object value = tuple.y(); + String name = typeName.name() + "_column"; + Field schema = Field.newBuilder(name, typeName).setMode(Mode.NULLABLE).build(); + fields.add(schema); + values.put(name, value); + } + return Tuple.of(FieldList.of(fields), values); + } + + @SafeVarargs + public static <T> Tuple<Field, JsonStringArrayList<T>> arrowArraySchemaAndValue( + StandardSQLTypeName typeName, T... values) { + Field schema = + Field.newBuilder(typeName.name() + "_arr_column", typeName).setMode(Mode.REPEATED).build(); + + JsonStringArrayList<T> arrayValues = new JsonStringArrayList<>(); + arrayValues.addAll(Arrays.asList(values)); + return Tuple.of(schema, arrayValues); + } + + @SafeVarargs + public static <T> JsonStringArrayList<T> arrowArrayOf(T... 
values) { + JsonStringArrayList<T> arrayValues = new JsonStringArrayList<>(); + arrayValues.addAll(Arrays.asList(values)); + return arrayValues; + } + + // struct of arrays + public static JsonStringHashMap<String, Object> toArrowStruct( + Iterable<Tuple<Field, JsonStringArrayList<Object>>> schemaAndValues) { + JsonStringHashMap<String, Object> struct = new JsonStringHashMap<>(); + for (Tuple<Field, JsonStringArrayList<Object>> schemaAndValue : schemaAndValues) { + Field schema = schemaAndValue.x(); + JsonStringArrayList<Object> value = schemaAndValue.y(); + struct.put(schema.getName(), value); + } + return struct; + } + + public static class TestConnectionListener implements ConnectionEventListener { + private int connectionClosedCount = 0; + private int connectionErrorCount = 0; + + @Override + public void connectionClosed(ConnectionEvent arg0) { + connectionClosedCount++; + } + + @Override + public void connectionErrorOccurred(ConnectionEvent arg0) { + connectionErrorCount++; + } + + public int getConnectionClosedCount() { + return connectionClosedCount; + } + + public int getConnectionErrorCount() { + return connectionErrorCount; + } + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/URIBuilder.java b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/URIBuilder.java new file mode 100644 index 0000000000..ce34f42f54 --- /dev/null +++ b/google-cloud-bigquery-jdbc/src/test/java/com/google/cloud/bigquery/jdbc/utils/URIBuilder.java @@ -0,0 +1,34 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery.jdbc.utils; + +public class URIBuilder { + StringBuilder builder; + + public URIBuilder(String baseUri) { + builder = new StringBuilder(baseUri); + } + + public URIBuilder append(String key, Object value) { + builder.append(String.format("%s=%s;", key, value.toString())); + return this; + } + + public String toString() { + return builder.toString(); + } +} diff --git a/google-cloud-bigquery-jdbc/src/test/resources/fake.p12 b/google-cloud-bigquery-jdbc/src/test/resources/fake.p12 new file mode 100644 index 0000000000..d9e064411d Binary files /dev/null and b/google-cloud-bigquery-jdbc/src/test/resources/fake.p12 differ diff --git a/google-cloud-bigquery-jdbc/src/test/resources/test_truststore_nopass.jks b/google-cloud-bigquery-jdbc/src/test/resources/test_truststore_nopass.jks new file mode 100644 index 0000000000..c408465500 Binary files /dev/null and b/google-cloud-bigquery-jdbc/src/test/resources/test_truststore_nopass.jks differ diff --git a/google-cloud-bigquery-jdbc/src/test/resources/test_truststore_withpass.jks b/google-cloud-bigquery-jdbc/src/test/resources/test_truststore_withpass.jks new file mode 100644 index 0000000000..824be2d6f4 Binary files /dev/null and b/google-cloud-bigquery-jdbc/src/test/resources/test_truststore_withpass.jks differ diff --git a/google-cloud-bigquery/clirr-ignored-differences.xml b/google-cloud-bigquery/clirr-ignored-differences.xml index 6c1fd8cd6c..cef0f30f73 100644 --- a/google-cloud-bigquery/clirr-ignored-differences.xml +++ b/google-cloud-bigquery/clirr-ignored-differences.xml @@ -4,7 +4,227 @@ 7013 - com/google/cloud/bigquery/RoutineInfo$Builder - com.google.cloud.bigquery.RoutineInfo$Builder setDeterminismLevel(java.lang.String) + com/google/cloud/bigquery/ExternalTableDefinition* + *TimeZone(*) - \ No newline at end of file + + 7013 + com/google/cloud/bigquery/ExternalTableDefinition* + *DateFormat(*) + + + 7013 + com/google/cloud/bigquery/ExternalTableDefinition* + *DatetimeFormat(*) + + + 7013 + com/google/cloud/bigquery/ExternalTableDefinition* + *TimeFormat(*) + + + 7013 + com/google/cloud/bigquery/ExternalTableDefinition* + *TimestampFormat(*) + + + 7013 + com/google/cloud/bigquery/ExternalTableDefinition* + *SourceColumnMatch(*) + + + 7013 + com/google/cloud/bigquery/ExternalTableDefinition* + *NullMarkers(*) + + + 7013 + com/google/cloud/bigquery/LoadJobConfiguration* + *TimeZone(*) + + + 7013 + com/google/cloud/bigquery/LoadJobConfiguration* + *DateFormat(*) + + + 7013 + com/google/cloud/bigquery/LoadJobConfiguration* + *DatetimeFormat(*) + + + 7013 + com/google/cloud/bigquery/LoadJobConfiguration* + *TimeFormat(*) + + + 7013 + com/google/cloud/bigquery/LoadJobConfiguration* + *TimestampFormat(*) + + + 7013 + com/google/cloud/bigquery/LoadJobConfiguration* + *SourceColumnMatch(*) + + + 7013 + com/google/cloud/bigquery/LoadJobConfiguration* + *NullMarkers(*) + + + 7004 + com/google/cloud/bigquery/BigQueryRetryHelper + java.lang.Object runWithRetries(java.util.concurrent.Callable, com.google.api.gax.retrying.RetrySettings, com.google.api.gax.retrying.ResultRetryAlgorithm, com.google.api.core.ApiClock, com.google.cloud.bigquery.BigQueryRetryConfig) + A Tracer object is needed to use Otel and runWithRetries is only called in a few files, so it should be fine to update the signature + + + 7004 + com/google/cloud/bigquery/spi/v2/BigQueryRpc + com.google.api.services.bigquery.model.GetQueryResultsResponse getQueryResultsWithRowLimit(java.lang.String, java.lang.String, java.lang.String, java.lang.Integer) 
+ getQueryResultsWithRowLimit is just used by ConnectionImpl at the moment so it should be fine to update the signature instead of writing an overloaded method + + + 7004 + com/google/cloud/bigquery/spi/v2/HttpBigQueryRpc + com.google.api.services.bigquery.model.GetQueryResultsResponse getQueryResultsWithRowLimit(java.lang.String, java.lang.String, java.lang.String, java.lang.Integer) + getQueryResultsWithRowLimit is just used by ConnectionImpl at the moment so it should be fine to update the signature instead of writing an overloaded method + + + 7006 + com/google/cloud/bigquery/BigQueryOptions* + *getBigQueryRpcV2(*) + com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc + getBigQueryRpcV2 is protected and is only used within the BigQuery package + + + 7013 + com/google/cloud/bigquery/ExternalTableDefinition* + *ReferenceFileSchemaUri(*) + + + 7013 + com/google/cloud/bigquery/RoutineInfo* + *DataGovernanceType(*) + + + 7013 + com/google/cloud/bigquery/RoutineInfo* + *RemoteFunctionOptions(*) + + + 7013 + com/google/cloud/bigquery/TableInfo* + *ResourceTags(*) + + + 7012 + com/google/cloud/bigquery/BigQuery + java.lang.Object queryWithTimeout(com.google.cloud.bigquery.QueryJobConfiguration, com.google.cloud.bigquery.JobId, java.lang.Long, com.google.cloud.bigquery.BigQuery$JobOption[]) + + + 7012 + com/google/cloud/bigquery/Connection + com.google.common.util.concurrent.ListenableFuture executeSelectAsync(java.lang.String) + + + 7012 + com/google/cloud/bigquery/Connection + com.google.common.util.concurrent.ListenableFuture executeSelectAsync(java.lang.String, java.util.List, java.util.Map[]) + + + 7013 + com/google/cloud/bigquery/DatasetInfo* + *DefaultCollation(*) + + + 7013 + com/google/cloud/bigquery/TableInfo* + *DefaultCollation(*) + + + 7013 + com/google/cloud/bigquery/TableInfo* + *CloneDefinition(*) + + + 7013 + com/google/cloud/bigquery/StandardTableDefinition* + *TableConstraints(*) + + + 7013 + com/google/cloud/bigquery/StandardTableDefinition* + *NumActiveLogicalBytes(*) + + + 7013 + com/google/cloud/bigquery/StandardTableDefinition* + *NumActivePhysicalBytes(*) + + + 7013 + com/google/cloud/bigquery/StandardTableDefinition* + *NumLongTermLogicalBytes(*) + + + 7013 + com/google/cloud/bigquery/StandardTableDefinition* + *NumLongTermPhysicalBytes(*) + + + 7013 + com/google/cloud/bigquery/StandardTableDefinition* + *NumTimeTravelPhysicalBytes(*) + + + 7013 + com/google/cloud/bigquery/StandardTableDefinition* + *NumTotalLogicalBytes(*) + + + 7013 + com/google/cloud/bigquery/StandardTableDefinition* + *NumTotalPhysicalBytes(*) + + + 7013 + com/google/cloud/bigquery/TableInfo* + *TableConstraints(*) + + + 7013 + com/google/cloud/bigquery/IndexUnusedReason* + *BaseTableId(*) + + + 7002 + com/google/cloud/bigquery/IndexUnusedReason* + *BaseTable(*) + + + 7013 + com/google/cloud/bigquery/DatasetInfo* + *setExternalDatasetReference(*) + + + 7013 + com/google/cloud/bigquery/DatasetInfo* + *setStorageBillingModel(*) + + + 7013 + com/google/cloud/bigquery/StandardTableDefinition* + *BigLakeConfiguration(*) + + + 7013 + com/google/cloud/bigquery/DatasetInfo* + *setMaxTimeTravelHours(*) + + + 7013 + com/google/cloud/bigquery/DatasetInfo* + *setResourceTags(*) + + \ No newline at end of file diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml index 1e5b150576..88076302b3 100644 --- a/google-cloud-bigquery/pom.xml +++ b/google-cloud-bigquery/pom.xml @@ -3,7 +3,7 @@ 4.0.0 com.google.cloud google-cloud-bigquery - 1.127.5-SNAPSHOT + 2.60.1-SNAPSHOT jar BigQuery 
https://github.com/googleapis/java-bigquery @@ -11,7 +11,7 @@ com.google.cloud google-cloud-bigquery-parent - 1.127.5-SNAPSHOT + 2.60.1-SNAPSHOT google-cloud-bigquery @@ -27,7 +27,7 @@ com.google.http-client - google-http-client-jackson2 + google-http-client-gson com.google.cloud @@ -46,6 +46,14 @@ org.checkerframework checker-compat-qual + + org.checkerframework + checker-qual + + + com.google.auth + google-auth-library-credentials + com.google.auth google-auth-library-oauth2-http @@ -77,9 +85,37 @@ com.google.api gax + + com.google.code.gson + gson + org.threeten - threetenbp + threeten-extra + + + com.google.protobuf + protobuf-java + + + com.google.cloud + google-cloud-bigquerystorage + + + com.google.api.grpc + proto-google-cloud-bigquerystorage-v1 + + + org.apache.arrow + arrow-vector + + + org.apache.arrow + arrow-memory-core + + + org.apache.arrow + arrow-memory-netty @@ -88,15 +124,62 @@ error_prone_annotations + + + io.opentelemetry + opentelemetry-api + + + io.opentelemetry + opentelemetry-context + + - junit - junit + com.google.api + gax + testlib + test + + + com.google.cloud + google-cloud-datacatalog + test + + + com.google.cloud + google-cloud-bigqueryconnection + test + + + com.google.api.grpc + proto-google-cloud-bigqueryconnection-v1 + test + + + com.google.cloud + google-cloud-storage + test + + + org.junit.jupiter + junit-jupiter-api + test + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.mockito + mockito-junit-jupiter test com.google.truth truth + test org.mockito @@ -104,14 +187,50 @@ test - org.assertj - assertj-core + com.google.cloud + google-cloud-datacatalog + test + + + com.google.api.grpc + proto-google-cloud-datacatalog-v1 + test + + + + + io.opentelemetry + opentelemetry-sdk + test + + + io.opentelemetry + opentelemetry-sdk-common + test + + + io.opentelemetry + opentelemetry-sdk-trace test + + + + org.apache.maven.plugins + maven-failsafe-plugin + 3.5.2 + + + org.apache.maven.surefire + surefire-junit-platform + ${surefire.version} + + + org.codehaus.mojo @@ -124,7 +243,7 @@ org.codehaus.mojo build-helper-maven-plugin - 3.2.0 + 3.6.1 add-source @@ -146,4 +265,54 @@ + + + + arrow-config + + [9,) + + + + + org.apache.maven.plugins + maven-compiler-plugin + + UTF-8 + true + + -J--add-opens=java.base/java.nio=org.apache.arrow.memory.core,ALL-UNNAMED + -J--add-opens=java.base/java.nio=java-base,ALL-UNNAMED + + + + + + + + java17 + + [17,) + + + !jvm + + + + + + org.apache.maven.plugins + maven-surefire-plugin + + --add-opens=java.base/java.nio=ALL-UNNAMED + + + + + + + diff --git a/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/Benchmark.java b/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/Benchmark.java index e6fc16e176..9d05f6b2b4 100644 --- a/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/Benchmark.java +++ b/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/Benchmark.java @@ -16,17 +16,17 @@ package com.google.cloud.bigquery.benchmark; -import com.google.api.client.json.jackson2.JacksonFactory; +import com.google.api.client.json.gson.GsonFactory; import com.google.cloud.bigquery.BigQuery; import com.google.cloud.bigquery.BigQueryOptions; import com.google.cloud.bigquery.FieldValue; import com.google.cloud.bigquery.QueryJobConfiguration; import com.google.cloud.bigquery.TableResult; import java.io.FileInputStream; +import java.time.Clock; +import java.time.Duration; +import java.time.Instant; import java.util.List; -import 
org.threeten.bp.Clock; -import org.threeten.bp.Duration; -import org.threeten.bp.Instant; public class Benchmark { @@ -40,7 +40,7 @@ public static void main(String[] args) throws Exception { return; } String[] requests = - new JacksonFactory() + new GsonFactory() .createJsonParser(new FileInputStream(args[0])) .parseAndClose(String[].class); diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/AbstractJdbcResultSet.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/AbstractJdbcResultSet.java new file mode 100644 index 0000000000..5b82469250 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/AbstractJdbcResultSet.java @@ -0,0 +1,910 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import java.io.InputStream; +import java.io.Reader; +import java.math.BigDecimal; +import java.net.URL; +import java.sql.*; +import java.util.Calendar; +import java.util.Map; + +abstract class AbstractJdbcResultSet implements ResultSet { + + @Override + public BigDecimal getBigDecimal(String columnLabel, int scale) throws SQLException { + // TODO: Implement the logic + throw new RuntimeException("Not implemented"); + } + + @Override + public void close() throws SQLException { + // TODO: Implement the logic + throw new RuntimeException("Not implemented"); + } + + @Override + public boolean wasNull() throws SQLException { + // TODO: Implement the logic + throw new RuntimeException("Not implemented"); + } + + @Override + public byte getByte(int columnIndex) throws SQLException { + // TODO: Implement the logic + throw new RuntimeException("Not implemented"); + } + + @Override + public byte getByte(String column) throws SQLException { + // TODO: Implement the logic + throw new RuntimeException("Not implemented"); + } + + @Override + public short getShort(int columnIndex) throws SQLException { + // TODO: Implement the logic + throw new RuntimeException("Not implemented"); + } + + @Override + public float getFloat(int columnIndex) throws SQLException { + // TODO: Implement the logic + throw new RuntimeException("Not implemented"); + } + + @Override + public BigDecimal getBigDecimal(int columnIndex, int scale) throws SQLException { + // TODO: Implement the logic + throw new RuntimeException("Not implemented"); + } + + @Override + public InputStream getAsciiStream(int columnIndex) throws SQLException { + // TODO: Implement the logic + throw new RuntimeException("Not implemented"); + } + + @Override + public InputStream getUnicodeStream(int columnIndex) throws SQLException { + // TODO: Implement the logic + throw new RuntimeException("Not implemented"); + } + + @Override + public InputStream getBinaryStream(int columnIndex) throws SQLException { + // TODO: Implement the logic + throw new RuntimeException("Not implemented"); + } + + @Override + public int findColumn(String columnLabel) throws SQLException { + // TODO: Implement the logic + throw new 
RuntimeException("Not implemented"); + } + + @Override + public Reader getCharacterStream(int columnIndex) throws SQLException { + // TODO: Implement the logic + throw new RuntimeException("Not implemented"); + } + + @Override + public Reader getCharacterStream(String columnLabel) throws SQLException { + // TODO: Implement the logic + throw new RuntimeException("Not implemented"); + } + + @Override + public SQLWarning getWarnings() throws SQLException { + return null; + } + + @Override + public void clearWarnings() throws SQLException {} + + @Override + public String getCursorName() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public boolean isLast() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void beforeFirst() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void afterLast() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public boolean first() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public boolean last() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public boolean absolute(int row) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public boolean relative(int rows) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public boolean previous() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void setFetchDirection(int direction) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public int getFetchDirection() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void setFetchSize(int rows) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public int getFetchSize() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public int getType() throws SQLException { + return TYPE_FORWARD_ONLY; + } + + @Override + public int getConcurrency() throws SQLException { + return CONCUR_READ_ONLY; + } + + @Override + public boolean rowUpdated() throws SQLException { + return false; + } + + @Override + public boolean rowInserted() throws SQLException { + return false; + } + + @Override + public boolean rowDeleted() throws SQLException { + return false; + } + + @Override + public void updateNull(int columnIndex) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBoolean(int columnIndex, boolean x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateByte(int columnIndex, byte x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateShort(int columnIndex, short x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateInt(int columnIndex, int x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateLong(int columnIndex, long x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateFloat(int columnIndex, float x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateDouble(int columnIndex, double x) 
throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBigDecimal(int columnIndex, BigDecimal x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateString(int columnIndex, String x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBytes(int columnIndex, byte[] x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateDate(int columnIndex, Date x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateTime(int columnIndex, Time x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateTimestamp(int columnIndex, Timestamp x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x, int length) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x, int length) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x, int length) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateObject(int columnIndex, Object x, int scaleOrLength) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateObject(int columnIndex, Object x, SQLType type, int scaleOrLength) + throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateObject(int columnIndex, Object x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateObject(int columnIndex, Object x, SQLType type) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateNull(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBoolean(String columnLabel, boolean x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateByte(String columnLabel, byte x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateShort(String columnLabel, short x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateInt(String columnLabel, int x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateLong(String columnLabel, long x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateFloat(String columnLabel, float x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateDouble(String columnLabel, double x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBigDecimal(String columnLabel, BigDecimal x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateString(String columnLabel, String x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBytes(String columnLabel, byte[] x) throws SQLException 
{ + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateDate(String columnLabel, Date x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateTime(String columnLabel, Time x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateTimestamp(String columnLabel, Timestamp x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, int length) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, int length) + throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, int length) + throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateObject(String columnLabel, Object x, int scaleOrLength) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateObject(String columnLabel, Object x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateObject(String columnLabel, Object x, SQLType type) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateObject(String columnLabel, Object x, SQLType type, int scaleOrLength) + throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void insertRow() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateRow() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void deleteRow() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void refreshRow() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void cancelRowUpdates() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void moveToInsertRow() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void moveToCurrentRow() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Ref getRef(int columnIndex) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Ref getRef(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateRef(int columnIndex, Ref x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateRef(String columnLabel, Ref x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBlob(int columnIndex, Blob x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBlob(String columnLabel, Blob x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateClob(int columnIndex, Clob x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateClob(String columnLabel, Clob x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void 
updateArray(int columnIndex, Array x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateArray(String columnLabel, Array x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public RowId getRowId(int columnIndex) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public RowId getRowId(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateRowId(int columnIndex, RowId x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateRowId(String columnLabel, RowId x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateNString(int columnIndex, String nString) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateNString(String columnLabel, String nString) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateNClob(int columnIndex, NClob nClob) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateNClob(String columnLabel, NClob nClob) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public SQLXML getSQLXML(int columnIndex) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public SQLXML getSQLXML(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateSQLXML(int columnIndex, SQLXML xmlObject) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateSQLXML(String columnLabel, SQLXML xmlObject) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateNCharacterStream(int columnIndex, Reader x, long length) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader, long length) + throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x, long length) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x, long length) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x, long length) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x, long length) + throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x, long length) + throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader, long length) + throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBlob(int columnIndex, InputStream inputStream, long length) + throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBlob(String columnLabel, InputStream inputStream, long length) + 
throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateClob(int columnIndex, Reader reader, long length) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateClob(String columnLabel, Reader reader, long length) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateNClob(int columnIndex, Reader reader, long length) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateNClob(String columnLabel, Reader reader, long length) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateNCharacterStream(int columnIndex, Reader x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateNCharacterStream(String columnLabel, Reader reader) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateAsciiStream(int columnIndex, InputStream x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBinaryStream(int columnIndex, InputStream x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateCharacterStream(int columnIndex, Reader x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateAsciiStream(String columnLabel, InputStream x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBinaryStream(String columnLabel, InputStream x) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateCharacterStream(String columnLabel, Reader reader) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBlob(int columnIndex, InputStream inputStream) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateBlob(String columnLabel, InputStream inputStream) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateClob(int columnIndex, Reader reader) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateClob(String columnLabel, Reader reader) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateNClob(int columnIndex, Reader reader) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void updateNClob(String columnLabel, Reader reader) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public boolean isBeforeFirst() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public boolean isAfterLast() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public boolean isFirst() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public int getRow() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Statement getStatement() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Object getObject(int columnIndex, Map> map) throws SQLException { + throw new SQLFeatureNotSupportedException(); + 
} + + @Override + public Blob getBlob(int columnIndex) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Clob getClob(int columnIndex) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Array getArray(int columnIndex) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public float getFloat(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Object getObject(String columnLabel, Map> map) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Blob getBlob(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Clob getClob(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Array getArray(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Date getDate(int columnIndex, Calendar cal) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Date getDate(String columnLabel, Calendar cal) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Time getTime(int columnIndex, Calendar cal) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Time getTime(String columnLabel, Calendar cal) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Timestamp getTimestamp(int columnIndex, Calendar cal) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Timestamp getTimestamp(String columnLabel, Calendar cal) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public URL getURL(int columnIndex) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public URL getURL(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public int getHoldability() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public boolean isClosed() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public NClob getNClob(int columnIndex) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public NClob getNClob(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public String getNString(int columnIndex) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public String getNString(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Reader getNCharacterStream(int columnIndex) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Reader getNCharacterStream(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public T getObject(int columnIndex, Class type) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public T getObject(String columnLabel, Class type) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public short getShort(String columnLabel) throws 
SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public InputStream getAsciiStream(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public InputStream getUnicodeStream(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public InputStream getBinaryStream(String columnLabel) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public ResultSetMetaData getMetaData() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public T unwrap(Class iface) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public boolean isWrapperFor(Class iface) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java index 48ff863420..d52124092a 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java @@ -20,16 +20,19 @@ import com.google.api.core.ApiFunction; import com.google.api.services.bigquery.model.Dataset.Access; +import com.google.api.services.bigquery.model.DatasetAccessEntry; +import com.google.api.services.bigquery.model.Expr; import com.google.cloud.StringEnumType; import com.google.cloud.StringEnumValue; import java.io.Serializable; +import java.util.List; import java.util.Objects; /** * Access Control for a BigQuery Dataset. BigQuery uses ACLs to manage permissions on datasets. ACLs * are not directly supported on tables. A table inherits its ACL from the dataset that contains it. * Project roles affect your ability to run jobs or manage the project, while dataset roles affect - * how you can access or modify the data inside of a project. + * how you can access or modify the data inside a project. * * @see Access Control */ @@ -39,6 +42,7 @@ public final class Acl implements Serializable { private final Entity entity; private final Role role; + private final Expr condition; /** * Dataset roles supported by BigQuery. @@ -80,12 +84,17 @@ public static Role valueOfStrict(String constant) { return type.valueOfStrict(constant); } - /** Get the Role for the given String constant, and allow unrecognized values. */ + /** + * @param constant + * @return Get the Role for the given String constant, and allow unrecognized values. + */ public static Role valueOf(String constant) { return type.valueOf(constant); } - /** Return the known values for Role. */ + /** + * @return Return the known values for Role. + */ public static Role[] values() { return type.values(); } @@ -105,7 +114,8 @@ public enum Type { USER, VIEW, IAM_MEMBER, - ROUTINE + ROUTINE, + DATASET } Entity(Type type) { @@ -119,6 +129,11 @@ public Type getType() { abstract Access toPb(); static Entity fromPb(Access access) { + if (access.getDataset() != null) { + return new DatasetAclEntity( + DatasetId.fromPb(access.getDataset().getDataset()), + access.getDataset().getTargetTypes()); + } if (access.getDomain() != null) { return new Domain(access.getDomain()); } @@ -146,6 +161,73 @@ static Entity fromPb(Access access) { } } + /** + * Class for a BigQuery DatasetAclEntity ACL entity. Objects of this class represent a + * DatasetAclEntity from a different DatasetAclEntity to grant access to. 
Only views are supported + * for now. The role field is not required when this field is set. If that DatasetAclEntity is + * deleted and re-created, its access needs to be granted again via an update operation. + */ + public static final class DatasetAclEntity extends Entity { + + private static final long serialVersionUID = -8392885851733136526L; + + private final DatasetId id; + private final List targetTypes; + + /** + * Creates a DatasetAclEntity given the DatasetAclEntity's id. + * + * @param id + * @param targetTypes + */ + public DatasetAclEntity(DatasetId id, List targetTypes) { + super(Type.DATASET); + this.id = id; + this.targetTypes = targetTypes; + } + + /** + * @return Returns DatasetAclEntity's identity. + */ + public DatasetId getId() { + return id; + } + + public List getTargetTypes() { + return targetTypes; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + DatasetAclEntity datasetAclEntity = (DatasetAclEntity) obj; + return Objects.equals(getType(), datasetAclEntity.getType()) + && Objects.equals(id, datasetAclEntity.id) + && Objects.equals(targetTypes, datasetAclEntity.targetTypes); + } + + @Override + public int hashCode() { + return Objects.hash(getType(), id); + } + + @Override + public String toString() { + return toPb().toString(); + } + + @Override + Access toPb() { + return new Access() + .setDataset(new DatasetAccessEntry().setDataset(id.toPb()).setTargetTypes(targetTypes)); + } + } + /** * Class for a BigQuery Domain entity. Objects of this class represent a domain to grant access * to. Any users signed in with the domain specified will be granted the specified access. @@ -162,7 +244,9 @@ public Domain(String domain) { this.domain = domain; } - /** Returns the domain name. */ + /** + * @return Returns the domain name. + */ public String getDomain() { return domain; } @@ -223,9 +307,9 @@ public Group(String identifier) { } /** - * Returns group's identifier, can be either a - * special group identifier or a group email. + * @return Returns group's identifier, can be either a + * special group identifier or a group email. */ public String getIdentifier() { return identifier; @@ -270,22 +354,30 @@ Access toPb() { } } - /** Returns a Group entity representing all project's owners. */ + /** + * @return Returns a Group entity representing all project's owners. + */ public static Group ofProjectOwners() { return new Group(PROJECT_OWNERS); } - /** Returns a Group entity representing all project's readers. */ + /** + * @return Returns a Group entity representing all project's readers. + */ public static Group ofProjectReaders() { return new Group(PROJECT_READERS); } - /** Returns a Group entity representing all project's writers. */ + /** + * @return Returns a Group entity representing all project's writers. + */ public static Group ofProjectWriters() { return new Group(PROJECT_WRITERS); } - /** Returns a Group entity representing all BigQuery authenticated users. */ + /** + * @return Returns a Group entity representing all BigQuery authenticated users. + */ public static Group ofAllAuthenticatedUsers() { return new Group(ALL_AUTHENTICATED_USERS); } @@ -307,7 +399,9 @@ public User(String email) { this.email = email; } - /** Returns user's email. */ + /** + * @return Returns user's email. + */ public String getEmail() { return email; } @@ -342,9 +436,10 @@ Access toPb() { /** * Class for a BigQuery View entity. 
Objects of this class represent a view from a different - * dataset to grant access to. Queries executed against that view will have read access to tables - * in this dataset. The role field is not required when this field is set. If that view is updated - * by any user, access to the view needs to be granted again via an update operation. + * datasetAclEntity to grant access to. Queries executed against that view will have read access + * to tables in this datasetAclEntity. The role field is not required when this field is set. If + * that view is updated by any user, access to the view needs to be granted again via an update + * operation. */ public static final class View extends Entity { @@ -358,7 +453,9 @@ public View(TableId id) { this.id = id; } - /** Returns table's identity. */ + /** + * @return Returns table's identity. + */ public TableId getId() { return id; } @@ -393,10 +490,10 @@ Access toPb() { /** * Class for a BigQuery Routine entity. Objects of this class represent a routine from a different - * dataset to grant access to. Queries executed against that routine will have read access to - * views/tables/routines in this dataset. Only UDF is supported for now. The role field is not - * required when this field is set. If that routine is updated by any user, access to the routine - * needs to be granted again via an update operation. + * datasetAclEntity to grant access to. Queries executed against that routine will have read + * access to views/tables/routines in this datasetAclEntity. Only UDF is supported for now. The + * role field is not required when this field is set. If that routine is updated by any user, + * access to the routine needs to be granted again via an update operation. */ public static final class Routine extends Entity { @@ -410,7 +507,9 @@ public Routine(RoutineId id) { this.id = id; } - /** Returns routine's identity. */ + /** + * @return Returns routine's identity. + */ public RoutineId getId() { return id; } @@ -458,7 +557,9 @@ public IamMember(String iamMember) { this.iamMember = iamMember; } - /** Returns iamMember. */ + /** + * @return Returns iamMember. + */ public String getIamMember() { return iamMember; } @@ -491,24 +592,175 @@ Access toPb() { } } + /** Expr represents the conditional information related to dataset access policies. */ + public static final class Expr implements Serializable { + // Textual representation of an expression in Common Expression Language syntax. + private final String expression; + + /** + * Optional. Title for the expression, i.e. a short string describing its purpose. This can be + * used e.g. in UIs which allow to enter the expression. + */ + private final String title; + + /** + * Optional. Description of the expression. This is a longer text which describes the + * expression, e.g. when hovered over it in a UI. + */ + private final String description; + + /** + * Optional. String indicating the location of the expression for error reporting, e.g. a file + * name and a position in the file. 
+ */ + private final String location; + + private static final long serialVersionUID = 7358264726377291156L; + + static final class Builder { + private String expression; + private String title; + private String description; + private String location; + + Builder() {} + + Builder(Expr expr) { + this.expression = expr.expression; + this.title = expr.title; + this.description = expr.description; + this.location = expr.location; + } + + Builder(com.google.api.services.bigquery.model.Expr bqExpr) { + this.expression = bqExpr.getExpression(); + if (bqExpr.getTitle() != null) { + this.title = bqExpr.getTitle(); + } + if (bqExpr.getDescription() != null) { + this.description = bqExpr.getDescription(); + } + if (bqExpr.getLocation() != null) { + this.location = bqExpr.getLocation(); + } + } + + public Builder setExpression(String expression) { + this.expression = expression; + return this; + } + + public Builder setTitle(String title) { + this.title = title; + return this; + } + + public Builder setDescription(String description) { + this.description = description; + return this; + } + + public Builder setLocation(String location) { + this.location = location; + return this; + } + + public Expr build() { + return new Expr(this); + } + } + + public Expr(Builder builder) { + this.expression = builder.expression; + this.title = builder.title; + this.description = builder.description; + this.location = builder.location; + } + + public Expr(String expression, String title, String description, String location) { + this.expression = expression; + this.title = title; + this.description = description; + this.location = location; + } + + com.google.api.services.bigquery.model.Expr toPb() { + com.google.api.services.bigquery.model.Expr bqExpr = + new com.google.api.services.bigquery.model.Expr(); + bqExpr.setExpression(this.expression); + bqExpr.setTitle(this.title); + bqExpr.setDescription(this.description); + bqExpr.setLocation(this.location); + return bqExpr; + } + + static Expr fromPb(com.google.api.services.bigquery.model.Expr bqExpr) { + return new Builder(bqExpr).build(); + } + + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public int hashCode() { + return Objects.hash(expression, title, description, location); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + final Expr other = (Expr) obj; + return Objects.equals(this.expression, other.expression) + && Objects.equals(this.title, other.title) + && Objects.equals(this.description, other.description) + && Objects.equals(this.location, other.location); + } + + @Override + public String toString() { + return toPb().toString(); + } + } + private Acl(Entity entity, Role role) { + this(entity, role, null); + } + + private Acl(Entity entity, Role role, Expr condition) { this.entity = checkNotNull(entity); this.role = role; + this.condition = condition; } - /** Returns the entity for this ACL. */ + /** + * @return Returns the entity for this ACL. + */ public Entity getEntity() { return entity; } - /** Returns the role specified by this ACL. */ + /** + * @return Returns the role specified by this ACL. + */ public Role getRole() { return role; } /** - * Returns an Acl object. - * + * @return Returns the condition specified by this ACL. + */ + public Expr getCondition() { + return condition; + } + + /** + * @return Returns an Acl object. 
* @param entity the entity for this ACL object * @param role the role to associate to the {@code entity} object */ @@ -516,19 +768,37 @@ public static Acl of(Entity entity, Role role) { return new Acl(entity, role); } - /** Returns an Acl object for a view entity. */ + public static Acl of(Entity entity, Role role, Expr condition) { + return new Acl(entity, role, condition); + } + + /** + * @param datasetAclEntity + * @return Returns an Acl object for a datasetAclEntity. + */ + public static Acl of(DatasetAclEntity datasetAclEntity) { + return new Acl(datasetAclEntity, null); + } + + /** + * @param view + * @return Returns an Acl object for a view entity. + */ public static Acl of(View view) { return new Acl(view, null); } - /** Returns an Acl object for a routine entity. */ + /** + * @param routine + * @return Returns an Acl object for a routine entity. + */ public static Acl of(Routine routine) { return new Acl(routine, null); } @Override public int hashCode() { - return Objects.hash(entity, role); + return Objects.hash(entity, role, condition); } @Override @@ -545,7 +815,9 @@ public boolean equals(Object obj) { return false; } final Acl other = (Acl) obj; - return Objects.equals(this.entity, other.entity) && Objects.equals(this.role, other.role); + return Objects.equals(this.entity, other.entity) + && Objects.equals(this.role, other.role) + && Objects.equals(this.condition, other.condition); } Access toPb() { @@ -553,11 +825,16 @@ Access toPb() { if (role != null) { accessPb.setRole(role.name()); } + if (condition != null) { + accessPb.setCondition(condition.toPb()); + } return accessPb; } static Acl fromPb(Access access) { return Acl.of( - Entity.fromPb(access), access.getRole() != null ? Role.valueOf(access.getRole()) : null); + Entity.fromPb(access), + access.getRole() != null ? Role.valueOf(access.getRole()) : null, + access.getCondition() != null ? 
Expr.fromPb(access.getCondition()) : null); } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Labels.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Annotations.java similarity index 87% rename from google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Labels.java rename to google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Annotations.java index 8f91aa37ae..5d8d1e7e0c 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Labels.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Annotations.java @@ -28,9 +28,9 @@ import javax.annotation.Nullable; @AutoValue -abstract class Labels implements Serializable { +abstract class Annotations implements Serializable { private static final long serialVersionUID = 1L; - static final Labels ZERO = of(Collections.emptyMap()); + static final Annotations ZERO = of(Collections.emptyMap()); @Nullable abstract Map userMap(); @@ -59,13 +59,13 @@ Map toPb() { return Collections.unmodifiableMap(pbMap); } - private static Labels of(Map userMap) { + private static Annotations of(Map userMap) { Preconditions.checkArgument( userMap == null || !userMap.containsKey(null), "null keys are not supported"); - return new AutoValue_Labels(userMap); + return new AutoValue_Annotations(userMap); } - static Labels fromUser(Map map) { + static Annotations fromUser(Map map) { if (map == null || map instanceof ImmutableMap) { return of(map); } @@ -73,7 +73,7 @@ static Labels fromUser(Map map) { return of(Collections.unmodifiableMap(new HashMap<>(map))); } - static Labels fromPb(Map pb) { + static Annotations fromPb(Map pb) { if (Data.isNull(pb)) { return of(null); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/AvroOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/AvroOptions.java new file mode 100644 index 0000000000..dd5964f2db --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/AvroOptions.java @@ -0,0 +1,112 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.common.base.MoreObjects; +import java.util.Objects; + +/** + * Google BigQuery options for AVRO format. This class wraps some properties of AVRO files used by + * BigQuery to parse external data. + */ +public final class AvroOptions extends FormatOptions { + + private static final long serialVersionUID = 2293570529308612712L; + + private final Boolean useAvroLogicalTypes; + + public static final class Builder { + + private Boolean useAvroLogicalTypes; + + private Builder() {} + + private Builder(AvroOptions avroOptions) { + this.useAvroLogicalTypes = avroOptions.useAvroLogicalTypes; + } + + /** + * [Optional] Sets whether BigQuery should interpret logical types as the corresponding BigQuery + * data type (for example, TIMESTAMP), instead of using the raw type (for example, INTEGER). 
+ */ + public Builder setUseAvroLogicalTypes(boolean useAvroLogicalTypes) { + this.useAvroLogicalTypes = useAvroLogicalTypes; + return this; + } + + /** Creates a {@code AvroOptions} object. */ + public AvroOptions build() { + return new AvroOptions(this); + } + } + + private AvroOptions(Builder builder) { + super(FormatOptions.AVRO); + this.useAvroLogicalTypes = builder.useAvroLogicalTypes; + } + + /** + * Returns whether BigQuery should interpret logical types as the corresponding BigQuery data type + * (for example, TIMESTAMP), instead of using the raw type (for example, INTEGER). + */ + public Boolean useAvroLogicalTypes() { + return useAvroLogicalTypes; + } + + public Builder toBuilder() { + return new Builder(this); + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("type", getType()) + .add("useAvroLogicalTypes", useAvroLogicalTypes) + .toString(); + } + + @Override + public int hashCode() { + return Objects.hash(getType(), useAvroLogicalTypes); + } + + @Override + public boolean equals(Object obj) { + return obj == this + || obj instanceof AvroOptions && Objects.equals(toPb(), ((AvroOptions) obj).toPb()); + } + + com.google.api.services.bigquery.model.AvroOptions toPb() { + com.google.api.services.bigquery.model.AvroOptions avroOptions = + new com.google.api.services.bigquery.model.AvroOptions(); + avroOptions.setUseAvroLogicalTypes(useAvroLogicalTypes); + return avroOptions; + } + + /** Returns a builder for a AvroOptions object. */ + public static AvroOptions.Builder newBuilder() { + return new AvroOptions.Builder(); + } + + static AvroOptions fromPb(com.google.api.services.bigquery.model.AvroOptions avroOptions) { + Builder builder = newBuilder(); + if (avroOptions.getUseAvroLogicalTypes() != null) { + builder.setUseAvroLogicalTypes(avroOptions.getUseAvroLogicalTypes()); + } + return builder.build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BiEngineReason.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BiEngineReason.java new file mode 100644 index 0000000000..e682ba2461 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BiEngineReason.java @@ -0,0 +1,92 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import java.io.Serializable; +import javax.annotation.Nullable; + +@AutoValue +public abstract class BiEngineReason implements Serializable { + + @AutoValue.Builder + public abstract static class Builder { + + /** + * High-level BI Engine reason for partial or disabled acceleration. + * + * @param code code or {@code null} for none + */ + public abstract Builder setCode(String code); + + /** + * Free form human-readable reason for partial or disabled acceleration. 
+ * + * @param message message or {@code null} for none + */ + public abstract Builder setMessage(String message); + + /** Creates a {@code BiEngineReason} object. */ + public abstract BiEngineReason build(); + } + + /** + * High-level BI Engine reason for partial or disabled acceleration. + * + * @return value or {@code null} for none + */ + @Nullable + public abstract String getCode(); + + /** + * Free form human-readable reason for partial or disabled acceleration. + * + * @return value or {@code null} for none + */ + @Nullable + public abstract String getMessage(); + + public abstract Builder toBuilder(); + + public static Builder newBuilder() { + return new AutoValue_BiEngineReason.Builder(); + } + + com.google.api.services.bigquery.model.BiEngineReason toPb() { + com.google.api.services.bigquery.model.BiEngineReason biEngineReasonPb = + new com.google.api.services.bigquery.model.BiEngineReason(); + if (getCode() != null) { + biEngineReasonPb.setCode(getCode()); + } + if (getMessage() != null) { + biEngineReasonPb.setMessage(getMessage()); + } + return biEngineReasonPb; + } + + static BiEngineReason fromPb( + com.google.api.services.bigquery.model.BiEngineReason biEngineReasonPb) { + Builder builder = newBuilder(); + if (biEngineReasonPb.getCode() != null) { + builder.setCode(biEngineReasonPb.getCode()); + } + if (biEngineReasonPb.getMessage() != null) { + builder.setMessage(biEngineReasonPb.getMessage()); + } + return builder.build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BiEngineStats.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BiEngineStats.java new file mode 100644 index 0000000000..34d6c43268 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BiEngineStats.java @@ -0,0 +1,101 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.api.services.bigquery.model.BiEngineStatistics; +import com.google.auto.value.AutoValue; +import java.io.Serializable; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nullable; + +/** BIEngineStatistics contains query statistics specific to the use of BI Engine. */ +@AutoValue +public abstract class BiEngineStats implements Serializable { + + @AutoValue.Builder + public abstract static class Builder { + /** + * Specifies which mode of BI Engine acceleration was performed (if any). + * + * @param biEngineMode biEngineMode or {@code null} for none + */ + public abstract Builder setBiEngineMode(String biEngineMode); + + /** + * In case of DISABLED or PARTIAL bi_engine_mode, these contain the explanatory reasons as to + * why BI Engine could not accelerate. In case the full query was accelerated, this field is not + * populated. + * + * @param biEngineReasons biEngineReasons or {@code null} for none + */ + public abstract Builder setBiEngineReasons(List biEngineReasons); + + /** Creates a @code BiEngineStats} object. 
*/ + public abstract BiEngineStats build(); + } + + /** + * Specifies which mode of BI Engine acceleration was performed (if any). + * + * @return value or {@code null} for none + */ + @Nullable + public abstract String getBiEngineMode(); + + /** + * In case of DISABLED or PARTIAL bi_engine_mode, these contain the explanatory reasons as to why + * BI Engine could not accelerate. In case the full query was accelerated, this field is not + * populated. + * + * @return value or {@code null} for none + */ + @Nullable + public abstract List getBiEngineReasons(); + + public abstract Builder toBuilder(); + + public static Builder newBuilder() { + return new AutoValue_BiEngineStats.Builder(); + } + + BiEngineStatistics toPb() { + BiEngineStatistics biEngineStatisticsPb = new BiEngineStatistics(); + if (getBiEngineMode() != null) { + biEngineStatisticsPb.setBiEngineMode(getBiEngineMode()); + } + if (getBiEngineReasons() != null) { + biEngineStatisticsPb.setBiEngineReasons( + getBiEngineReasons().stream().map(BiEngineReason::toPb).collect(Collectors.toList())); + } + return biEngineStatisticsPb; + } + + static BiEngineStats fromPb(BiEngineStatistics biEngineStatisticsPb) { + Builder builder = newBuilder(); + if (biEngineStatisticsPb.getBiEngineMode() != null) { + builder.setBiEngineMode(biEngineStatisticsPb.getBiEngineMode()); + } + if (biEngineStatisticsPb.getBiEngineReasons() != null) { + builder.setBiEngineReasons( + biEngineStatisticsPb.getBiEngineReasons().stream() + .map(BiEngineReason::fromPb) + .collect(Collectors.toList())); + } + return builder.build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigLakeConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigLakeConfiguration.java new file mode 100644 index 0000000000..76b3f55d94 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigLakeConfiguration.java @@ -0,0 +1,120 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import java.io.Serializable; + +@AutoValue +public abstract class BigLakeConfiguration implements Serializable { + + private static final long serialVersionUID = -5951589238459622025L; + + /** + * Credential reference for accessing external storage system. Normalized as + * project_id.location_id.connection_id. + * + * @return value or {@code null} for none + */ + public abstract String getConnectionId(); + + /** + * Open source file format that the table data is stored in. Currently only PARQUET is supported. + * + * @return value or {@code null} for none + */ + public abstract String getFileFormat(); + + /** + * Fully qualified location prefix of the external folder where data is stored. Starts with + * "gs://" ends with "/". Does not contain "*". 
+ * + * @return value or {@code null} for none + */ + public abstract String getStorageUri(); + + /** + * Open source file format that the table data is stored in. Currently only PARQUET is supported. + * + * @return value or {@code null} for none + */ + public abstract String getTableFormat(); + + public static Builder newBuilder() { + return new AutoValue_BigLakeConfiguration.Builder(); + } + + public abstract Builder toBuilder(); + + @AutoValue.Builder + public abstract static class Builder { + /** + * [Required] Required and immutable. Credential reference for accessing external storage + * system. Normalized as project_id.location_id.connection_id. + * + * @param connectionId connectionId or {@code null} for none + */ + public abstract Builder setConnectionId(String connectionId); + + /** + * [Required] Required and immutable. Open source file format that the table data is stored in. + * Currently only PARQUET is supported. + * + * @param fileFormat fileFormat or {@code null} for none + */ + public abstract Builder setFileFormat(String fileFormat); + + /** + * [Required] Required and immutable. Fully qualified location prefix of the external folder + * where data is stored. Starts with "gs://" and ends with "/". Does not contain "*". + * + * @param storageUri storageUri or {@code null} for none + */ + public abstract Builder setStorageUri(String storageUri); + + /** + * [Required] Required and immutable. Open source file format that the table data is stored in. + * Currently only PARQUET is supported. + * + * @param tableFormat tableFormat or {@code null} for none + */ + public abstract Builder setTableFormat(String tableFormat); + + public abstract BigLakeConfiguration build(); + } + + com.google.api.services.bigquery.model.BigLakeConfiguration toPb() { + com.google.api.services.bigquery.model.BigLakeConfiguration biglakeConfiguration = + new com.google.api.services.bigquery.model.BigLakeConfiguration(); + biglakeConfiguration.setConnectionId(getConnectionId()); + biglakeConfiguration.setFileFormat(getFileFormat()); + biglakeConfiguration.setStorageUri(getStorageUri()); + biglakeConfiguration.setTableFormat(getTableFormat()); + + return biglakeConfiguration; + } + + static BigLakeConfiguration fromPb( + com.google.api.services.bigquery.model.BigLakeConfiguration biglakeConfigurationPb) { + return newBuilder() + .setConnectionId(biglakeConfigurationPb.getConnectionId()) + .setFileFormat(biglakeConfigurationPb.getFileFormat()) + .setStorageUri(biglakeConfigurationPb.getStorageUri()) + .setTableFormat(biglakeConfigurationPb.getTableFormat()) + .build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java index 70a29fd9ff..ab16ed40f7 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java @@ -18,6 +18,7 @@ import static com.google.common.base.Preconditions.checkArgument; +import com.google.api.core.BetaApi; import com.google.api.core.InternalApi; import com.google.api.gax.paging.Page; import com.google.cloud.FieldSelector; @@ -32,6 +33,7 @@ import java.io.Serializable; import java.util.ArrayList; import java.util.List; +import org.checkerframework.checker.nullness.qual.NonNull; /** * An interface for Google Cloud BigQuery. 
@@ -75,6 +77,42 @@ public String getSelector() { } } + enum DatasetView { + DATASET_VIEW_UNSPECIFIED("DATASET_VIEW_UNSPECIFIED"), + FULL("FULL"), + METADATA("METADATA"), + ACL("ACL"); + + private final String view; + + DatasetView(String view) { + this.view = view; + } + + @Override + public String toString() { + return view; + } + } + + enum DatasetUpdateMode { + UPDATE_MODE_UNSPECIFIED("UPDATE_MODE_UNSPECIFIED"), + UPDATE_FULL("UPDATE_FULL"), + UPDATE_METADATA("UPDATE_METADATA"), + UPDATE_ACL("UPDATE_ACL"); + + private final String updateMode; + + DatasetUpdateMode(String updateMode) { + this.updateMode = updateMode; + } + + @Override + public String toString() { + return updateMode; + } + } + /** * Fields of a BigQuery Table resource. * @@ -119,6 +157,20 @@ public String getSelector() { } } + /** + * Metadata of a BigQuery Table. + * + * @see Table + * Resource + */ + enum TableMetadataView { + BASIC, + FULL, + STORAGE_STATS, + TABLE_METADATA_VIEW_UNSPECIFIED; + } + /** * Fields of a BigQuery Model resource. * @@ -228,8 +280,11 @@ private DatasetListOption(BigQueryRpc.Option option, Object value) { } /** - * Returns an option to specify a label filter. See - * https://cloud.google.com/bigquery/docs/adding-using-labels#filtering_datasets_using_labels + * Returns an option to specify a label filter. @see Filtering + * using labels + * + * @param labelFilter In the form "labels.key:value" */ public static DatasetListOption labelFilter(String labelFilter) { return new DatasetListOption(BigQueryRpc.Option.LABEL_FILTER, labelFilter); @@ -270,6 +325,40 @@ public static DatasetOption fields(DatasetField... fields) { return new DatasetOption( BigQueryRpc.Option.FIELDS, Helper.selector(DatasetField.REQUIRED_FIELDS, fields)); } + + /** + * Returns an option to specify the dataset's access policy version for conditional access. If + * this option is not provided the field remains unset and conditional access cannot be used. + * Valid values are 0, 1, and 3. Requests specifying an invalid value will be rejected. Requests + * for conditional access policy binding in datasets must specify version 3. Datasets with no + * conditional role bindings in access policy may specify any valid value or leave the field + * unset. This field will be mapped to IAM Policy version and will be + * used to fetch the policy from IAM. If unset or if 0 or 1 the value is used for a dataset with + * conditional bindings, access entry with condition will have role string appended by + * 'withcond' string followed by a hash value. Please refer to Troubleshooting + * withcond for more details. + */ + public static DatasetOption accessPolicyVersion(Integer accessPolicyVersion) { + return new DatasetOption(BigQueryRpc.Option.ACCESS_POLICY_VERSION, accessPolicyVersion); + } + + /** + * Returns an option to specify the view that determines which dataset information is returned. + * By default, metadata and ACL information are returned. + */ + public static DatasetOption datasetView(DatasetView datasetView) { + return new DatasetOption(BigQueryRpc.Option.DATASET_VIEW, datasetView); + } + + /** + * Returns an option to specify the fields of dataset that update/patch operation is targeting. + * By default, both metadata and ACL fields are updated. + */ + public static DatasetOption updateMode(DatasetUpdateMode updateMode) { + return new DatasetOption(BigQueryRpc.Option.DATASET_UPDATE_MODE, updateMode); + } } /** Class for specifying dataset delete options. */ @@ -372,6 +461,19 @@ public static TableOption fields(TableField... 
fields) { return new TableOption( BigQueryRpc.Option.FIELDS, Helper.selector(TableField.REQUIRED_FIELDS, fields)); } + + /** + * Returns an option to specify the schema of the table (only applicable for external tables) + * should be autodetected when updating the table from the underlying source. + */ + public static TableOption autodetectSchema(boolean autodetect) { + return new TableOption(BigQueryRpc.Option.AUTODETECT_SCHEMA, autodetect); + } + + /** Returns an option to specify the metadata of the table. */ + public static TableOption tableMetadataView(TableMetadataView tableMetadataView) { + return new TableOption(BigQueryRpc.Option.TABLE_METADATA_VIEW, tableMetadataView); + } } /* Class for specifying IAM options. */ @@ -530,7 +632,7 @@ public static JobListOption fields(JobField... fields) { /** Class for specifying table get and create options. */ class JobOption extends Option { - private static final long serialVersionUID = -3111736712316353665L; + private static final long serialVersionUID = -3111736712316353664L; private JobOption(BigQueryRpc.Option option, Object value) { super(option, value); @@ -547,6 +649,16 @@ public static JobOption fields(JobField... fields) { return new JobOption( BigQueryRpc.Option.FIELDS, Helper.selector(JobField.REQUIRED_FIELDS, fields)); } + + /** Returns an option to specify the job's BigQuery retry configuration. */ + public static JobOption bigQueryRetryConfig(BigQueryRetryConfig bigQueryRetryConfig) { + return new JobOption(BigQueryRpc.Option.BIGQUERY_RETRY_CONFIG, bigQueryRetryConfig); + } + + /** Returns an option to specify the job's retry options. */ + public static JobOption retryOptions(RetryOption... options) { + return new JobOption(BigQueryRpc.Option.RETRY_OPTIONS, options); + } } /** Class for specifying query results options. */ @@ -658,20 +770,17 @@ public int hashCode() { * *

    * <p>Example of creating a dataset.
    *
-   * <pre>
-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   Dataset dataset = null;
-   *   DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build();
-   *   try {
-   *     // the dataset was created
-   *     dataset = bigquery.create(datasetInfo);
-   *   } catch (BigQueryException e) {
-   *     // the dataset was not created
-   *   }
+   * <pre>{@code
+   * String datasetName = "my_dataset_name";
+   * Dataset dataset = null;
+   * DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build();
+   * try {
+   *   // the dataset was created
+   *   dataset = bigquery.create(datasetInfo);
+   * } catch (BigQueryException e) {
+   *   // the dataset was not created
    * }
-   * </pre>
+   * }</pre>
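+   *
+   * <p>A possible follow-up (sketch; {@code DatasetView} and {@code DatasetOption.datasetView}
+   * are introduced in this change): read back only the ACL portion of the new dataset.
+   *
+   * <pre>{@code
+   * // Sketch: fetch just the access-control view of the dataset.
+   * Dataset aclOnly = bigquery.getDataset(datasetName, DatasetOption.datasetView(DatasetView.ACL));
+   * }</pre>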
    * * @throws BigQueryException upon failure */ @@ -682,22 +791,19 @@ public int hashCode() { * *

    * <p>Example of creating a table.
    *
-   * <pre>
-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   String fieldName = "string_field";
-   *   TableId tableId = TableId.of(datasetName, tableName);
-   *   // Table field definition
-   *   Field field = Field.of(fieldName, LegacySQLTypeName.STRING);
-   *   // Table schema definition
-   *   Schema schema = Schema.of(field);
-   *   TableDefinition tableDefinition = StandardTableDefinition.of(schema);
-   *   TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();
-   *   Table table = bigquery.create(tableInfo);
-   * }
-   * </pre>
+   * <pre>{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * String fieldName = "string_field";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * // Table field definition
+   * Field field = Field.of(fieldName, LegacySQLTypeName.STRING);
+   * // Table schema definition
+   * Schema schema = Schema.of(field);
+   * TableDefinition tableDefinition = StandardTableDefinition.of(schema);
+   * TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();
+   * Table table = bigquery.create(tableInfo);
+   * }</pre>
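+   *
+   * <p>Sketch of a later update (assumes the {@code TableOption.autodetectSchema} option added in
+   * this change; it only applies to external tables): re-detect the schema when the underlying
+   * files have changed.
+   *
+   * <pre>{@code
+   * // Hypothetical follow-up: ask the service to re-detect the schema during the update.
+   * Table updated = bigquery.update(tableInfo, TableOption.autodetectSchema(true));
+   * }</pre>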
    * * @throws BigQueryException upon failure */ @@ -715,63 +821,96 @@ public int hashCode() { * *

    * <p>Example of loading a newline-delimited-json file with textual fields from GCS to a table.
    *
-   * <pre>
-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   String tableName = "my_table_name";
-   *   String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.json";
-   *   TableId tableId = TableId.of(datasetName, tableName);
-   *   // Table field definition
-   *   Field[] fields = new Field[] { Field.of("name", LegacySQLTypeName.STRING),
-   *       Field.of("post_abbr", LegacySQLTypeName.STRING) };
-   *   // Table schema definition
-   *   Schema schema = Schema.of(fields);
-   *   LoadJobConfiguration configuration = LoadJobConfiguration.builder(tableId, sourceUri)
-   *       .setFormatOptions(FormatOptions.json()).setCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
-   *       .setSchema(schema).build();
-   *   // Load the table
-   *   Job loadJob = bigquery.create(JobInfo.of(configuration));
-   *   loadJob = loadJob.waitFor();
-   *   // Check the table
-   *   System.out.println("State: " + loadJob.getStatus().getState());
-   *   return ((StandardTableDefinition) bigquery.getTable(tableId).getDefinition()).getNumRows();
-   * }
-   * </pre>
+   * <pre>{@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.json";
+   * TableId tableId = TableId.of(datasetName, tableName);
+   * // Table field definition
+   * Field[] fields = new Field[] { Field.of("name", LegacySQLTypeName.STRING),
+   * Field.of("post_abbr", LegacySQLTypeName.STRING) };
+   * // Table schema definition
+   * Schema schema = Schema.of(fields);
+   * LoadJobConfiguration configuration = LoadJobConfiguration.builder(tableId, sourceUri)
+   *     .setFormatOptions(FormatOptions.json())
+   *     .setCreateDisposition(CreateDisposition.CREATE_IF_NEEDED)
+   *     .setSchema(schema).build();
+   * // Load the table
+   * Job loadJob = bigquery.create(JobInfo.of(configuration));
+   * loadJob = loadJob.waitFor();
+   * // Check the table
+   * System.out.println("State: " + loadJob.getStatus().getState());
+   * return ((StandardTableDefinition) bigquery.getTable(tableId).getDefinition()).getNumRows();
+   * }</pre>
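+   *
+   * <p>A similar load can be sketched for Avro input using the {@code AvroOptions} class added in
+   * this change (the bucket path here is hypothetical):
+   *
+   * <pre>{@code
+   * // Interpret Avro logical types (e.g. timestamp-micros) as the matching BigQuery types.
+   * AvroOptions avroOptions = AvroOptions.newBuilder().setUseAvroLogicalTypes(true).build();
+   * LoadJobConfiguration avroLoad =
+   *     LoadJobConfiguration.builder(tableId, "gs://my-bucket/data.avro")
+   *         .setFormatOptions(avroOptions)
+   *         .build();
+   * }</pre>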
    * *

    * <p>Example of creating a query job.
    *
-   * <pre>
-   * {
-   *   @code
-   *   String query = "SELECT field FROM my_dataset_name.my_table_name";
-   *   Job job = null;
-   *   JobConfiguration jobConfiguration = QueryJobConfiguration.of(query);
-   *   JobInfo jobInfo = JobInfo.of(jobConfiguration);
-   *   try {
-   *     job = bigquery.create(jobInfo);
-   *   } catch (BigQueryException e) {
-   *     // the job was not created
-   *   }
+   * <pre>{@code
+   * String query = "SELECT field FROM my_dataset_name.my_table_name";
+   * Job job = null;
+   * JobConfiguration jobConfiguration = QueryJobConfiguration.of(query);
+   * JobInfo jobInfo = JobInfo.of(jobConfiguration);
+   * try {
+   *   job = bigquery.create(jobInfo);
+   * } catch (BigQueryException e) {
+   *   // the job was not created
    * }
-   * </pre>
+   * }</pre>
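+   *
+   * <p>A common follow-up (sketch): wait for the job to finish and iterate its results.
+   *
+   * <pre>{@code
+   * // Block until the job completes, then read the rows it produced.
+   * Job completedJob = job.waitFor();
+   * for (FieldValueList row : completedJob.getQueryResults().iterateAll()) {
+   *   // process each row
+   * }
+   * }</pre>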
    * * @throws BigQueryException upon failure */ Job create(JobInfo jobInfo, JobOption... options); + /** + * Creates a new BigQuery query connection used for executing queries (not the same as BigQuery + * connection properties). It uses the BigQuery Storage Read API for high throughput queries by + * default. + * + *

+   * <p>Example of creating a query connection.
+   *
+   * <pre>{@code
+   * ConnectionSettings connectionSettings =
+   *     ConnectionSettings.newBuilder()
+   *         .setRequestTimeout(10L)
+   *         .setMaxResults(100L)
+   *         .setUseQueryCache(true)
+   *         .build();
+   * Connection connection = bigquery.createConnection(connectionSettings);
+   * }</pre>
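+   *
+   * <p>Sketch of using the connection (assumes the {@code Connection#executeSelect} entry point
+   * and its {@code BigQueryResult} return type; both names are taken on trust here):
+   *
+   * <pre>{@code
+   * // Hypothetical query executed through the connection created above.
+   * BigQueryResult result = connection.executeSelect("SELECT 17");
+   * }</pre>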
    + * + * @throws BigQueryException upon failure + * @param connectionSettings + */ + @BetaApi + Connection createConnection(@NonNull ConnectionSettings connectionSettings); + + /** + * Creates a new BigQuery query connection used for executing queries (not the same as BigQuery + * connection properties). It uses the BigQuery Storage Read API for high throughput queries by + * default. This overloaded method creates a Connection with default ConnectionSettings for query + * execution where default values are set for numBufferedRows (20000), useReadApi (true), + * useLegacySql (false). + * + *

+   * <p>Example of creating a query connection.
+   *
+   * <pre>{@code
+   * Connection connection = bigquery.createConnection();
+   * }</pre>
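+   *
+   * <p>Sketch (assuming {@code Connection} exposes a {@code close()} method): release the
+   * connection once the caller is done with it.
+   *
+   * <pre>{@code
+   * connection.close();
+   * }</pre>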
    + * + * @throws BigQueryException upon failure + */ + @BetaApi + Connection createConnection(); + /** * Returns the requested dataset or {@code null} if not found. * *

    * <p>Example of getting a dataset.
    *
-   * <pre>
-   * {
-   *   @code
-   *   String datasetName = "my_dataset";
-   *   Dataset dataset = bigquery.getDataset(datasetName);
-   * }
-   * </pre>
+   * <pre>{@code
+   * String datasetName = "my_dataset";
+   * Dataset dataset = bigquery.getDataset(datasetName);
+   * }</pre>
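+   *
+   * <p>Sketch (uses the {@code accessPolicyVersion} option added in this change): request policy
+   * version 3 so conditional ACL bindings are returned as-is.
+   *
+   * <pre>{@code
+   * Dataset withConditions =
+   *     bigquery.getDataset(datasetName, DatasetOption.accessPolicyVersion(3));
+   * }</pre>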
    * * @throws BigQueryException upon failure */ @@ -782,15 +921,12 @@ public int hashCode() { * *

    * <p>Example of getting a dataset.
    *
-   * <pre>
-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   String datasetName = "my_dataset_name";
-   *   DatasetId datasetId = DatasetId.of(projectId, datasetName);
-   *   Dataset dataset = bigquery.getDataset(datasetId);
-   * }
-   * </pre>
+   * <pre>{@code
+   * String projectId = "my_project_id";
+   * String datasetName = "my_dataset_name";
+   * DatasetId datasetId = DatasetId.of(projectId, datasetName);
+   * Dataset dataset = bigquery.getDataset(datasetId);
+   * }</pre>
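+   *
+   * <p>Sketch (uses the {@code DatasetUpdateMode} enum added in this change): push only ACL
+   * changes back to the service.
+   *
+   * <pre>{@code
+   * List<Acl> acl = Collections.singletonList(Acl.of(Acl.Group.ofProjectReaders(), Acl.Role.READER));
+   * Dataset updated =
+   *     bigquery.update(
+   *         dataset.toBuilder().setAcl(acl).build(),
+   *         DatasetOption.updateMode(DatasetUpdateMode.UPDATE_ACL));
+   * }</pre>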
    * * @throws BigQueryException upon failure */ @@ -799,21 +935,17 @@ public int hashCode() { /** * Lists the project's datasets. This method returns partial information on each dataset: ({@link * Dataset#getDatasetId()}, {@link Dataset#getFriendlyName()} and {@link - * Dataset#getGeneratedId()}). To get complete information use either {@link #getDataset(String, - * DatasetOption...)} or {@link #getDataset(DatasetId, DatasetOption...)}. + * Dataset#getGeneratedId()}). To get complete information use {@link #getDataset}. * *

    * <p>Example of listing datasets, specifying the page size.
    *
-   * <pre>
-   * {
-   *   @code
-   *   // List datasets in the default project
-   *   Page<Dataset> datasets = bigquery.listDatasets(DatasetListOption.pageSize(100));
-   *   for (Dataset dataset : datasets.iterateAll()) {
-   *     // do something with the dataset
-   *   }
+   * <pre>{@code
+   * // List datasets in the default project
+   * Page<Dataset> datasets = bigquery.listDatasets(DatasetListOption.pageSize(100));
+   * for (Dataset dataset : datasets.iterateAll()) {
+   *   // do something with the dataset
    * }
-   * </pre>
+   * }</pre>
    * * @throws BigQueryException upon failure */ @@ -822,22 +954,18 @@ public int hashCode() { /** * Lists the datasets in the provided project. This method returns partial information on each * dataset: ({@link Dataset#getDatasetId()}, {@link Dataset#getFriendlyName()} and {@link - * Dataset#getGeneratedId()}). To get complete information use either {@link #getDataset(String, - * DatasetOption...)} or {@link #getDataset(DatasetId, DatasetOption...)}. + * Dataset#getGeneratedId()}). To get complete information use either {@link #getDataset}. * *

    * <p>Example of listing datasets in a project, specifying the page size.
    *
-   * <pre>
-   * {
-   *   @code
-   *   String projectId = "my_project_id";
-   *   // List datasets in a specified project
-   *   Page<Dataset> datasets = bigquery.listDatasets(projectId, DatasetListOption.pageSize(100));
-   *   for (Dataset dataset : datasets.iterateAll()) {
-   *     // do something with the dataset
-   *   }
+   * <pre>{@code
+   * String projectId = "my_project_id";
+   * // List datasets in a specified project
+   * Page<Dataset> datasets = bigquery.listDatasets(projectId, DatasetListOption.pageSize(100));
+   * for (Dataset dataset : datasets.iterateAll()) {
+   *   // do something with the dataset
    * }
-   * </pre>
+   * }</pre>
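+   *
+   * <p>Sketch: the {@code labelFilter} option can narrow the listing (assumes a dataset labeled
+   * {@code env:dev}; the filter takes the form "labels.key:value").
+   *
+   * <pre>{@code
+   * Page<Dataset> devDatasets =
+   *     bigquery.listDatasets(projectId, DatasetListOption.labelFilter("labels.env:dev"));
+   * }</pre>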
    * * @throws BigQueryException upon failure */ @@ -848,18 +976,15 @@ public int hashCode() { * *

    * <p>Example of deleting a dataset from its id, even if non-empty.
    *
-   * <pre>
-   * {
-   *   @code
-   *   String datasetName = "my_dataset_name";
-   *   boolean deleted = bigquery.delete(datasetName, DatasetDeleteOption.deleteContents());
-   *   if (deleted) {
-   *     // the dataset was deleted
-   *   } else {
-   *     // the dataset was not found
-   *   }
+   * <pre>{@code
+   * String datasetName = "my_dataset_name";
+   * boolean deleted = bigquery.delete(datasetName, DatasetDeleteOption.deleteContents());
+   * if (deleted) {
+   *   // the dataset was deleted
+   * } else {
+   *   // the dataset was not found
    * }
-   * </pre>
+   * }</pre>
    * * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -871,20 +996,17 @@ public int hashCode() { * *

    Example of deleting a dataset, even if non-empty. * - *

    -   * {
    -   *   @code
    -   *   String projectId = "my_project_id";
    -   *   String datasetName = "my_dataset_name";
    -   *   DatasetId datasetId = DatasetId.of(projectId, datasetName);
    -   *   boolean deleted = bigquery.delete(datasetId, DatasetDeleteOption.deleteContents());
    -   *   if (deleted) {
    -   *     // the dataset was deleted
    -   *   } else {
    -   *     // the dataset was not found
    -   *   }
    +   * 
    {@code
    +   * String projectId = "my_project_id";
    +   * String datasetName = "my_dataset_name";
    +   * DatasetId datasetId = DatasetId.of(projectId, datasetName);
    +   * boolean deleted = bigquery.delete(datasetId, DatasetDeleteOption.deleteContents());
    +   * if (deleted) {
    +   *   // the dataset was deleted
    +   * } else {
    +   *   // the dataset was not found
        * }
    -   * 
    + * }
    * * @return {@code true} if dataset was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -908,21 +1030,18 @@ public int hashCode() { * *

    Example of deleting a table. * - *

    -   * {
    -   *   @code
    -   *   String projectId = "my_project_id";
    -   *   String datasetName = "my_dataset_name";
    -   *   String tableName = "my_table_name";
    -   *   TableId tableId = TableId.of(projectId, datasetName, tableName);
    -   *   boolean deleted = bigquery.delete(tableId);
    -   *   if (deleted) {
    -   *     // the table was deleted
    -   *   } else {
    -   *     // the table was not found
    -   *   }
    +   * 
    {@code
    +   * String projectId = "my_project_id";
    +   * String datasetName = "my_dataset_name";
    +   * String tableName = "my_table_name";
    +   * TableId tableId = TableId.of(projectId, datasetName, tableName);
    +   * boolean deleted = bigquery.delete(tableId);
    +   * if (deleted) {
    +   *   // the table was deleted
    +   * } else {
    +   *   // the table was not found
        * }
    -   * 
    + * }
    * * @return {@code true} if table was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -934,21 +1053,18 @@ public int hashCode() { * *

    Example of deleting a model. * - *

    -   * {
    -   *   @code
    -   *   String projectId = "my_project_id";
    -   *   String datasetName = "my_dataset_name";
    -   *   String tableName = "my_model_name";
    -   *   ModelId modelId = ModelId.of(projectId, datasetName, modelName);
    -   *   boolean deleted = bigquery.delete(modelId);
    -   *   if (deleted) {
    -   *     // the model was deleted
    -   *   } else {
    -   *     // the model was not found
    -   *   }
    +   * 
    {@code
    +   * String projectId = "my_project_id";
    +   * String datasetName = "my_dataset_name";
+   * String modelName = "my_model_name";
    +   * ModelId modelId = ModelId.of(projectId, datasetName, modelName);
    +   * boolean deleted = bigquery.delete(modelId);
    +   * if (deleted) {
    +   *   // the model was deleted
    +   * } else {
    +   *   // the model was not found
        * }
    -   * 
    + * }
    * * @return {@code true} if model was deleted, {@code false} if it was not found * @throws BigQueryException upon failure @@ -958,11 +1074,9 @@ public int hashCode() { /** * Deletes the requested routine. * - *

    - * Example of deleting a routine. + *

    Example of deleting a routine. * - *

    -   * {@code
    +   * 
    {@code
        * String projectId = "my_project_id";
        * String datasetId = "my_dataset_id";
        * String routineId = "my_routine_id";
    @@ -973,15 +1087,21 @@ public int hashCode() {
        * } else {
        *   // the routine was not found
        * }
    -   * 
    - * - * @return {@code true} if routine was deleted, {@code false} if it was not - * found + * }
    * + * @return {@code true} if routine was deleted, {@code false} if it was not found * @throws BigQueryException upon failure */ boolean delete(RoutineId routineId); + /** + * Deletes the requested job. + * + * @return {@code true} if job was deleted, {@code false} if it was not found + * @throws BigQueryException upon failure + */ + boolean delete(JobId jobId); + /** * Updates dataset information. * @@ -989,19 +1109,15 @@ public int hashCode() { * * - *
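A minimal usage sketch for the new delete(JobId) overload above; the project and job names are placeholders, and the snippet assumes an already-constructed bigquery client as in the other examples:

// Hypothetical identifiers; substitute a real project and job name.
JobId jobId = JobId.of("my_project_id", "my_job_name");
boolean deleted = bigquery.delete(jobId);
if (deleted) {
  // the job was deleted
} else {
  // the job was not found
}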
    -   * {
    -   *   @code
    -   *   // String datasetName = "my_dataset_name";
    -   *   // String tableName = "my_table_name";
    -   *   // String newDescription = "new_description";
    -   *
    -   *   Table beforeTable = bigquery.getTable(datasetName, tableName);
    -   *   TableInfo tableInfo = beforeTable.toBuilder().setDescription(newDescription).build();
    -   *   Table afterTable = bigquery.update(tableInfo);
    +   * 
    {@code
+   * // String datasetName = "my_dataset_name";
+   * // String newDescription = "new_description";
        *
    -   * }
    -   * 
+ * Dataset beforeDataset = bigquery.getDataset(datasetName); + * DatasetInfo datasetInfo = beforeDataset.toBuilder().setDescription(newDescription).build(); + * Dataset afterDataset = bigquery.update(datasetInfo); + * }
    * * * @@ -1014,33 +1130,27 @@ public int hashCode() { * *

    Example of updating a table by changing its description. * - *

    -   * {
    -   *   @code
    -   *   String datasetName = "my_dataset_name";
    -   *   String tableName = "my_table_name";
    -   *   String newDescription = "new_description";
    -   *   Table beforeTable = bigquery.getTable(datasetName, tableName);
    -   *   TableInfo tableInfo = beforeTable.toBuilder().setDescription(newDescription).build();
    -   *   Table afterTable = bigquery.update(tableInfo);
    -   * }
    -   * 
    + *
    {@code
    +   * String datasetName = "my_dataset_name";
    +   * String tableName = "my_table_name";
    +   * String newDescription = "new_description";
    +   * Table beforeTable = bigquery.getTable(datasetName, tableName);
    +   * TableInfo tableInfo = beforeTable.toBuilder().setDescription(newDescription).build();
    +   * Table afterTable = bigquery.update(tableInfo);
    +   * }
    * *

    Example of updating a table by changing its expiration. * - *

    -   * {
    -   *   @code
    -   *   String datasetName = "my_dataset_name";
    -   *   String tableName = "my_table_name";
    -   *   Table beforeTable = bigquery.getTable(datasetName, tableName);
    -   *
    -   *   // Set table to expire 5 days from now.
    -   *   long expirationMillis = DateTime.now().plusDays(5).getMillis();
    -   *   TableInfo tableInfo = beforeTable.toBuilder().setExpirationTime(expirationMillis).build();
    -   *   Table afterTable = bigquery.update(tableInfo);
    -   * }
    -   * 
    + *
    {@code
    +   * String datasetName = "my_dataset_name";
    +   * String tableName = "my_table_name";
    +   * Table beforeTable = bigquery.getTable(datasetName, tableName);
    +   *
    +   * // Set table to expire 5 days from now.
    +   * long expirationMillis = DateTime.now().plusDays(5).getMillis();
    +   * TableInfo tableInfo = beforeTable.toBuilder().setExpirationTime(expirationMillis).build();
    +   * Table afterTable = bigquery.update(tableInfo);
    +   * }
    * * @throws BigQueryException upon failure */ @@ -1051,33 +1161,27 @@ public int hashCode() { * *

    Example of updating a model by changing its description. * - *

    -   * {
    -   *   @code
    -   *   String datasetName = "my_dataset_name";
    -   *   String modelName = "my_model_name";
    -   *   String newDescription = "new_description";
    -   *   Model beforeModel = bigquery.getModel(datasetName, modelName);
    -   *   ModelInfo modelInfo = beforeModel.toBuilder().setDescription(newDescription).build();
    -   *   Model afterModel = bigquery.update(modelInfo);
    -   * }
    -   * 
    + *
    {@code
    +   * String datasetName = "my_dataset_name";
    +   * String modelName = "my_model_name";
    +   * String newDescription = "new_description";
    +   * Model beforeModel = bigquery.getModel(datasetName, modelName);
    +   * ModelInfo modelInfo = beforeModel.toBuilder().setDescription(newDescription).build();
    +   * Model afterModel = bigquery.update(modelInfo);
    +   * }
    * *

    Example of updating a model by changing its expiration. * - *

    -   * {
    -   *   @code
    -   *   String datasetName = "my_dataset_name";
    -   *   String modelName = "my_model_name";
    -   *   Model beforeModel = bigquery.getModel(datasetName, modelName);
    -   *
    -   *   // Set model to expire 5 days from now.
    -   *   long expirationMillis = DateTime.now().plusDays(5).getMillis();
    -   *   ModelInfo modelInfo = beforeModel.toBuilder().setExpirationTime(expirationMillis).build();
    -   *   Model afterModel = bigquery.update(modelInfo);
    -   * }
    -   * 
    + *
    {@code
    +   * String datasetName = "my_dataset_name";
    +   * String modelName = "my_model_name";
    +   * Model beforeModel = bigquery.getModel(datasetName, modelName);
    +   *
    +   * // Set model to expire 5 days from now.
    +   * long expirationMillis = DateTime.now().plusDays(5).getMillis();
    +   * ModelInfo modelInfo = beforeModel.toBuilder().setExpirationTime(expirationMillis).build();
    +   * Model afterModel = bigquery.update(modelInfo);
    +   * }
    * * @throws BigQueryException upon failure */ @@ -1095,14 +1199,11 @@ public int hashCode() { * *

    Example of getting a table. * - *

    -   * {
    -   *   @code
    -   *   String datasetName = "my_dataset_name";
    -   *   String tableName = "my_table_name";
    -   *   Table table = bigquery.getTable(datasetName, tableName);
    -   * }
    -   * 
    + *
    {@code
    +   * String datasetName = "my_dataset_name";
    +   * String tableName = "my_table_name";
    +   * Table table = bigquery.getTable(datasetName, tableName);
    +   * }
    * * @throws BigQueryException upon failure */ @@ -1113,16 +1214,13 @@ public int hashCode() { * *

    Example of getting a table. * - *

    -   * {
    -   *   @code
    -   *   String projectId = "my_project_id";
    -   *   String datasetName = "my_dataset_name";
    -   *   String tableName = "my_table_name";
    -   *   TableId tableId = TableId.of(projectId, datasetName, tableName);
    -   *   Table table = bigquery.getTable(tableId);
    -   * }
    -   * 
    + *
    {@code
    +   * String projectId = "my_project_id";
    +   * String datasetName = "my_dataset_name";
    +   * String tableName = "my_table_name";
    +   * TableId tableId = TableId.of(projectId, datasetName, tableName);
    +   * Table table = bigquery.getTable(tableId);
    +   * }
    * * @throws BigQueryException upon failure */ @@ -1140,16 +1238,13 @@ public int hashCode() { * *

    Example of getting a model. * - *

    -   * {
    -   *   @code
    -   *   String projectId = "my_project_id";
    -   *   String datasetName = "my_dataset_name";
    -   *   String modelName = "my_model_name";
    -   *   ModelId modelId = ModelId.of(projectId, datasetName, tableName);
    -   *   Model model = bigquery.getModel(modelId);
    -   * }
    -   * 
    + *
    {@code
    +   * String projectId = "my_project_id";
    +   * String datasetName = "my_dataset_name";
    +   * String modelName = "my_model_name";
+   * ModelId modelId = ModelId.of(projectId, datasetName, modelName);
    +   * Model model = bigquery.getModel(modelId);
    +   * }
    * * @throws BigQueryException upon failure */ @@ -1176,54 +1271,42 @@ public int hashCode() { Page listRoutines(DatasetId datasetId, RoutineListOption... options); /** - * Lists the tables in the dataset. This method returns partial information on - * each table: ({@link Table#getTableId()}, {@link Table#getFriendlyName()}, - * {@link Table#getGeneratedId()} and type, which is part of - * {@link Table#getDefinition()}). To get complete information use either - * {@link #getTable(TableId, TableOption...)} or - * {@link #getTable(String, String, TableOption...)}. - * - *

    - * Example of listing the tables in a dataset, specifying the page size. - * - *

    -   * {
    -   *   @code
    -   *   String datasetName = "my_dataset_name";
-   *   Page&lt;Table&gt; tables = bigquery.listTables(datasetName, TableListOption.pageSize(100)); - * for (Table table : tables.iterateAll()) { - * // do something with the table - * } + * Lists the tables in the dataset. This method returns partial information on each table: ({@link + * Table#getTableId()}, {@link Table#getFriendlyName()}, {@link Table#getGeneratedId()} and type, + * which is part of {@link Table#getDefinition()}). To get complete information use {@link + * #getTable}. + * + *

    Example of listing the tables in a dataset, specifying the page size. + * + *

    {@code
    +   * String datasetName = "my_dataset_name";
+   * Page&lt;Table&gt; tables = bigquery.listTables(datasetName, TableListOption.pageSize(100)); + * for (Table table : tables.iterateAll()) { + * // do something with the table * } - * + * } * * @throws BigQueryException upon failure */ Page&lt;Table&gt; listTables(String datasetId, TableListOption... options); /** - * Lists the tables in the dataset. This method returns partial information on - * each table: ({@link Table#getTableId()}, {@link Table#getFriendlyName()}, - * {@link Table#getGeneratedId()} and type, which is part of - * {@link Table#getDefinition()}). To get complete information use either - * {@link #getTable(TableId, TableOption...)} or - * {@link #getTable(String, String, TableOption...)}. - * - *

    - * Example of listing the tables in a dataset. - * - *

    -   * {
    -   *   @code
    -   *   String projectId = "my_project_id";
    -   *   String datasetName = "my_dataset_name";
    -   *   DatasetId datasetId = DatasetId.of(projectId, datasetName);
-   *   Page&lt;Table&gt; tables = bigquery.listTables(datasetId, TableListOption.pageSize(100)); - * for (Table table : tables.iterateAll()) { - * // do something with the table - * } + * Lists the tables in the dataset. This method returns partial information on each table: ({@link + * Table#getTableId()}, {@link Table#getFriendlyName()}, {@link Table#getGeneratedId()} and type, + * which is part of {@link Table#getDefinition()}). To get complete information use {@link + * #getTable}. + * + *

    Example of listing the tables in a dataset. + * + *

    {@code
    +   * String projectId = "my_project_id";
    +   * String datasetName = "my_dataset_name";
    +   * DatasetId datasetId = DatasetId.of(projectId, datasetName);
+   * Page&lt;Table&gt; tables = bigquery.listTables(datasetId, TableListOption.pageSize(100)); + * for (Table table : tables.iterateAll()) { + * // do something with the table * } - * + * } * * @throws BigQueryException upon failure */ @@ -1246,33 +1329,30 @@ public int hashCode() { * *

    Example of inserting rows into a table without running a load job. * - *

    -   * {
    -   *   @code
    -   *   String datasetName = "my_dataset_name";
    -   *   String tableName = "my_table_name";
    -   *   TableId tableId = TableId.of(datasetName, tableName);
    -   *   // Values of the row to insert
-   *   Map&lt;String, Object&gt; rowContent = new HashMap<>();
    -   *   rowContent.put("booleanField", true);
    -   *   // Bytes are passed in base64
    -   *   rowContent.put("bytesField", "Cg0NDg0="); // 0xA, 0xD, 0xD, 0xE, 0xD in base64
    -   *   // Records are passed as a map
-   *   Map&lt;String, Object&gt; recordsContent = new HashMap<>();
    -   *   recordsContent.put("stringField", "Hello, World!");
    -   *   rowContent.put("recordField", recordsContent);
    -   *   InsertAllResponse response = bigquery.insertAll(InsertAllRequest.newBuilder(tableId).addRow("rowId", rowContent)
    -   *       // More rows can be added in the same RPC by invoking .addRow() on the
    -   *       // builder
    -   *       .build());
    -   *   if (response.hasErrors()) {
    -   *     // If any of the insertions failed, this lets you inspect the errors
-   *     for (Entry&lt;Long, List&lt;BigQueryError&gt;&gt; entry : response.getInsertErrors().entrySet()) {
    -   *       // inspect row error
    -   *     }
    +   * 
    {@code
    +   * String datasetName = "my_dataset_name";
    +   * String tableName = "my_table_name";
    +   * TableId tableId = TableId.of(datasetName, tableName);
    +   * // Values of the row to insert
+   * Map&lt;String, Object&gt; rowContent = new HashMap<>();
    +   * rowContent.put("booleanField", true);
    +   * // Bytes are passed in base64
    +   * rowContent.put("bytesField", "Cg0NDg0="); // 0xA, 0xD, 0xD, 0xE, 0xD in base64
    +   * // Records are passed as a map
+   * Map&lt;String, Object&gt; recordsContent = new HashMap<>();
    +   * recordsContent.put("stringField", "Hello, World!");
    +   * rowContent.put("recordField", recordsContent);
    +   * InsertAllResponse response = bigquery.insertAll(InsertAllRequest.newBuilder(tableId).addRow("rowId", rowContent)
    +   *     // More rows can be added in the same RPC by invoking .addRow() on the
    +   *     // builder
    +   *     .build());
    +   * if (response.hasErrors()) {
    +   *   // If any of the insertions failed, this lets you inspect the errors
+   *   for (Entry&lt;Long, List&lt;BigQueryError&gt;&gt; entry : response.getInsertErrors().entrySet()) {
    +   *     // inspect row error
        *   }
        * }
    -   * 
    + * }
    * * @throws BigQueryException upon failure */ @@ -1283,20 +1363,17 @@ public int hashCode() { * *

    Example of listing table rows, specifying the page size. * - *

    -   * {
    -   *   @code
    -   *   String datasetName = "my_dataset_name";
    -   *   String tableName = "my_table_name";
    -   *   // This example reads the result 100 rows per RPC call. If there's no need
    -   *   // to limit the number,
    -   *   // simply omit the option.
    -   *   TableResult tableData = bigquery.listTableData(datasetName, tableName, TableDataListOption.pageSize(100));
    -   *   for (FieldValueList row : tableData.iterateAll()) {
    -   *     // do something with the row
    -   *   }
    +   * 
    {@code
    +   * String datasetName = "my_dataset_name";
    +   * String tableName = "my_table_name";
    +   * // This example reads the result 100 rows per RPC call. If there's no need
+   * // to limit the number, simply omit the option.
    +   * TableResult tableData = bigquery.listTableData(datasetName, tableName, TableDataListOption.pageSize(100));
    +   * for (FieldValueList row : tableData.iterateAll()) {
    +   *   // do something with the row
        * }
    -   * 
    + * }
    * * @throws BigQueryException upon failure */ @@ -1307,21 +1384,18 @@ public int hashCode() { * *

    Example of listing table rows, specifying the page size. * - *

    -   * {
    -   *   @code
    -   *   String datasetName = "my_dataset_name";
    -   *   String tableName = "my_table_name";
    -   *   TableId tableIdObject = TableId.of(datasetName, tableName);
    -   *   // This example reads the result 100 rows per RPC call. If there's no need
    -   *   // to limit the number,
    -   *   // simply omit the option.
    -   *   TableResult tableData = bigquery.listTableData(tableIdObject, TableDataListOption.pageSize(100));
    -   *   for (FieldValueList row : tableData.iterateAll()) {
    -   *     // do something with the row
    -   *   }
    +   * 
    {@code
    +   * String datasetName = "my_dataset_name";
    +   * String tableName = "my_table_name";
    +   * TableId tableIdObject = TableId.of(datasetName, tableName);
    +   * // This example reads the result 100 rows per RPC call. If there's no need
+   * // to limit the number, simply omit the option.
    +   * TableResult tableData = bigquery.listTableData(tableIdObject, TableDataListOption.pageSize(100));
    +   * for (FieldValueList row : tableData.iterateAll()) {
    +   *   // do something with the row
        * }
    -   * 
    + * }
    * * @throws BigQueryException upon failure */ @@ -1355,18 +1429,15 @@ TableResult listTableData( * *

    Example of listing table rows with schema. * - *

    -   * {
    -   *   @code
    -   *   Schema schema = Schema.of(Field.of("word", LegacySQLTypeName.STRING),
    -   *       Field.of("word_count", LegacySQLTypeName.STRING), Field.of("corpus", LegacySQLTypeName.STRING),
    -   *       Field.of("corpus_date", LegacySQLTypeName.STRING));
    -   *   TableResult tableData = bigquery.listTableData(TableId.of("bigquery-public-data", "samples", "shakespeare"),
    -   *       schema);
    -   *   FieldValueList row = tableData.getValues().iterator().next();
    -   *   System.out.println(row.get("word").getStringValue());
    -   * }
    -   * 
    + *
    {@code
    +   * Schema schema = Schema.of(Field.of("word", LegacySQLTypeName.STRING),
    +   *     Field.of("word_count", LegacySQLTypeName.STRING), Field.of("corpus", LegacySQLTypeName.STRING),
    +   *     Field.of("corpus_date", LegacySQLTypeName.STRING));
    +   * TableResult tableData = bigquery.listTableData(TableId.of("bigquery-public-data", "samples", "shakespeare"),
    +   *     schema);
    +   * FieldValueList row = tableData.getValues().iterator().next();
    +   * System.out.println(row.get("word").getStringValue());
    +   * }
    * * @throws BigQueryException upon failure */ @@ -1378,16 +1449,13 @@ TableResult listTableData( * *

    Example of getting a job. * - *

    -   * {
    -   *   @code
    -   *   String jobName = "my_job_name";
    -   *   Job job = bigquery.getJob(jobName);
    -   *   if (job == null) {
    -   *     // job was not found
    -   *   }
    +   * 
    {@code
    +   * String jobName = "my_job_name";
    +   * Job job = bigquery.getJob(jobName);
    +   * if (job == null) {
    +   *   // job was not found
        * }
    -   * 
    + * }
    * * @throws BigQueryException upon failure */ @@ -1399,17 +1467,14 @@ TableResult listTableData( * *

    Example of getting a job. * - *

    -   * {
    -   *   @code
    -   *   String jobName = "my_job_name";
    -   *   JobId jobIdObject = JobId.of(jobName);
    -   *   Job job = bigquery.getJob(jobIdObject);
    -   *   if (job == null) {
    -   *     // job was not found
    -   *   }
    +   * 
    {@code
    +   * String jobName = "my_job_name";
    +   * JobId jobIdObject = JobId.of(jobName);
    +   * Job job = bigquery.getJob(jobIdObject);
    +   * if (job == null) {
    +   *   // job was not found
        * }
    -   * 
    + * }
    * * @throws BigQueryException upon failure */ @@ -1420,15 +1485,12 @@ TableResult listTableData( * *

    Example of listing jobs, specifying the page size. * - *

    -   * {
    -   *   @code
-   *   Page&lt;Job&gt; jobs = bigquery.listJobs(JobListOption.pageSize(100));
    -   *   for (Job job : jobs.iterateAll()) {
    -   *     // do something with the job
    -   *   }
    +   * 
    {@code
+   * Page&lt;Job&gt; jobs = bigquery.listJobs(JobListOption.pageSize(100));
    +   * for (Job job : jobs.iterateAll()) {
    +   *   // do something with the job
        * }
    -   * 
    + * }
* * @throws BigQueryException upon failure */ @@ -1436,25 +1498,21 @@ TableResult listTableData( /** * Sends a job cancel request. This call will return immediately. The job status can then be - * checked using either {@link #getJob(JobId, JobOption...)} or {@link #getJob(String, - * JobOption...)}). + * checked using {@link #getJob}. * *

    If the location of the job is not "US" or "EU", {@link #cancel(JobId)} must be used instead. * *

    Example of cancelling a job. * - *

    -   * {
    -   *   @code
    -   *   String jobName = "my_job_name";
    -   *   boolean success = bigquery.cancel(jobName);
    -   *   if (success) {
    -   *     // job was cancelled
    -   *   } else {
    -   *     // job was not found
    -   *   }
    +   * 
    {@code
    +   * String jobName = "my_job_name";
    +   * boolean success = bigquery.cancel(jobName);
    +   * if (success) {
    +   *   // job was cancelled
    +   * } else {
    +   *   // job was not found
        * }
    -   * 
    + * }
    * * @return {@code true} if cancel was requested successfully, {@code false} if the job was not * found @@ -1464,27 +1522,23 @@ TableResult listTableData( /** * Sends a job cancel request. This call will return immediately. The job status can then be - * checked using either {@link #getJob(JobId, JobOption...)} or {@link #getJob(String, - * JobOption...)}). + * checked using {@link #getJob}. * *

    If the location of the job is not "US" or "EU", the {@code jobId} must specify the job * location. * *

    Example of cancelling a job. * - *

    -   * {
    -   *   @code
    -   *   String jobName = "my_job_name";
    -   *   JobId jobId = JobId.of(jobName);
    -   *   boolean success = bigquery.cancel(jobId);
    -   *   if (success) {
    -   *     // job was cancelled
    -   *   } else {
    -   *     // job was not found
    -   *   }
    +   * 
    {@code
    +   * String jobName = "my_job_name";
    +   * JobId jobId = JobId.of(jobName);
    +   * boolean success = bigquery.cancel(jobId);
    +   * if (success) {
    +   *   // job was cancelled
    +   * } else {
    +   *   // job was not found
        * }
    -   * 
    + * }
    * * @return {@code true} if cancel was requested successfully, {@code false} if the job was not * found @@ -1503,22 +1557,29 @@ TableResult listTableData( * *

    Example of running a query. * - *

    -   * {
    -   *   @code
    -   *   // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    -   *   String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
    -   *   QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
    -   *
    -   *   // Print the results.
    -   *   for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
    -   *     for (FieldValue val : row) {
    -   *       System.out.printf("%s,", val.toString());
    -   *     }
    -   *     System.out.printf("\n");
    +   * 
    {@code
    +   * // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    +   * String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
    +   * QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
    +   *
    +   * // Print the results.
    +   * for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
    +   *   for (FieldValue val : row) {
    +   *     System.out.printf("%s,", val.toString());
        *   }
    +   *   System.out.printf("\n");
        * }
    -   * 
    + * }
+ * + * This method supports query-related preview features via environment variables (enabled by + * setting the {@code QUERY_PREVIEW_ENABLED} environment variable to "TRUE"). Specifically, this + * method supports: + * + *
+ * <ul>
+ *   <li>Stateless queries: query execution without corresponding job metadata
+ * </ul>
+ * + * The behavior of these preview features is controlled by the BigQuery service as well; a usage sketch follows below. * * @throws BigQueryException upon failure * @throws InterruptedException if the current thread gets interrupted while waiting for the query * to complete * @throws JobException if the job completes unsuccessfully @@ -1534,7 +1595,7 @@ TableResult query(QueryJobConfiguration configuration, JobOption... options) *
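A sketch of how the preview path described above might be exercised. It assumes the QUERY_PREVIEW_ENABLED variable is set in the process environment before the client is created (the library reads it from the environment; it is not set in code), and whether a given query actually runs statelessly is decided by the BigQuery service:

// Run as, e.g.: QUERY_PREVIEW_ENABLED=TRUE java MyApp
BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
QueryJobConfiguration queryConfig =
    QueryJobConfiguration.newBuilder(
            "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus")
        .build();
// Eligible queries may then execute without corresponding job metadata.
for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
  // do something with the row
}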

    If the location of the job is not "US" or "EU", the {@code jobId} must specify the job * location. * - *

    This method cannot be used in conjuction with {@link QueryJobConfiguration#dryRun()} + *

    This method cannot be used in conjunction with {@link QueryJobConfiguration#dryRun()} * queries. Since dry-run queries are not actually executed, there's no way to retrieve results. * *

    See {@link #query(QueryJobConfiguration, JobOption...)} for examples on populating a {@link @@ -1548,6 +1609,28 @@ TableResult query(QueryJobConfiguration configuration, JobOption... options) TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... options) throws InterruptedException, JobException; + /** + * Starts the query associated with the request, using the given JobId. It returns either + * TableResult for quick queries or Job object for long-running queries. + * + *

    If the location of the job is not "US" or "EU", the {@code jobId} must specify the job + * location. + * + *

    This method cannot be used in conjunction with {@link QueryJobConfiguration#dryRun()} + * queries. Since dry-run queries are not actually executed, there's no way to retrieve results. + * + *

    See {@link #query(QueryJobConfiguration, JobOption...)} for examples on populating a {@link + * QueryJobConfiguration}. + * + * @throws BigQueryException upon failure + * @throws InterruptedException if the current thread gets interrupted while waiting for the query + * to complete + * @throws JobException if the job completes unsuccessfully + */ + Object queryWithTimeout( + QueryJobConfiguration configuration, JobId jobId, Long timeoutMs, JobOption... options) + throws InterruptedException, JobException; + /** * Returns results of the query associated with the provided job. * @@ -1563,56 +1646,50 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... * *
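Because queryWithTimeout is declared to return Object, a caller has to branch on the runtime type. A minimal sketch, with an arbitrary 30-second timeout and a throwaway query; like query(...), it would live in a method that declares InterruptedException:

JobId jobId = JobId.newBuilder().setLocation("us").build();
Object result =
    bigquery.queryWithTimeout(
        QueryJobConfiguration.of("SELECT 17"), jobId, 30_000L /* timeout chosen arbitrarily */);
if (result instanceof TableResult) {
  // Quick query: rows are already available.
  for (FieldValueList row : ((TableResult) result).iterateAll()) {
    // do something with the row
  }
} else if (result instanceof Job) {
  // Long-running query: wait for the job, then fetch its results.
  Job job = ((Job) result).waitFor();
  TableResult rows = job.getQueryResults();
}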

    Example of creating a channel with which to write to a table. * - *

    -   * {
    -   *   @code
    -   *   String datasetName = "my_dataset_name";
    -   *   String tableName = "my_table_name";
    -   *   String csvData = "StringValue1\nStringValue2\n";
    -   *   TableId tableId = TableId.of(datasetName, tableName);
    -   *   WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
    -   *       .setFormatOptions(FormatOptions.csv()).build();
    -   *   TableDataWriteChannel writer = bigquery.writer(writeChannelConfiguration);
    -   *   // Write data to writer
    -   *   try {
    -   *     writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
    -   *   } finally {
    -   *     writer.close();
    -   *   }
    -   *   // Get load job
    -   *   Job job = writer.getJob();
    -   *   job = job.waitFor();
    -   *   LoadStatistics stats = job.getStatistics();
    -   *   return stats.getOutputRows();
    +   * 
    {@code
    +   * String datasetName = "my_dataset_name";
    +   * String tableName = "my_table_name";
    +   * String csvData = "StringValue1\nStringValue2\n";
    +   * TableId tableId = TableId.of(datasetName, tableName);
    +   * WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
    +   *     .setFormatOptions(FormatOptions.csv()).build();
    +   * TableDataWriteChannel writer = bigquery.writer(writeChannelConfiguration);
    +   * // Write data to writer
    +   * try {
    +   *   writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
    +   * } finally {
    +   *   writer.close();
        * }
    -   * 
    + * // Get load job + * Job job = writer.getJob(); + * job = job.waitFor(); + * LoadStatistics stats = job.getStatistics(); + * return stats.getOutputRows(); + * }
    * *

    Example of writing a local file to a table. * - *

    -   * {
    -   *   @code
    -   *   String datasetName = "my_dataset_name";
    -   *   String tableName = "my_table_name";
    -   *   Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv");
    -   *   String location = "us";
    -   *   TableId tableId = TableId.of(datasetName, tableName);
    -   *   WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
    -   *       .setFormatOptions(FormatOptions.csv()).build();
    -   *   // The location must be specified; other fields can be auto-detected.
    -   *   JobId jobId = JobId.newBuilder().setLocation(location).build();
    -   *   TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
    -   *   // Write data to writer
    -   *   try (OutputStream stream = Channels.newOutputStream(writer)) {
    -   *     Files.copy(csvPath, stream);
    -   *   }
    -   *   // Get load job
    -   *   Job job = writer.getJob();
    -   *   job = job.waitFor();
    -   *   LoadStatistics stats = job.getStatistics();
    -   *   return stats.getOutputRows();
    +   * 
    {@code
    +   * String datasetName = "my_dataset_name";
    +   * String tableName = "my_table_name";
    +   * Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv");
    +   * String location = "us";
    +   * TableId tableId = TableId.of(datasetName, tableName);
    +   * WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
    +   *     .setFormatOptions(FormatOptions.csv()).build();
    +   * // The location must be specified; other fields can be auto-detected.
    +   * JobId jobId = JobId.newBuilder().setLocation(location).build();
    +   * TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
    +   * // Write data to writer
    +   * try (OutputStream stream = Channels.newOutputStream(writer)) {
    +   *   Files.copy(csvPath, stream);
        * }
    -   * 
    + * // Get load job + * Job job = writer.getJob(); + * job = job.waitFor(); + * LoadStatistics stats = job.getStatistics(); + * return stats.getOutputRows(); + * }
    * * @throws BigQueryException upon failure */ @@ -1625,32 +1702,29 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... * *

    Example of creating a channel with which to write to a table. * - *

    -   * {
    -   *   @code
    -   *   String datasetName = "my_dataset_name";
    -   *   String tableName = "my_table_name";
    -   *   String csvData = "StringValue1\nStringValue2\n";
    -   *   String location = "us";
    -   *   TableId tableId = TableId.of(datasetName, tableName);
    -   *   WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
    -   *       .setFormatOptions(FormatOptions.csv()).build();
    -   *   // The location must be specified; other fields can be auto-detected.
    -   *   JobId jobId = JobId.newBuilder().setLocation(location).build();
    -   *   TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
    -   *   // Write data to writer
    -   *   try {
    -   *     writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
    -   *   } finally {
    -   *     writer.close();
    -   *   }
    -   *   // Get load job
    -   *   Job job = writer.getJob();
    -   *   job = job.waitFor();
    -   *   LoadStatistics stats = job.getStatistics();
    -   *   return stats.getOutputRows();
    +   * 
    {@code
    +   * String datasetName = "my_dataset_name";
    +   * String tableName = "my_table_name";
    +   * String csvData = "StringValue1\nStringValue2\n";
    +   * String location = "us";
    +   * TableId tableId = TableId.of(datasetName, tableName);
    +   * WriteChannelConfiguration writeChannelConfiguration = WriteChannelConfiguration.newBuilder(tableId)
    +   *     .setFormatOptions(FormatOptions.csv()).build();
    +   * // The location must be specified; other fields can be auto-detected.
    +   * JobId jobId = JobId.newBuilder().setLocation(location).build();
    +   * TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
    +   * // Write data to writer
    +   * try {
    +   *   writer.write(ByteBuffer.wrap(csvData.getBytes(Charsets.UTF_8)));
    +   * } finally {
    +   *   writer.close();
        * }
    -   * 
    + * // Get load job + * Job job = writer.getJob(); + * job = job.waitFor(); + * LoadStatistics stats = job.getStatistics(); + * return stats.getOutputRows(); + * }
    */ TableDataWriteChannel writer(JobId jobId, WriteChannelConfiguration writeChannelConfiguration); diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryBaseService.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryBaseService.java new file mode 100644 index 0000000000..bdcefb3d96 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryBaseService.java @@ -0,0 +1,37 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery; + +import com.google.cloud.BaseService; +import com.google.cloud.ExceptionHandler; +import com.google.cloud.ServiceOptions; + +abstract class BigQueryBaseService> + extends BaseService { + + protected BigQueryBaseService(ServiceOptions options) { + super(options); + } + + public static final ExceptionHandler DEFAULT_BIGQUERY_EXCEPTION_HANDLER = + ExceptionHandler.newBuilder() + .abortOn(RuntimeException.class) + .retryOn(java.net.ConnectException.class) // retry on Connection Exception + .retryOn(java.net.UnknownHostException.class) // retry on UnknownHostException + .retryOn(java.net.SocketException.class) // retry on SocketException + .addInterceptors(EXCEPTION_HANDLER_INTERCEPTOR) + .build(); +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryDryRunResult.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryDryRunResult.java new file mode 100644 index 0000000000..0494aa1a97 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryDryRunResult.java @@ -0,0 +1,39 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.api.core.BetaApi; +import java.util.List; + +public interface BigQueryDryRunResult { + + /** Returns the schema of the results. Null if the schema is not supplied. */ + @BetaApi + Schema getSchema() throws BigQuerySQLException; + + /** + * Returns query parameters for standard SQL queries by extracting undeclare query parameters from + * the dry run job. 
See more information: + * https://developers.google.com/resources/api-libraries/documentation/bigquery/v2/java/latest/com/google/api/services/bigquery/model/JobStatistics2.html#getUndeclaredQueryParameters-- + */ + @BetaApi + List getQueryParameters() throws BigQuerySQLException; + + /** Returns some processing statistics */ + @BetaApi + BigQueryResultStats getStatistics() throws BigQuerySQLException; +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryDryRunResultImpl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryDryRunResultImpl.java new file mode 100644 index 0000000000..fabb2f2fcb --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryDryRunResultImpl.java @@ -0,0 +1,49 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import java.util.List; + +public class BigQueryDryRunResultImpl implements BigQueryDryRunResult { + private Schema schema; + private List queryParameters; + private BigQueryResultStats stats; + + BigQueryDryRunResultImpl( + Schema schema, + List queryParameters, + BigQueryResultStats stats) { // Package-Private access + this.schema = schema; + this.queryParameters = queryParameters; + this.stats = stats; + } + + @Override + public Schema getSchema() throws BigQuerySQLException { + return schema; + } + + @Override + public List getQueryParameters() throws BigQuerySQLException { + return queryParameters; + } + + @Override + public BigQueryResultStats getStatistics() throws BigQuerySQLException { + return stats; + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/EmptyTableResult.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryErrorMessages.java similarity index 54% rename from google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/EmptyTableResult.java rename to google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryErrorMessages.java index 7cb5e1932f..5c86e08060 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/EmptyTableResult.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryErrorMessages.java @@ -1,5 +1,5 @@ /* - * Copyright 2018 Google LLC + * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -16,17 +16,12 @@ package com.google.cloud.bigquery; -import com.google.api.core.InternalApi; -import com.google.cloud.PageImpl; -import javax.annotation.Nullable; +public class BigQueryErrorMessages { + public static final String RATE_LIMIT_EXCEEDED_MSG = + "Exceeded rate limits:"; // Error Message for RateLimitExceeded Error + public static final String JOB_RATE_LIMIT_EXCEEDED_MSG = "Job exceeded rate limits:"; -public class EmptyTableResult extends TableResult { - - private static final long serialVersionUID = -4831062717210349819L; - - /** An empty {@code TableResult} to avoid making API requests to unlistable tables. */ - @InternalApi("Exposed for testing") - public EmptyTableResult(@Nullable Schema schema) { - super(schema, 0, new PageImpl(null, "", null)); + public class RetryRegExPatterns { + public static final String RATE_LIMIT_EXCEEDED_REGEX = ".*exceed.*rate.*limit.*"; } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryException.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryException.java index 8d6da0b4e9..c42ff63275 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryException.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryException.java @@ -122,8 +122,25 @@ static BaseServiceException translateAndThrow(RetryHelperException ex) { throw new BigQueryException(UNKNOWN_CODE, ex.getMessage(), ex.getCause()); } + static BaseServiceException translateAndThrow( + BigQueryRetryHelper.BigQueryRetryHelperException ex) { + if (ex.getCause() instanceof BaseServiceException) { + throw (BaseServiceException) ex.getCause(); + } + throw new BigQueryException(UNKNOWN_CODE, ex.getMessage(), ex.getCause()); + } + static BaseServiceException translateAndThrow(ExecutionException ex) { BaseServiceException.translate(ex); throw new BigQueryException(UNKNOWN_CODE, ex.getMessage(), ex.getCause()); } + + static BaseServiceException translateAndThrow(Exception ex) { + throw new BigQueryException(UNKNOWN_CODE, ex.getMessage(), ex.getCause()); + } + + static BaseServiceException translateAndThrowBigQuerySQLException(BigQueryException e) + throws BigQuerySQLException { + throw new BigQuerySQLException(e.getMessage(), e, e.getErrors()); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java index d402ce549a..320daa03a2 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java @@ -13,15 +13,14 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ - package com.google.cloud.bigquery; -import static com.google.cloud.RetryHelper.runWithRetries; import static com.google.cloud.bigquery.PolicyHelper.convertFromApiPolicy; import static com.google.cloud.bigquery.PolicyHelper.convertToApiPolicy; import static com.google.common.base.Preconditions.checkArgument; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; +import com.google.api.core.BetaApi; import com.google.api.core.InternalApi; import com.google.api.gax.paging.Page; import com.google.api.services.bigquery.model.ErrorProto; @@ -37,11 +36,12 @@ import com.google.cloud.PageImpl; import com.google.cloud.PageImpl.NextPageFetcher; import com.google.cloud.Policy; -import com.google.cloud.RetryHelper; -import com.google.cloud.RetryHelper.RetryHelperException; +import com.google.cloud.RetryOption; import com.google.cloud.Tuple; +import com.google.cloud.bigquery.BigQueryRetryHelper.BigQueryRetryHelperException; import com.google.cloud.bigquery.InsertAllRequest.RowToInsert; import com.google.cloud.bigquery.spi.v2.BigQueryRpc; +import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Strings; @@ -51,10 +51,17 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Scope; +import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.Callable; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import org.checkerframework.checker.nullness.qual.NonNull; final class BigQueryImpl extends BaseService implements BigQuery { @@ -237,7 +244,17 @@ public Page getNextPage() { } } - private final BigQueryRpc bigQueryRpc; + private final HttpBigQueryRpc bigQueryRpc; + + private static final BigQueryRetryConfig EMPTY_RETRY_CONFIG = + BigQueryRetryConfig.newBuilder().build(); + + private static final BigQueryRetryConfig DEFAULT_RETRY_CONFIG = + BigQueryRetryConfig.newBuilder() + .retryOnMessage(BigQueryErrorMessages.RATE_LIMIT_EXCEEDED_MSG) + .retryOnMessage(BigQueryErrorMessages.JOB_RATE_LIMIT_EXCEEDED_MSG) + .retryOnRegEx(BigQueryErrorMessages.RetryRegExPatterns.RATE_LIMIT_EXCEEDED_REGEX) + .build(); // retry config with Error Messages and RegEx for RateLimitExceeded Error BigQueryImpl(BigQueryOptions options) { super(options); @@ -254,21 +271,39 @@ public Dataset create(DatasetInfo datasetInfo, DatasetOption... options) { : datasetInfo.getDatasetId().getProject()) .toPb(); final Map optionsMap = optionMap(options); - try { + Span datasetCreate = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + datasetCreate = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.createDataset") + .setAllAttributes(datasetInfo.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope datasetCreateScope = datasetCreate != null ? 
datasetCreate.makeCurrent() : null) { return Dataset.fromPb( this, - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Dataset call() { - return bigQueryRpc.create(datasetPb, optionsMap); + public com.google.api.services.bigquery.model.Dataset call() throws IOException { + return bigQueryRpc.createSkipExceptionTranslation(datasetPb, optionsMap); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (datasetCreate != null) { + datasetCreate.end(); + } } } @@ -281,22 +316,51 @@ public Table create(TableInfo tableInfo, TableOption... options) { ? getOptions().getProjectId() : tableInfo.getTableId().getProject()) .toPb(); + handleExternalTableSchema(tablePb); final Map optionsMap = optionMap(options); - try { + Span tableCreate = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + tableCreate = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.createTable") + .setAllAttributes(tableInfo.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope tableCreateScope = tableCreate != null ? tableCreate.makeCurrent() : null) { return Table.fromPb( this, - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Table call() { - return bigQueryRpc.create(tablePb, optionsMap); + public com.google.api.services.bigquery.model.Table call() throws IOException { + return bigQueryRpc.createSkipExceptionTranslation(tablePb, optionsMap); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (tableCreate != null) { + tableCreate.end(); + } + } + } + + private void handleExternalTableSchema( + final com.google.api.services.bigquery.model.Table tablePb) { + // Set schema on the Table for permanent external table + if (tablePb.getExternalDataConfiguration() != null) { + tablePb.setSchema(tablePb.getExternalDataConfiguration().getSchema()); + // clear table schema on ExternalDataConfiguration + tablePb.getExternalDataConfiguration().setSchema(null); } } @@ -310,21 +374,39 @@ public Routine create(RoutineInfo routineInfo, RoutineOption... 
options) { : routineInfo.getRoutineId().getProject()) .toPb(); final Map optionsMap = optionMap(options); - try { + Span routineCreate = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + routineCreate = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.createRoutine") + .setAllAttributes(routineInfo.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope createRoutineScope = routineCreate != null ? routineCreate.makeCurrent() : null) { return Routine.fromPb( this, - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Routine call() { - return bigQueryRpc.create(routinePb, optionsMap); + public com.google.api.services.bigquery.model.Routine call() throws IOException { + return bigQueryRpc.createSkipExceptionTranslation(routinePb, optionsMap); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (routineCreate != null) { + routineCreate.end(); + } } } @@ -340,17 +422,37 @@ public JobId get() { return create(jobInfo, idProvider, options); } + @Override + @BetaApi + public Connection createConnection(@NonNull ConnectionSettings connectionSettings) + throws BigQueryException { + return new ConnectionImpl(connectionSettings, getOptions(), bigQueryRpc, DEFAULT_RETRY_CONFIG); + } + + @Override + @BetaApi + public Connection createConnection() throws BigQueryException { + ConnectionSettings defaultConnectionSettings = ConnectionSettings.newBuilder().build(); + return new ConnectionImpl( + defaultConnectionSettings, getOptions(), bigQueryRpc, DEFAULT_RETRY_CONFIG); + } + @InternalApi("visible for testing") Job create(JobInfo jobInfo, Supplier idProvider, JobOption... options) { - boolean idRandom = false; - if (jobInfo.getJobId() == null) { - jobInfo = jobInfo.toBuilder().setJobId(idProvider.get()).build(); - idRandom = true; - } - final com.google.api.services.bigquery.model.Job jobPb = - jobInfo.setProjectId(getOptions().getProjectId()).toPb(); - final Map optionsMap = optionMap(options); + final boolean idRandom = (jobInfo.getJobId() == null); + final Map optionsMap = optionMap(options); + Span jobCreate = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + jobCreate = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.createJob") + .setAllAttributes(jobInfo.getJobId().getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } BigQueryException createException; // NOTE(pongad): This double-try structure is admittedly odd. // translateAndThrow itself throws, and pretends to return an exception only @@ -358,47 +460,99 @@ Job create(JobInfo jobInfo, Supplier idProvider, JobOption... options) { // This makes it difficult to translate without throwing. // Fixing this entails some work on BaseServiceException.translate. // Since that affects a bunch of APIs, we should fix this as a separate change. 
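The createConnection entry points added above are @BetaApi; a hedged sketch of driving them, assuming the v2 Connection interface exposes dryRun, executeSelect, and close (member names taken from the surrounding connection work, which is not shown in full here):

// Defaults; real callers would typically tune ConnectionSettings via its builder.
Connection connection = bigquery.createConnection(ConnectionSettings.newBuilder().build());
// Dry-run first: surfaces the schema and any undeclared query parameters without executing.
BigQueryDryRunResult dryRun =
    connection.dryRun(
        "SELECT word FROM `bigquery-public-data.samples.shakespeare` WHERE corpus = @corpus");
Schema schema = dryRun.getSchema();
List<Parameter> undeclaredParams = dryRun.getQueryParameters();
// Then execute a statement and release the connection.
BigQueryResult result = connection.executeSelect("SELECT 17");
connection.close();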
- try { + final JobId[] finalJobId = new JobId[1]; + try (Scope jobCreateScope = jobCreate != null ? jobCreate.makeCurrent() : null) { try { - return Job.fromPb( - this, - runWithRetries( - new Callable() { - @Override - public com.google.api.services.bigquery.model.Job call() { - return bigQueryRpc.create(jobPb, optionsMap); - } - }, - getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelper.RetryHelperException e) { - throw BigQueryException.translateAndThrow(e); + try { + return Job.fromPb( + this, + BigQueryRetryHelper.runWithRetries( + new Callable() { + @Override + public com.google.api.services.bigquery.model.Job call() throws IOException { + if (idRandom) { + // re-generate a new random job with the same jobInfo when jobId is not + // provided by the user + JobInfo recreatedJobInfo = + jobInfo.toBuilder().setJobId(idProvider.get()).build(); + com.google.api.services.bigquery.model.Job newJobPb = + recreatedJobInfo.setProjectId(getOptions().getProjectId()).toPb(); + finalJobId[0] = recreatedJobInfo.getJobId(); + return bigQueryRpc.createSkipExceptionTranslation(newJobPb, optionsMap); + } else { + com.google.api.services.bigquery.model.Job jobPb = + jobInfo.setProjectId(getOptions().getProjectId()).toPb(); + return bigQueryRpc.createSkipExceptionTranslation(jobPb, optionsMap); + } + } + }, + getRetryOptions(optionsMap) != null + ? RetryOption.mergeToSettings( + getOptions().getRetrySettings(), getRetryOptions(optionsMap)) + : getOptions().getRetrySettings(), + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + getBigQueryRetryConfig(optionsMap) != null + ? getBigQueryRetryConfig(optionsMap) + : DEFAULT_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } catch (BigQueryException e) { + createException = e; } - } catch (BigQueryException e) { - createException = e; - } - if (!idRandom) { - throw createException; - } + if (!idRandom) { + if (createException instanceof BigQueryException + && createException.getCause() != null + && createException.getCause().getMessage() != null) { + + Pattern pattern = Pattern.compile(".*Already.*Exists:.*Job.*", Pattern.CASE_INSENSITIVE); + Matcher matcher = pattern.matcher(createException.getCause().getMessage()); + + if (matcher.find()) { + // If the Job ALREADY EXISTS, retrieve it. + Job job = this.getJob(jobInfo.getJobId(), JobOption.fields(JobField.STATISTICS)); + + long jobCreationTime = job.getStatistics().getCreationTime(); + long jobMinStaleTime = System.currentTimeMillis(); + long jobMaxStaleTime = + java.time.Instant.ofEpochMilli(jobMinStaleTime) + .minus(1, java.time.temporal.ChronoUnit.DAYS) + .toEpochMilli(); + + // Only return the job if it has been created in the past 24 hours. + // This is assuming any job older than 24 hours is a valid duplicate JobID + // and not a false positive like b/290419183 + if (jobCreationTime >= jobMaxStaleTime && jobCreationTime <= jobMinStaleTime) { + return job; + } + } + } + throw createException; + } - // If create RPC fails, it's still possible that the job has been successfully - // created, - // and get might work. - // We can only do this if we randomly generated the ID. Otherwise we might - // mistakenly - // fetch a job created by someone else. 
- Job job; - try { - job = getJob(jobInfo.getJobId()); - } catch (BigQueryException e) { - throw createException; - } - if (job == null) { - throw createException; + // If create RPC fails, it's still possible that the job has been successfully + // created, and get might work. + // We can only do this if we randomly generated the ID. Otherwise we might + // mistakenly fetch a job created by someone else. + Job job; + try { + job = getJob(finalJobId[0]); + } catch (BigQueryException e) { + throw createException; + } + if (job == null) { + throw createException; + } + return job; + } finally { + if (jobCreate != null) { + jobCreate.end(); + } } - return job; } @Override @@ -410,25 +564,46 @@ public Dataset getDataset(String datasetId, DatasetOption... options) { public Dataset getDataset(final DatasetId datasetId, DatasetOption... options) { final DatasetId completeDatasetId = datasetId.setProjectId(getOptions().getProjectId()); final Map optionsMap = optionMap(options); - try { + Span datasetGet = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + datasetGet = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.getDataset") + .setAllAttributes(completeDatasetId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope datasetGetScope = datasetGet != null ? datasetGet.makeCurrent() : null) { com.google.api.services.bigquery.model.Dataset answer = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Dataset call() { - return bigQueryRpc.getDataset( + public com.google.api.services.bigquery.model.Dataset call() throws IOException { + return bigQueryRpc.getDatasetSkipExceptionTranslation( completeDatasetId.getProject(), completeDatasetId.getDataset(), optionsMap); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock()); - if (getOptions().getThrowNotFound() && answer == null) { - throw new BigQueryException(HTTP_NOT_FOUND, "Dataset not found"); + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + return Dataset.fromPb(this, answer); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + if (getOptions().getThrowNotFound()) { + throw new BigQueryException(HTTP_NOT_FOUND, "Dataset not found"); + } + return null; } - return answer == null ? null : Dataset.fromPb(this, answer); - } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (datasetGet != null) { + datasetGet.end(); + } } } @@ -439,7 +614,24 @@ public Page listDatasets(DatasetListOption... options) { @Override public Page listDatasets(String projectId, DatasetListOption... options) { - return listDatasets(projectId, getOptions(), optionMap(options)); + Span datasetsList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + datasetsList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listDatasets") + .setAttribute("bq.dataset.project_id", projectId) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope datasetsListScope = datasetsList != null ? 
datasetsList.makeCurrent() : null) { + return listDatasets(projectId, getOptions(), optionMap(options)); + } finally { + if (datasetsList != null) { + datasetsList.end(); + } + } } private static Page listDatasets( @@ -448,18 +640,23 @@ private static Page listDatasets( final Map optionsMap) { try { Tuple> result = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable< Tuple>>() { @Override public Tuple> - call() { - return serviceOptions.getBigQueryRpcV2().listDatasets(projectId, optionsMap); + call() throws IOException { + return serviceOptions + .getBigQueryRpcV2() + .listDatasetsSkipExceptionTranslation(projectId, optionsMap); } }, serviceOptions.getRetrySettings(), - EXCEPTION_HANDLER, - serviceOptions.getClock()); + serviceOptions.getResultRetryAlgorithm(), + serviceOptions.getClock(), + EMPTY_RETRY_CONFIG, + serviceOptions.isOpenTelemetryTracingEnabled(), + serviceOptions.getOpenTelemetryTracer()); String cursor = result.x(); return new PageImpl<>( new DatasetPageFetcher(projectId, serviceOptions, cursor, optionsMap), @@ -472,7 +669,7 @@ public Dataset apply(com.google.api.services.bigquery.model.Dataset dataset) { return Dataset.fromPb(serviceOptions.getService(), dataset); } })); - } catch (RetryHelper.RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @@ -486,20 +683,41 @@ public boolean delete(String datasetId, DatasetDeleteOption... options) { public boolean delete(DatasetId datasetId, DatasetDeleteOption... options) { final DatasetId completeDatasetId = datasetId.setProjectId(getOptions().getProjectId()); final Map optionsMap = optionMap(options); - try { - return runWithRetries( + Span datasetDelete = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + datasetDelete = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.deleteDataset") + .setAllAttributes(datasetId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope datasetDeleteScope = datasetDelete != null ? datasetDelete.makeCurrent() : null) { + return BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public Boolean call() { - return bigQueryRpc.deleteDataset( + public Boolean call() throws IOException { + return bigQueryRpc.deleteDatasetSkipExceptionTranslation( completeDatasetId.getProject(), completeDatasetId.getDataset(), optionsMap); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock()); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + return false; + } throw BigQueryException.translateAndThrow(e); + } finally { + if (datasetDelete != null) { + datasetDelete.end(); + } } } @@ -515,22 +733,42 @@ public boolean delete(TableId tableId) { Strings.isNullOrEmpty(tableId.getProject()) ? 
getOptions().getProjectId() : tableId.getProject()); - try { - return runWithRetries( + Span tableDelete = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + tableDelete = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.deleteTable") + .setAllAttributes(tableId.getOtelAttributes()) + .startSpan(); + } + try (Scope tableDeleteScope = tableDelete != null ? tableDelete.makeCurrent() : null) { + return BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public Boolean call() { - return bigQueryRpc.deleteTable( + public Boolean call() throws IOException { + return bigQueryRpc.deleteTableSkipExceptionTranslation( completeTableId.getProject(), completeTableId.getDataset(), completeTableId.getTable()); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock()); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + return false; + } throw BigQueryException.translateAndThrow(e); + } finally { + if (tableDelete != null) { + tableDelete.end(); + } } } @@ -541,22 +779,42 @@ public boolean delete(ModelId modelId) { Strings.isNullOrEmpty(modelId.getProject()) ? getOptions().getProjectId() : modelId.getProject()); - try { - return runWithRetries( + Span modelDelete = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + modelDelete = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.deleteModel") + .setAllAttributes(modelId.getOtelAttributes()) + .startSpan(); + } + try (Scope modelDeleteScope = modelDelete != null ? modelDelete.makeCurrent() : null) { + return BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public Boolean call() { - return bigQueryRpc.deleteModel( + public Boolean call() throws IOException { + return bigQueryRpc.deleteModelSkipExceptionTranslation( completeModelId.getProject(), completeModelId.getDataset(), completeModelId.getModel()); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock()); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + return false; + } throw BigQueryException.translateAndThrow(e); + } finally { + if (modelDelete != null) { + modelDelete.end(); + } } } @@ -567,22 +825,83 @@ public boolean delete(RoutineId routineId) { Strings.isNullOrEmpty(routineId.getProject()) ? getOptions().getProjectId() : routineId.getProject()); - try { - return runWithRetries( + Span routineDelete = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + routineDelete = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.deleteRoutine") + .setAllAttributes(routineId.getOtelAttributes()) + .startSpan(); + } + try (Scope routineDeleteScope = routineDelete != null ? 
routineDelete.makeCurrent() : null) { + return BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public Boolean call() { - return bigQueryRpc.deleteRoutine( + public Boolean call() throws IOException { + return bigQueryRpc.deleteRoutineSkipExceptionTranslation( completeRoutineId.getProject(), completeRoutineId.getDataset(), completeRoutineId.getRoutine()); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock()); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + return false; + } + throw BigQueryException.translateAndThrow(e); + } finally { + if (routineDelete != null) { + routineDelete.end(); + } + } + } + + @Override + public boolean delete(JobId jobId) { + final JobId completeJobId = + jobId.setProjectId( + Strings.isNullOrEmpty(jobId.getProject()) + ? getOptions().getProjectId() + : jobId.getProject()); + Span jobDelete = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + jobDelete = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.deleteJob") + .setAllAttributes(completeJobId.getOtelAttributes()) + .startSpan(); + } + try { + return BigQueryRetryHelper.runWithRetries( + new Callable() { + @Override + public Boolean call() throws IOException { + return bigQueryRpc.deleteJobSkipExceptionTranslation( + completeJobId.getProject(), completeJobId.getJob(), completeJobId.getLocation()); + } + }, + getOptions().getRetrySettings(), + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (jobDelete != null) { + jobDelete.end(); + } } } @@ -591,21 +910,39 @@ public Dataset update(DatasetInfo datasetInfo, DatasetOption... options) { final com.google.api.services.bigquery.model.Dataset datasetPb = datasetInfo.setProjectId(getOptions().getProjectId()).toPb(); final Map optionsMap = optionMap(options); - try { + Span datasetUpdate = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + datasetUpdate = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.updateDataset") + .setAllAttributes(datasetInfo.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope datasetUpdateScope = datasetUpdate != null ? 
datasetUpdate.makeCurrent() : null) { return Dataset.fromPb( this, - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Dataset call() { - return bigQueryRpc.patch(datasetPb, optionsMap); + public com.google.api.services.bigquery.model.Dataset call() throws IOException { + return bigQueryRpc.patchSkipExceptionTranslation(datasetPb, optionsMap); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (datasetUpdate != null) { + datasetUpdate.end(); + } } } @@ -618,22 +955,41 @@ public Table update(TableInfo tableInfo, TableOption... options) { ? getOptions().getProjectId() : tableInfo.getTableId().getProject()) .toPb(); + handleExternalTableSchema(tablePb); final Map optionsMap = optionMap(options); - try { + Span tableUpdate = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + tableUpdate = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.updateTable") + .setAllAttributes(tableInfo.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope tableUpdateScope = tableUpdate != null ? tableUpdate.makeCurrent() : null) { return Table.fromPb( this, - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Table call() { - return bigQueryRpc.patch(tablePb, optionsMap); + public com.google.api.services.bigquery.model.Table call() throws IOException { + return bigQueryRpc.patchSkipExceptionTranslation(tablePb, optionsMap); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (tableUpdate != null) { + tableUpdate.end(); + } } } @@ -647,21 +1003,39 @@ public Model update(ModelInfo modelInfo, ModelOption... options) { : modelInfo.getModelId().getProject()) .toPb(); final Map optionsMap = optionMap(options); - try { + Span modelUpdate = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + modelUpdate = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.updateModel") + .setAllAttributes(modelInfo.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope modelUpdateScope = modelUpdate != null ? 
modelUpdate.makeCurrent() : null) { return Model.fromPb( this, - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Model call() { - return bigQueryRpc.patch(modelPb, optionsMap); + public com.google.api.services.bigquery.model.Model call() throws IOException { + return bigQueryRpc.patchSkipExceptionTranslation(modelPb, optionsMap); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (modelUpdate != null) { + modelUpdate.end(); + } } } @@ -675,21 +1049,39 @@ public Routine update(RoutineInfo routineInfo, RoutineOption... options) { : routineInfo.getRoutineId().getProject()) .toPb(); final Map optionsMap = optionMap(options); - try { + Span routineUpdate = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + routineUpdate = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.updateRoutine") + .setAllAttributes(routineInfo.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope routineUpdateScope = routineUpdate != null ? routineUpdate.makeCurrent() : null) { return Routine.fromPb( this, - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Routine call() { - return bigQueryRpc.update(routinePb, optionsMap); + public com.google.api.services.bigquery.model.Routine call() throws IOException { + return bigQueryRpc.updateSkipExceptionTranslation(routinePb, optionsMap); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (routineUpdate != null) { + routineUpdate.end(); + } } } @@ -708,13 +1100,24 @@ public Table getTable(TableId tableId, TableOption... options) { ? getOptions().getProjectId() : tableId.getProject()); final Map optionsMap = optionMap(options); - try { + Span tableGet = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + tableGet = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.getTable") + .setAllAttributes(tableId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope tableGetScope = tableGet != null ? 
tableGet.makeCurrent() : null) { com.google.api.services.bigquery.model.Table answer = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Table call() { - return bigQueryRpc.getTable( + public com.google.api.services.bigquery.model.Table call() throws IOException { + return bigQueryRpc.getTableSkipExceptionTranslation( completeTableId.getProject(), completeTableId.getDataset(), completeTableId.getTable(), @@ -722,14 +1125,24 @@ public com.google.api.services.bigquery.model.Table call() { } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock()); - if (getOptions().getThrowNotFound() && answer == null) { - throw new BigQueryException(HTTP_NOT_FOUND, "Table not found"); + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + return Table.fromPb(this, answer); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + if (getOptions().getThrowNotFound()) { + throw new BigQueryException(HTTP_NOT_FOUND, "Table not found"); + } + return null; } - return answer == null ? null : Table.fromPb(this, answer); - } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (tableGet != null) { + tableGet.end(); + } } } @@ -746,13 +1159,24 @@ public Model getModel(ModelId modelId, ModelOption... options) { ? getOptions().getProjectId() : modelId.getProject()); final Map optionsMap = optionMap(options); - try { + Span modelGet = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + modelGet = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.getModel") + .setAllAttributes(modelId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope modelGetScope = modelGet != null ? modelGet.makeCurrent() : null) { com.google.api.services.bigquery.model.Model answer = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Model call() { - return bigQueryRpc.getModel( + public com.google.api.services.bigquery.model.Model call() throws IOException { + return bigQueryRpc.getModelSkipExceptionTranslation( completeModelId.getProject(), completeModelId.getDataset(), completeModelId.getModel(), @@ -760,14 +1184,24 @@ public com.google.api.services.bigquery.model.Model call() { } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock()); - if (getOptions().getThrowNotFound() && answer == null) { - throw new BigQueryException(HTTP_NOT_FOUND, "Model not found"); + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + return Model.fromPb(this, answer); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + if (getOptions().getThrowNotFound()) { + throw new BigQueryException(HTTP_NOT_FOUND, "Model not found"); + } + return null; } - return answer == null ? 
null : Model.fromPb(this, answer); - } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (modelGet != null) { + modelGet.end(); + } } } @@ -784,13 +1218,24 @@ public Routine getRoutine(RoutineId routineId, RoutineOption... options) { ? getOptions().getProjectId() : routineId.getProject()); final Map optionsMap = optionMap(options); - try { + Span routineGet = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + routineGet = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.getRoutine") + .setAllAttributes(routineId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope routineGetScope = routineGet != null ? routineGet.makeCurrent() : null) { com.google.api.services.bigquery.model.Routine answer = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Routine call() { - return bigQueryRpc.getRoutine( + public com.google.api.services.bigquery.model.Routine call() throws IOException { + return bigQueryRpc.getRoutineSkipExceptionTranslation( completeRoutineId.getProject(), completeRoutineId.getDataset(), completeRoutineId.getRoutine(), @@ -798,75 +1243,203 @@ public com.google.api.services.bigquery.model.Routine call() { } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock()); - if (getOptions().getThrowNotFound() && answer == null) { - throw new BigQueryException(HTTP_NOT_FOUND, "Routine not found"); + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + return Routine.fromPb(this, answer); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + if (getOptions().getThrowNotFound()) { + throw new BigQueryException(HTTP_NOT_FOUND, "Routine not found"); + } + return null; } - return answer == null ? null : Routine.fromPb(this, answer); - } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (routineGet != null) { + routineGet.end(); + } } } @Override public Page
    listTables(String datasetId, TableListOption... options) { - return listTables( - DatasetId.of(getOptions().getProjectId(), datasetId), getOptions(), optionMap(options)); + Span tablesList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + tablesList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listTables") + .setAllAttributes(DatasetId.of(datasetId).getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope tablesListScope = tablesList != null ? tablesList.makeCurrent() : null) { + return listTables( + DatasetId.of(getOptions().getProjectId(), datasetId), getOptions(), optionMap(options)); + } finally { + if (tablesList != null) { + tablesList.end(); + } + } } @Override public Page
    listTables(DatasetId datasetId, TableListOption... options) { DatasetId completeDatasetId = datasetId.setProjectId(getOptions().getProjectId()); - return listTables(completeDatasetId, getOptions(), optionMap(options)); + Span tablesList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + tablesList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listTables") + .setAllAttributes(completeDatasetId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope tablesListScope = tablesList != null ? tablesList.makeCurrent() : null) { + return listTables(completeDatasetId, getOptions(), optionMap(options)); + } finally { + if (tablesList != null) { + tablesList.end(); + } + } } @Override public Page listModels(String datasetId, ModelListOption... options) { - return listModels( - DatasetId.of(getOptions().getProjectId(), datasetId), getOptions(), optionMap(options)); + Span modelsList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + modelsList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listModels") + .setAllAttributes(DatasetId.of(datasetId).getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope modelsListScope = modelsList != null ? modelsList.makeCurrent() : null) { + return listModels( + DatasetId.of(getOptions().getProjectId(), datasetId), getOptions(), optionMap(options)); + } finally { + if (modelsList != null) { + modelsList.end(); + } + } } @Override public Page listModels(DatasetId datasetId, ModelListOption... options) { DatasetId completeDatasetId = datasetId.setProjectId(getOptions().getProjectId()); - return listModels(completeDatasetId, getOptions(), optionMap(options)); + Span modelsList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + modelsList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listModels") + .setAllAttributes(datasetId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope modelsListScope = modelsList != null ? modelsList.makeCurrent() : null) { + return listModels(completeDatasetId, getOptions(), optionMap(options)); + } finally { + if (modelsList != null) { + modelsList.end(); + } + } } @Override public Page listRoutines(String datasetId, RoutineListOption... options) { - return listRoutines( - DatasetId.of(getOptions().getProjectId(), datasetId), getOptions(), optionMap(options)); + Span routinesList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + routinesList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listRoutines") + .setAllAttributes(DatasetId.of(datasetId).getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope routinesListScope = routinesList != null ? routinesList.makeCurrent() : null) { + return listRoutines( + DatasetId.of(getOptions().getProjectId(), datasetId), getOptions(), optionMap(options)); + } finally { + if (routinesList != null) { + routinesList.end(); + } + } } @Override public Page listRoutines(DatasetId datasetId, RoutineListOption... 
options) { DatasetId completeDatasetId = datasetId.setProjectId(getOptions().getProjectId()); - return listRoutines(completeDatasetId, getOptions(), optionMap(options)); + Span routinesList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + routinesList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listRoutines") + .setAllAttributes(datasetId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope routinesListScope = routinesList != null ? routinesList.makeCurrent() : null) { + return listRoutines(completeDatasetId, getOptions(), optionMap(options)); + } finally { + if (routinesList != null) { + routinesList.end(); + } + } } @Override public List listPartitions(TableId tableId) { - List partitions = new ArrayList(); - String partitionsTable = tableId.getTable() + "$__PARTITIONS_SUMMARY__"; - TableId metaTableId = - tableId.getProject() == null - ? TableId.of(tableId.getDataset(), partitionsTable) - : TableId.of(tableId.getProject(), tableId.getDataset(), partitionsTable); - Table metaTable = getTable(metaTableId); - Schema metaSchema = metaTable.getDefinition().getSchema(); - String partition_id = null; - for (Field field : metaSchema.getFields()) { - if (field.getName().equals("partition_id")) { - partition_id = field.getName(); - break; - } + Span listPartitions = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + listPartitions = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listPartitions") + .setAllAttributes(tableId.getOtelAttributes()) + .startSpan(); } - TableResult result = metaTable.list(metaSchema); - for (FieldValueList list : result.iterateAll()) { - partitions.add(list.get(partition_id).getStringValue()); + try (Scope listPartitionsScope = listPartitions != null ? listPartitions.makeCurrent() : null) { + List partitions = new ArrayList(); + String partitionsTable = tableId.getTable() + "$__PARTITIONS_SUMMARY__"; + TableId metaTableId = + tableId.getProject() == null + ? TableId.of(tableId.getDataset(), partitionsTable) + : TableId.of(tableId.getProject(), tableId.getDataset(), partitionsTable); + Table metaTable = getTable(metaTableId); + Schema metaSchema = metaTable.getDefinition().getSchema(); + String partition_id = null; + for (Field field : metaSchema.getFields()) { + if (field.getName().equals("partition_id")) { + partition_id = field.getName(); + break; + } + } + TableResult result = metaTable.list(metaSchema); + for (FieldValueList list : result.iterateAll()) { + partitions.add(list.get(partition_id).getStringValue()); + } + return partitions; + } finally { + if (listPartitions != null) { + listPartitions.end(); + } } - return partitions; } private static Page
    listTables( @@ -875,20 +1448,24 @@ private static Page
    listTables( final Map optionsMap) { try { Tuple> result = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable< Tuple>>() { @Override - public Tuple> - call() { + public Tuple> call() + throws IOException { return serviceOptions .getBigQueryRpcV2() - .listTables(datasetId.getProject(), datasetId.getDataset(), optionsMap); + .listTablesSkipExceptionTranslation( + datasetId.getProject(), datasetId.getDataset(), optionsMap); } }, serviceOptions.getRetrySettings(), - EXCEPTION_HANDLER, - serviceOptions.getClock()); + serviceOptions.getResultRetryAlgorithm(), + serviceOptions.getClock(), + EMPTY_RETRY_CONFIG, + serviceOptions.isOpenTelemetryTracingEnabled(), + serviceOptions.getOpenTelemetryTracer()); String cursor = result.x(); Iterable
    tables = Iterables.transform( @@ -901,7 +1478,7 @@ public Table apply(com.google.api.services.bigquery.model.Table table) { }); return new PageImpl<>( new TablePageFetcher(datasetId, serviceOptions, cursor, optionsMap), cursor, tables); - } catch (RetryHelper.RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @@ -912,20 +1489,24 @@ private static Page listModels( final Map optionsMap) { try { Tuple> result = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable< Tuple>>() { @Override - public Tuple> - call() { + public Tuple> call() + throws IOException { return serviceOptions .getBigQueryRpcV2() - .listModels(datasetId.getProject(), datasetId.getDataset(), optionsMap); + .listModelsSkipExceptionTranslation( + datasetId.getProject(), datasetId.getDataset(), optionsMap); } }, serviceOptions.getRetrySettings(), - EXCEPTION_HANDLER, - serviceOptions.getClock()); + serviceOptions.getResultRetryAlgorithm(), + serviceOptions.getClock(), + EMPTY_RETRY_CONFIG, + serviceOptions.isOpenTelemetryTracingEnabled(), + serviceOptions.getOpenTelemetryTracer()); String cursor = result.x(); Iterable models = Iterables.transform( @@ -938,7 +1519,7 @@ public Model apply(com.google.api.services.bigquery.model.Model model) { }); return new PageImpl<>( new ModelPageFetcher(datasetId, serviceOptions, cursor, optionsMap), cursor, models); - } catch (RetryHelper.RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @@ -949,20 +1530,24 @@ private static Page listRoutines( final Map optionsMap) { try { Tuple> result = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable< Tuple>>() { @Override public Tuple> - call() { + call() throws IOException { return serviceOptions .getBigQueryRpcV2() - .listRoutines(datasetId.getProject(), datasetId.getDataset(), optionsMap); + .listRoutinesSkipExceptionTranslation( + datasetId.getProject(), datasetId.getDataset(), optionsMap); } }, serviceOptions.getRetrySettings(), - EXCEPTION_HANDLER, - serviceOptions.getClock()); + serviceOptions.getResultRetryAlgorithm(), + serviceOptions.getClock(), + EMPTY_RETRY_CONFIG, + serviceOptions.isOpenTelemetryTracingEnabled(), + serviceOptions.getOpenTelemetryTracer()); String cursor = result.x(); Iterable routines = Iterables.transform( @@ -975,7 +1560,7 @@ public Routine apply(com.google.api.services.bigquery.model.Routine routinePb) { }); return new PageImpl<>( new RoutinePageFetcher(datasetId, serviceOptions, cursor, optionsMap), cursor, routines); - } catch (RetryHelper.RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @@ -1013,31 +1598,54 @@ public Rows apply(RowToInsert rowToInsert) { requestPb.setRows(rowsPb); TableDataInsertAllResponse responsePb; - if (allInsertIdsSet[0]) { - // allowing retries only if all row insertIds are set (used for deduplication) - try { + Span insertAll = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + insertAll = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.insertAll") + .setAllAttributes(request.getOtelAttributes()) + .startSpan(); + } + try (Scope insertAllScope = insertAll != null ? 
insertAll.makeCurrent() : null) { + if (allInsertIdsSet[0]) { + // allowing retries only if all row insertIds are set (used for deduplication) + try { + responsePb = + BigQueryRetryHelper.runWithRetries( + new Callable() { + @Override + public TableDataInsertAllResponse call() throws Exception { + return bigQueryRpc.insertAllSkipExceptionTranslation( + tableId.getProject(), + tableId.getDataset(), + tableId.getTable(), + requestPb); + } + }, + getOptions().getRetrySettings(), + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + } else { + // Use insertAll that translate the exception as we are not retrying. responsePb = - runWithRetries( - new Callable() { - @Override - public TableDataInsertAllResponse call() throws Exception { - return bigQueryRpc.insertAll( - tableId.getProject(), tableId.getDataset(), tableId.getTable(), requestPb); - } - }, - getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock()); - } catch (RetryHelperException e) { - throw BigQueryException.translateAndThrow(e); + bigQueryRpc.insertAll( + tableId.getProject(), tableId.getDataset(), tableId.getTable(), requestPb); } - } else { - responsePb = - bigQueryRpc.insertAll( - tableId.getProject(), tableId.getDataset(), tableId.getTable(), requestPb); - } - return InsertAllResponse.fromPb(responsePb); + return InsertAllResponse.fromPb(responsePb); + } finally { + if (insertAll != null) { + insertAll.end(); + } + } } @Override @@ -1061,7 +1669,28 @@ public TableResult listTableData( public TableResult listTableData(TableId tableId, Schema schema, TableDataListOption... options) { Tuple, Long> data = listTableData(tableId, schema, getOptions(), optionMap(options)); - return new TableResult(schema, data.y(), data.x()); + Span tableDataList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + tableDataList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listTableData") + .setAllAttributes(tableId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope tableDataListScope = tableDataList != null ? tableDataList.makeCurrent() : null) { + return TableResult.newBuilder() + .setSchema(schema) + .setTotalRows(data.y()) + .setPageNoSchema(data.x()) + .build(); + } finally { + if (tableDataList != null) { + tableDataList.end(); + } + } } private static Tuple, Long> listTableData( @@ -1076,13 +1705,13 @@ private static Tuple, Long> listTableData( ? 
serviceOptions.getProjectId() : tableId.getProject()); TableDataList result = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public TableDataList call() { + public TableDataList call() throws IOException { return serviceOptions .getBigQueryRpcV2() - .listTableData( + .listTableDataSkipExceptionTranslation( completeTableId.getProject(), completeTableId.getDataset(), completeTableId.getTable(), @@ -1090,8 +1719,11 @@ public TableDataList call() { } }, serviceOptions.getRetrySettings(), - EXCEPTION_HANDLER, - serviceOptions.getClock()); + serviceOptions.getResultRetryAlgorithm(), + serviceOptions.getClock(), + EMPTY_RETRY_CONFIG, + serviceOptions.isOpenTelemetryTracingEnabled(), + serviceOptions.getOpenTelemetryTracer()); String cursor = result.getPageToken(); Map pageOptionMap = Strings.isNullOrEmpty(cursor) ? optionsMap : optionMap(TableDataListOption.startIndex(0)); @@ -1099,15 +1731,18 @@ public TableDataList call() { new PageImpl<>( new TableDataPageFetcher(tableId, schema, serviceOptions, cursor, pageOptionMap), cursor, - transformTableData(result.getRows(), schema)), + transformTableData( + result.getRows(), + schema, + serviceOptions.getDataFormatOptions().useInt64Timestamp())), result.getTotalRows()); - } catch (RetryHelper.RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } private static Iterable transformTableData( - Iterable tableDataPb, final Schema schema) { + Iterable tableDataPb, final Schema schema, boolean useInt64Timestamps) { return ImmutableList.copyOf( Iterables.transform( tableDataPb != null ? tableDataPb : ImmutableList.of(), @@ -1116,7 +1751,7 @@ private static Iterable transformTableData( @Override public FieldValueList apply(TableRow rowPb) { - return FieldValueList.fromPb(rowPb.getF(), fields); + return FieldValueList.fromPb(rowPb.getF(), fields, useInt64Timestamps); } })); } @@ -1136,13 +1771,24 @@ public Job getJob(JobId jobId, JobOption... options) { jobId.getLocation() == null && getOptions().getLocation() != null ? getOptions().getLocation() : jobId.getLocation()); - try { + Span jobGet = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + jobGet = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.getJob") + .setAllAttributes(completeJobId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope jobGetScope = jobGet != null ? 
jobGet.makeCurrent() : null) { com.google.api.services.bigquery.model.Job answer = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Job call() { - return bigQueryRpc.getJob( + public com.google.api.services.bigquery.model.Job call() throws IOException { + return bigQueryRpc.getJobSkipExceptionTranslation( completeJobId.getProject(), completeJobId.getJob(), completeJobId.getLocation(), @@ -1150,48 +1796,82 @@ public com.google.api.services.bigquery.model.Job call() { } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock()); - if (getOptions().getThrowNotFound() && answer == null) { - throw new BigQueryException(HTTP_NOT_FOUND, "Job not found"); + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + return Job.fromPb(this, answer); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + if (getOptions().getThrowNotFound()) { + throw new BigQueryException(HTTP_NOT_FOUND, "Job not found"); + } + return null; } - return answer == null ? null : Job.fromPb(this, answer); - } catch (RetryHelper.RetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (jobGet != null) { + jobGet.end(); + } } } @Override public Page listJobs(JobListOption... options) { - return listJobs(getOptions(), optionMap(options)); + Span jobsList = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + jobsList = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.listJobs") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope jobsListScope = jobsList != null ? 
jobsList.makeCurrent() : null) { + return listJobs(getOptions(), optionMap(options)); + } finally { + if (jobsList != null) { + jobsList.end(); + } + } } private static Page listJobs( final BigQueryOptions serviceOptions, final Map optionsMap) { - Tuple> result = - runWithRetries( - new Callable>>() { - @Override - public Tuple> call() { - return serviceOptions - .getBigQueryRpcV2() - .listJobs(serviceOptions.getProjectId(), optionsMap); - } - }, - serviceOptions.getRetrySettings(), - EXCEPTION_HANDLER, - serviceOptions.getClock()); - String cursor = result.x(); - Iterable jobs = - Iterables.transform( - result.y(), - new Function() { - @Override - public Job apply(com.google.api.services.bigquery.model.Job job) { - return Job.fromPb(serviceOptions.getService(), job); - } - }); - return new PageImpl<>(new JobPageFetcher(serviceOptions, cursor, optionsMap), cursor, jobs); + try { + Tuple> result = + BigQueryRetryHelper.runWithRetries( + new Callable>>() { + @Override + public Tuple> call() + throws IOException { + return serviceOptions + .getBigQueryRpcV2() + .listJobsSkipExceptionTranslation(serviceOptions.getProjectId(), optionsMap); + } + }, + serviceOptions.getRetrySettings(), + serviceOptions.getResultRetryAlgorithm(), + serviceOptions.getClock(), + EMPTY_RETRY_CONFIG, + serviceOptions.isOpenTelemetryTracingEnabled(), + serviceOptions.getOpenTelemetryTracer()); + String cursor = result.x(); + Iterable jobs = + Iterables.transform( + result.y(), + new Function() { + @Override + public Job apply(com.google.api.services.bigquery.model.Job job) { + return Job.fromPb(serviceOptions.getService(), job); + } + }); + return new PageImpl<>(new JobPageFetcher(serviceOptions, cursor, optionsMap), cursor, jobs); + } catch (BigQueryRetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } } @Override @@ -1208,65 +1888,93 @@ public boolean cancel(JobId jobId) { jobId.getLocation() == null && getOptions().getLocation() != null ? getOptions().getLocation() : jobId.getLocation()); - try { - return runWithRetries( + Span jobCancel = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + jobCancel = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.cancelJob") + .setAllAttributes(completeJobId.getOtelAttributes()) + .startSpan(); + } + try (Scope jobCancelScope = jobCancel != null ? jobCancel.makeCurrent() : null) { + return BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public Boolean call() { - return bigQueryRpc.cancel( + public Boolean call() throws IOException { + return bigQueryRpc.cancelSkipExceptionTranslation( completeJobId.getProject(), completeJobId.getJob(), completeJobId.getLocation()); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock()); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { + if (isRetryErrorCodeHttpNotFound(e)) { + return false; + } throw BigQueryException.translateAndThrow(e); + } finally { + if (jobCancel != null) { + jobCancel.end(); + } } } @Override public TableResult query(QueryJobConfiguration configuration, JobOption... 
options) throws InterruptedException, JobException { - Job.checkNotDryRun(configuration, "query"); - - // If all parameters passed in configuration are supported by the query() method on the backend, - // put on fast path - QueryRequestInfo requestInfo = new QueryRequestInfo(configuration); - if (requestInfo.isFastQuerySupported()) { - String projectId = getOptions().getProjectId(); - QueryRequest content = requestInfo.toPb(); - return queryRpc(projectId, content, options); - } - // Otherwise, fall back to the existing create query job logic - return create(JobInfo.of(configuration), options).getQueryResults(); + return query(configuration, null, options); } - private TableResult queryRpc( - final String projectId, final QueryRequest content, JobOption... options) + private Object queryRpc(final String projectId, final QueryRequest content, JobOption... options) throws InterruptedException { com.google.api.services.bigquery.model.QueryResponse results; - try { + Span queryRpc = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + queryRpc = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.queryRpc") + .setAttribute("bq.query.project_id", projectId) + .setAllAttributes(otelAttributesFromQueryRequest(content)) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope queryRpcScope = queryRpc != null ? queryRpc.makeCurrent() : null) { results = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.QueryResponse call() { - return bigQueryRpc.queryRpc(projectId, content); + public com.google.api.services.bigquery.model.QueryResponse call() + throws IOException { + return bigQueryRpc.queryRpcSkipExceptionTranslation(projectId, content); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock()); - } catch (RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + DEFAULT_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + } catch (BigQueryRetryHelper.BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (queryRpc != null) { + queryRpc.end(); + } } if (results.getErrors() != null) { List bigQueryErrors = Lists.transform(results.getErrors(), BigQueryError.FROM_PB_FUNCTION); // Throwing BigQueryException since there may be no JobId and we want to stay consistent - // with the case where there there is a HTTP error + // with the case where there is an HTTP error throw new BigQueryException(bigQueryErrors); } @@ -1287,43 +1995,145 @@ public com.google.api.services.bigquery.model.QueryResponse call() { // here, but this is left as future work. 
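// Fast-path fallback: jobs.query returned without a completed result and
// schema, so resolve the job it created from the returned JobReference and
// return it; query() then polls it through Job.getQueryResults().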
JobId jobId = JobId.fromPb(results.getJobReference()); Job job = getJob(jobId, options); - return job.getQueryResults(); + return job; } if (results.getPageToken() != null) { JobId jobId = JobId.fromPb(results.getJobReference()); String cursor = results.getPageToken(); - return new TableResult( - schema, - numRows, - new PageImpl<>( - // fetch next pages of results - new QueryPageFetcher(jobId, schema, getOptions(), cursor, optionMap(options)), - cursor, - // cache first page of result - transformTableData(results.getRows(), schema))); + return TableResult.newBuilder() + .setSchema(schema) + .setTotalRows(numRows) + .setPageNoSchema( + new PageImpl<>( + // fetch next pages of results + new QueryPageFetcher(jobId, schema, getOptions(), cursor, optionMap(options)), + cursor, + transformTableData( + results.getRows(), + schema, + getOptions().getDataFormatOptions().useInt64Timestamp()))) + .setJobId(jobId) + .setQueryId(results.getQueryId()) + .setJobCreationReason(JobCreationReason.fromPb(results.getJobCreationReason())) + .build(); } // only 1 page of result - return new TableResult( - schema, - numRows, - new PageImpl<>( - new TableDataPageFetcher(null, schema, getOptions(), null, optionMap(options)), - null, - transformTableData(results.getRows(), schema))); + return TableResult.newBuilder() + .setSchema(schema) + .setTotalRows(numRows) + .setPageNoSchema( + new PageImpl<>( + new TableDataPageFetcher(null, schema, getOptions(), null, optionMap(options)), + null, + transformTableData( + results.getRows(), + schema, + getOptions().getDataFormatOptions().useInt64Timestamp()))) + // Return the JobID of the successful job + .setJobId( + results.getJobReference() != null ? JobId.fromPb(results.getJobReference()) : null) + .setQueryId(results.getQueryId()) + .setJobCreationReason(JobCreationReason.fromPb(results.getJobCreationReason())) + .build(); } @Override public TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... options) throws InterruptedException, JobException { + Object result = queryWithTimeout(configuration, jobId, null, options); + if (result instanceof Job) { + return ((Job) result).getQueryResults(); + } + return (TableResult) result; + } + + @Override + public Object queryWithTimeout( + QueryJobConfiguration configuration, JobId jobId, Long timeoutMs, JobOption... options) + throws InterruptedException, JobException { Job.checkNotDryRun(configuration, "query"); - return create(JobInfo.of(jobId, configuration), options).getQueryResults(); + + // If JobCreationMode is not explicitly set, update it with default value; + if (configuration.getJobCreationMode() == null) { + configuration = + configuration.toBuilder() + .setJobCreationMode(getOptions().getDefaultJobCreationMode()) + .build(); + } + + Span querySpan = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + querySpan = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.queryWithTimeout") + .setAllAttributes(jobId != null ? jobId.getOtelAttributes() : null) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope queryScope = querySpan != null ? 
querySpan.makeCurrent() : null) { + // If all parameters passed in configuration are supported by the query() method on the + // backend, put on fast path + QueryRequestInfo requestInfo = + new QueryRequestInfo(configuration, getOptions().getDataFormatOptions()); + if (requestInfo.isFastQuerySupported(jobId)) { + // Be careful when setting the projectID in JobId: if a projectID is specified in the JobId, + // the job created by the query method will use that project. This may cause the query to + // fail with "Access denied" if the project does not have enough permissions to run the job. + + String projectId = + jobId != null && jobId.getProject() != null + ? jobId.getProject() + : getOptions().getProjectId(); + QueryRequest content = requestInfo.toPb(); + // Be careful when setting the location: if a location is specified in the BigQueryOptions or + // JobId, the job created by the query method will run in that location, even if the table + // being queried is in a different location. This may cause the query to fail with + // "BigQueryException: Not found". + if (jobId != null && jobId.getLocation() != null) { + content.setLocation(jobId.getLocation()); + } else if (getOptions().getLocation() != null) { + content.setLocation(getOptions().getLocation()); + } + if (timeoutMs != null) { + content.setTimeoutMs(timeoutMs); + } + + return queryRpc(projectId, content, options); + } + return create(JobInfo.of(jobId, configuration), options).getQueryResults(); + } finally { + if (querySpan != null) { + querySpan.end(); + } + } } @Override public QueryResponse getQueryResults(JobId jobId, QueryResultsOption... options) { Map optionsMap = optionMap(options); - return getQueryResults(jobId, getOptions(), optionsMap); + Span getQueryResults = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + getQueryResults = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.getQueryResults") + .setAllAttributes(jobId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope getQueryResultsScope = + getQueryResults != null ? getQueryResults.makeCurrent() : null) { + return getQueryResults(jobId, getOptions(), optionsMap); + } finally { + if (getQueryResults != null) { + getQueryResults.end(); + } + } } private static QueryResponse getQueryResults( @@ -1339,13 +2149,13 @@ private static QueryResponse getQueryResults( : jobId.getLocation()); try { GetQueryResultsResponse results = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public GetQueryResultsResponse call() { + public GetQueryResultsResponse call() throws IOException { return serviceOptions .getBigQueryRpcV2() - .getQueryResults( + .getQueryResultsSkipExceptionTranslation( completeJobId.getProject(), completeJobId.getJob(), completeJobId.getLocation(), @@ -1353,8 +2163,12 @@ public GetQueryResultsResponse call() { } }, serviceOptions.getRetrySettings(), - EXCEPTION_HANDLER, - serviceOptions.getClock()); + serviceOptions.getResultRetryAlgorithm(), + serviceOptions.getClock(), + DEFAULT_RETRY_CONFIG, + serviceOptions.isOpenTelemetryTracingEnabled(), + serviceOptions.getOpenTelemetryTracer()); + TableSchema schemaPb = results.getSchema(); ImmutableList.Builder errors = ImmutableList.builder(); @@ -1370,7 +2184,7 @@ public GetQueryResultsResponse call() { .setTotalRows(results.getTotalRows() == null ?
0 : results.getTotalRows().longValue()) .setErrors(errors.build()) .build(); - } catch (RetryHelper.RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); } } @@ -1397,21 +2211,40 @@ public Policy getIamPolicy(TableId tableId, IAMOption... options) { ? getOptions().getProjectId() : tableId.getProject()); - try { - final Map optionsMap = optionMap(options); + final Map optionsMap = optionMap(options); + Span iamPolicyGet = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + iamPolicyGet = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.getIamPolicy") + .setAllAttributes(tableId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope iamPolicyGetScope = iamPolicyGet != null ? iamPolicyGet.makeCurrent() : null) { return convertFromApiPolicy( - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Policy call() { - return bigQueryRpc.getIamPolicy(completeTableId.getIAMResourceName(), optionsMap); + public com.google.api.services.bigquery.model.Policy call() throws IOException { + return bigQueryRpc.getIamPolicySkipExceptionTranslation( + completeTableId.getIAMResourceName(), optionsMap); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelper.RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (iamPolicyGet != null) { + iamPolicyGet.end(); + } } } @@ -1422,22 +2255,42 @@ public Policy setIamPolicy(TableId tableId, final Policy policy, IAMOption... op Strings.isNullOrEmpty(tableId.getProject()) ? getOptions().getProjectId() : tableId.getProject()); - try { - final Map optionsMap = optionMap(options); + + final Map optionsMap = optionMap(options); + Span iamPolicySet = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + iamPolicySet = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.setIamPolicy") + .setAllAttributes(tableId.getOtelAttributes()) + .setAllAttributes(otelAttributesFromPolicy(policy)) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope iamPolicySetScope = iamPolicySet != null ? 
iamPolicySet.makeCurrent() : null) { return convertFromApiPolicy( - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Policy call() { - return bigQueryRpc.setIamPolicy( + public com.google.api.services.bigquery.model.Policy call() throws IOException { + return bigQueryRpc.setIamPolicySkipExceptionTranslation( completeTableId.getIAMResourceName(), convertToApiPolicy(policy), optionsMap); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock())); - } catch (RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer())); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (iamPolicySet != null) { + iamPolicySet.end(); + } } } @@ -1449,23 +2302,46 @@ public List testIamPermissions( Strings.isNullOrEmpty(tableId.getProject()) ? getOptions().getProjectId() : tableId.getProject()); - try { - final Map optionsMap = optionMap(options); + final Map optionsMap = optionMap(options); + Span testIamPermissions = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + testIamPermissions = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQuery.testIamPermissions") + .setAllAttributes(tableId.getOtelAttributes()) + .setAttribute("bq.iam.permissions", permissions.toString()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + try (Scope testIamPermissionsScope = + testIamPermissions != null ? testIamPermissions.makeCurrent() : null) { com.google.api.services.bigquery.model.TestIamPermissionsResponse response = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.TestIamPermissionsResponse call() { - return bigQueryRpc.testIamPermissions( + public com.google.api.services.bigquery.model.TestIamPermissionsResponse call() + throws IOException { + return bigQueryRpc.testIamPermissionsSkipExceptionTranslation( completeTableId.getIAMResourceName(), permissions, optionsMap); } }, getOptions().getRetrySettings(), - EXCEPTION_HANDLER, - getOptions().getClock()); - return ImmutableList.copyOf(response.getPermissions()); - } catch (RetryHelperException e) { + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); + return response.getPermissions() == null + ? ImmutableList.of() + : ImmutableList.copyOf(response.getPermissions()); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (testIamPermissions != null) { + testIamPermissions.end(); + } } } @@ -1478,4 +2354,55 @@ public com.google.api.services.bigquery.model.TestIamPermissionsResponse call() } return optionMap; } + + private static String getFieldAsString(Object field) { + return field == null ? "null" : field.toString(); + } + + private static Attributes otelAttributesFromOptions(Option... options) { + Attributes attributes = Attributes.builder().build(); + for (Option option : options) { + attributes = + attributes.toBuilder() + .put("bq.option." 
+ option.getRpcOption().toString(), option.getValue().toString()) + .build(); + } + return attributes; + } + + private static Attributes otelAttributesFromQueryRequest(QueryRequest request) { + return Attributes.builder() + .put("bq.query.dry_run", getFieldAsString(request.getDryRun())) + .put("bq.query.job_creation_mode", getFieldAsString(request.getJobCreationMode())) + .put("bq.query.kind", getFieldAsString(request.getKind())) + .put("bq.query.location", getFieldAsString(request.getLocation())) + .put("bq.query.request_id", getFieldAsString(request.getRequestId())) + .put("bq.query.use_query_cache", getFieldAsString(request.getUseQueryCache())) + .build(); + } + + private static Attributes otelAttributesFromPolicy(Policy policy) { + return Attributes.builder() + .put("bq.policy.version", getFieldAsString(policy.getVersion())) + .put("bq.policy.bindings", getFieldAsString(policy.getBindings())) + .build(); + } + + static BigQueryRetryConfig getBigQueryRetryConfig(Map options) { + return (BigQueryRetryConfig) + options.getOrDefault(BigQueryRpc.Option.BIGQUERY_RETRY_CONFIG, null); + } + + static RetryOption[] getRetryOptions(Map options) { + return (RetryOption[]) options.getOrDefault(BigQueryRpc.Option.RETRY_OPTIONS, null); + } + + private static boolean isRetryErrorCodeHttpNotFound(BigQueryRetryHelperException e) { + if (e.getCause() instanceof BigQueryException) { + if (((BigQueryException) e.getCause()).getCode() == HTTP_NOT_FOUND) { + return true; + } + } + return false; + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java index 5687d78989..10ae77930c 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java @@ -16,26 +16,38 @@ package com.google.cloud.bigquery; +import com.google.api.core.BetaApi; +import com.google.api.core.ObsoleteApi; +import com.google.api.gax.retrying.ResultRetryAlgorithm; import com.google.cloud.ServiceDefaults; import com.google.cloud.ServiceOptions; import com.google.cloud.ServiceRpc; import com.google.cloud.TransportOptions; +import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode; import com.google.cloud.bigquery.spi.BigQueryRpcFactory; -import com.google.cloud.bigquery.spi.v2.BigQueryRpc; import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc; import com.google.cloud.http.HttpTransportOptions; +import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableSet; +import io.opentelemetry.api.trace.Tracer; import java.util.Set; public class BigQueryOptions extends ServiceOptions { private static final String API_SHORT_NAME = "BigQuery"; + private static final int DEFAULT_READ_API_TIME_OUT = 60000; private static final String BIGQUERY_SCOPE = "https://www.googleapis.com/auth/bigquery"; private static final Set SCOPES = ImmutableSet.of(BIGQUERY_SCOPE); - private static final long serialVersionUID = -2437598817433266049L; + private static final long serialVersionUID = -2437598817433266048L; private final String location; // set the option ThrowNotFound when you want to throw the exception when the value not found private boolean setThrowNotFound; + private boolean useInt64Timestamps; + private DataFormatOptions dataFormatOptions; + private JobCreationMode defaultJobCreationMode = JobCreationMode.JOB_CREATION_MODE_UNSPECIFIED; + private boolean 
enableOpenTelemetryTracing; + private Tracer openTelemetryTracer; + private ResultRetryAlgorithm resultRetryAlgorithm; public static class DefaultBigQueryFactory implements BigQueryFactory { @@ -60,6 +72,11 @@ public ServiceRpc create(BigQueryOptions options) { public static class Builder extends ServiceOptions.Builder { private String location; + private boolean useInt64Timestamps; + private DataFormatOptions dataFormatOptions; + private boolean enableOpenTelemetryTracing; + private Tracer openTelemetryTracer; + private ResultRetryAlgorithm resultRetryAlgorithm; private Builder() {} @@ -81,6 +98,59 @@ public Builder setLocation(String location) { return this; } + /** + * This setter is marked as Obsolete. Prefer {@link #setDataFormatOptions(DataFormatOptions)} to + * set the int64timestamp configuration instead. + * + *
<p>If the useInt64Timestamps value is set both here and via DataFormatOptions, the + * DataFormatOptions configuration value is used. + * + *
<p>
    {@code DataFormatOptions.newBuilder().setUseInt64Timestamp(...).build()} + */ + @ObsoleteApi("Use setDataFormatOptions(DataFormatOptions) instead") + public Builder setUseInt64Timestamps(boolean useInt64Timestamps) { + this.useInt64Timestamps = useInt64Timestamps; + return this; + } + + /** + * Set the format options for the BigQuery data types + * + * @param dataFormatOptions Configuration of the formatting options + */ + public Builder setDataFormatOptions(DataFormatOptions dataFormatOptions) { + Preconditions.checkNotNull(dataFormatOptions, "DataFormatOptions cannot be null"); + this.dataFormatOptions = dataFormatOptions; + return this; + } + + /** + * Enables OpenTelemetry tracing functionality for this BigQuery instance + * + * @param enableOpenTelemetryTracing enables OpenTelemetry tracing if true + */ + @BetaApi + public Builder setEnableOpenTelemetryTracing(boolean enableOpenTelemetryTracing) { + this.enableOpenTelemetryTracing = enableOpenTelemetryTracing; + return this; + } + + /** + * Sets the OpenTelemetry tracer for this BigQuery instance to be tracer. + * + * @param tracer OpenTelemetry tracer to be used + */ + @BetaApi + public Builder setOpenTelemetryTracer(Tracer tracer) { + this.openTelemetryTracer = tracer; + return this; + } + + public Builder setResultRetryAlgorithm(ResultRetryAlgorithm resultRetryAlgorithm) { + this.resultRetryAlgorithm = resultRetryAlgorithm; + return this; + } + @Override public BigQueryOptions build() { return new BigQueryOptions(this); @@ -90,6 +160,23 @@ public BigQueryOptions build() { private BigQueryOptions(Builder builder) { super(BigQueryFactory.class, BigQueryRpcFactory.class, builder, new BigQueryDefaults()); this.location = builder.location; + this.useInt64Timestamps = builder.useInt64Timestamps; + this.enableOpenTelemetryTracing = builder.enableOpenTelemetryTracing; + this.openTelemetryTracer = builder.openTelemetryTracer; + if (builder.resultRetryAlgorithm != null) { + this.resultRetryAlgorithm = builder.resultRetryAlgorithm; + } else { + this.resultRetryAlgorithm = BigQueryBaseService.DEFAULT_BIGQUERY_EXCEPTION_HANDLER; + } + + // If dataFormatOptions is not set, then create a new instance and set it with the + // useInt64Timestamps configured in BigQueryOptions + if (builder.dataFormatOptions == null) { + this.dataFormatOptions = + DataFormatOptions.newBuilder().useInt64Timestamp(builder.useInt64Timestamps).build(); + } else { + this.dataFormatOptions = builder.dataFormatOptions; + } } private static class BigQueryDefaults implements ServiceDefaults { @@ -113,7 +200,7 @@ public TransportOptions getDefaultTransportOptions() { } public static HttpTransportOptions getDefaultHttpTransportOptions() { - return HttpTransportOptions.newBuilder().build(); + return HttpTransportOptions.newBuilder().setReadTimeout(DEFAULT_READ_API_TIME_OUT).build(); } @Override @@ -121,22 +208,99 @@ protected Set getScopes() { return SCOPES; } - protected BigQueryRpc getBigQueryRpcV2() { - return (BigQueryRpc) getRpc(); + protected HttpBigQueryRpc getBigQueryRpcV2() { + return (HttpBigQueryRpc) getRpc(); } public String getLocation() { return location; } + @Deprecated + public boolean isQueryPreviewEnabled() { + return false; + } + public void setThrowNotFound(boolean setThrowNotFound) { this.setThrowNotFound = setThrowNotFound; } + /** + * This setter is marked as Obsolete. Prefer {@link + * Builder#setDataFormatOptions(DataFormatOptions)} to set the int64timestamp configuration + * instead. + * + *
<p>If useInt64Timestamps is set via DataFormatOptions, then the value in DataFormatOptions will + * be used. Otherwise, this value will be passed to DataFormatOptions. + * + *
<p>
    Alternative: {@code DataFormatOptions.newBuilder().setUseInt64Timestamp(...).build()} + */ + @ObsoleteApi("Use Builder#setDataFormatOptions(DataFormatOptions) instead") + public void setUseInt64Timestamps(boolean useInt64Timestamps) { + this.useInt64Timestamps = useInt64Timestamps; + // Because this setter exists outside the Builder, DataFormatOptions needs be rebuilt to + // account for this setting. + this.dataFormatOptions = + dataFormatOptions.toBuilder().useInt64Timestamp(useInt64Timestamps).build(); + } + + @Deprecated + public void setQueryPreviewEnabled(String queryPreviewEnabled) {} + + public void setDefaultJobCreationMode(JobCreationMode jobCreationMode) { + this.defaultJobCreationMode = jobCreationMode; + } + public boolean getThrowNotFound() { return setThrowNotFound; } + /** + * This getter is marked as Obsolete. Prefer {@link + * DataFormatOptions.Builder#useInt64Timestamp(boolean)} to set the int64timestamp configuration + * instead. + * + *
<p>
    Warning: DataFormatOptions values have precedence. Use {@link + * DataFormatOptions#useInt64Timestamp()} to get `useInt64Timestamp` value used by the BigQuery + * client. + */ + @ObsoleteApi("Use getDataFormatOptions().isUseInt64Timestamp() instead") + public boolean getUseInt64Timestamps() { + return dataFormatOptions.useInt64Timestamp(); + } + + public DataFormatOptions getDataFormatOptions() { + return dataFormatOptions; + } + + public JobCreationMode getDefaultJobCreationMode() { + return defaultJobCreationMode; + } + + /** + * Returns whether this BigQuery instance has OpenTelemetry tracing enabled + * + * @return true if tracing is enabled, false if not + */ + @BetaApi("Span names and attributes are subject to change without notice") + public boolean isOpenTelemetryTracingEnabled() { + return enableOpenTelemetryTracing; + } + + /** + * Returns the OpenTelemetry tracer used by this BigQuery instance + * + * @return OpenTelemetry tracer object or {@code null} if not set + */ + @BetaApi("Span names and attributes are subject to change without notice") + public Tracer getOpenTelemetryTracer() { + return openTelemetryTracer; + } + + public ResultRetryAlgorithm getResultRetryAlgorithm() { + return resultRetryAlgorithm; + } + @SuppressWarnings("unchecked") @Override public Builder toBuilder() { diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResult.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResult.java new file mode 100644 index 0000000000..6b0c35f67c --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResult.java @@ -0,0 +1,38 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import java.sql.ResultSet; + +public interface BigQueryResult { + + /** Returns the schema of the results. */ + Schema getSchema(); + + /** + * Returns the total number of rows in the complete result set, which can be more than the number + * of rows in the first page of results. This might return -1 if the query is long running and the + * job is not complete at the time this object is returned. + */ + long getTotalRows(); + + /* Returns the underlying ResultSet Implementation */ + ResultSet getResultSet(); + + /* Returns the query statistics associated with this query. */ + BigQueryResultStats getBigQueryResultStats(); +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultImpl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultImpl.java new file mode 100644 index 0000000000..b705e77c11 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultImpl.java @@ -0,0 +1,835 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.cloud.bigquery.FieldValue.Attribute; +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.TimeUnit; +import org.apache.arrow.vector.util.JsonStringArrayList; +import org.apache.arrow.vector.util.Text; + +/** + * An implementation of BigQueryResult. + * + *
<p>This class and the ResultSet it returns are not thread-safe. + */ +public class BigQueryResultImpl<T> implements BigQueryResult { + + private static final String NULL_CURSOR_MSG = + "Error occurred while reading the cursor. This could happen if getters are called after we are done reading all the records"; + + // This class represents a row of records; the columns are represented as a map + // (columnName:columnValue pair) + static class Row { + private Map<String, Object> value; + private boolean isLast; + + public Row(Map<String, Object> value) { + this.value = value; + } + + public Row(Map<String, Object> value, boolean isLast) { + this.value = value; + this.isLast = isLast; + } + + public Map<String, Object> getValue() { + return value; + } + + public boolean isLast() { + return isLast; + } + + public boolean hasField(String fieldName) { + return this.value.containsKey(fieldName); + } + + public Object get(String fieldName) { + return this.value.get(fieldName); + } + } + + private final Schema schema; + private final long totalRows; + private final BlockingQueue<T> buffer; + private T cursor; + private final BigQueryResultSet underlyingResultSet; + private final BigQueryResultStats bigQueryResultStats; + private final FieldList schemaFieldList; + + public BigQueryResultImpl( + Schema schema, + long totalRows, + BlockingQueue<T> buffer, + BigQueryResultStats bigQueryResultStats) { + this.schema = schema; + this.totalRows = totalRows; + this.buffer = buffer; + this.underlyingResultSet = new BigQueryResultSet(); + this.bigQueryResultStats = bigQueryResultStats; + this.schemaFieldList = schema.getFields(); + } + + @Override + public Schema getSchema() { + return schema; + } + + @Override + public long getTotalRows() { + return totalRows; + } + + @Override + public ResultSet getResultSet() { + return underlyingResultSet; + } + + private class BigQueryResultSet extends AbstractJdbcResultSet { + private boolean hasReachedEnd = + false; // flag which will be set to true when we have encountered an EndOfStream or when + // curTup.isLast().
Ref: https://github.com/googleapis/java-bigquery/issues/2033 + private boolean wasNull = false; + + private class BigQueryArrayResult implements java.sql.Array { + List array; + + public BigQueryArrayResult(Object array) { + if (array instanceof Object[]) { + this.array = new ArrayList<>(Arrays.asList((Object[]) array)); + } else if (array instanceof FieldValueList) { + this.array = new ArrayList<>((FieldValueList) array); + } else { + this.array = (List) array; + } + } + + @Override + public String getBaseTypeName() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public int getBaseType() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Object getArray() throws SQLException { + return array; + } + + @Override + public Object getArray(java.util.Map> map) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Object getArray(long index, int count) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public Object getArray(long index, int count, java.util.Map> map) + throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public ResultSet getResultSet() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public ResultSet getResultSet(java.util.Map> map) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public ResultSet getResultSet(long index, int count) throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public ResultSet getResultSet(long index, int count, java.util.Map> map) + throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + + @Override + public void free() throws SQLException { + throw new SQLFeatureNotSupportedException(); + } + } + + @Override + /*Advances the result set to the next row, returning false if no such row exists. Potentially blocking operation*/ + public boolean next() throws SQLException { + if (buffer == null) { + return false; + } + if (hasReachedEnd) { // if end of stream is reached then we can simply return false + return false; + } + try { + cursor = buffer.take(); // advance the cursor,Potentially blocking operation + if (isEndOfStream(cursor)) { // check for end of stream + cursor = null; + hasReachedEnd = true; + return false; + } else if (cursor instanceof Row) { + Row curTup = (Row) cursor; + if (curTup.isLast()) { // last Tuple + cursor = null; + hasReachedEnd = true; + return false; + } + return true; + } else if (cursor instanceof FieldValueList) { // cursor is advanced, we can return true now + return true; + } else { // this case should never occur as the cursor will either be a Row of EoS + throw new BigQuerySQLException("Could not process the current row"); + } + } catch (InterruptedException e) { + throw new SQLException( + "Error occurred while advancing the cursor. 
This could happen when connection is closed while we call the next method"); + } + } + + private boolean isEndOfStream(T cursor) { + return cursor instanceof ConnectionImpl.EndOfFieldValueList; + } + + private Object getCurrentValueForReadApiData(String fieldName) throws SQLException { + Row curRow = (Row) cursor; + if (!curRow.hasField(fieldName)) { + throw new SQLException(String.format("Field %s not found", fieldName)); + } + return curRow.get(fieldName); + } + + @Override + public java.sql.Array getArray(String fieldName) throws SQLException { + if (fieldName == null) { + throw new SQLException("fieldName can't be null"); + } + if (cursor == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + if (fieldValue.getAttribute().equals(Attribute.REPEATED)) { + return new BigQueryArrayResult(fieldValue.getValue()); + } else { + wasNull = true; + return null; + } + } else { // Data received from Read API (Arrow) + Object currentVal = getCurrentValueForReadApiData(fieldName); + if (currentVal == null) { + wasNull = true; + return null; + } + wasNull = false; + return new BigQueryArrayResult(currentVal); + } + } + + @Override + public java.sql.Array getArray(int columnIndex) throws SQLException { + if (cursor == null) { + return null; + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); + if (fieldValue == null || fieldValue.getValue() == null) { + wasNull = true; + return null; + } + wasNull = false; + return new BigQueryArrayResult(fieldValue.getValue()); + } else { + return getArray(schemaFieldList.get(columnIndex).getName()); + } + } + + @Override + public Object getObject(String fieldName) throws SQLException { + if (fieldName == null) { + throw new SQLException("fieldName can't be null"); + } + if (cursor == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); + if (fieldValue == null || fieldValue.getValue() == null) { + wasNull = true; + return null; + } + wasNull = false; + return fieldValue.getValue(); + } else { // Data received from Read API (Arrow) + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return null; + } + wasNull = false; + return curVal; + } + } + + @Override + public Object getObject(int columnIndex) throws SQLException { + if (cursor == null) { + return null; + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); + if (fieldValue == null || fieldValue.getValue() == null) { + wasNull = true; + return null; + } + wasNull = false; + return fieldValue.getValue(); + } else { // Data received from Read API (Arrow) + return getObject(schemaFieldList.get(columnIndex).getName()); + } + } + + @Override + public String getString(String fieldName) throws SQLException { + if (fieldName == null) { + throw new SQLException("fieldName can't be null"); + } + if (cursor == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + 
wasNull = false; + if (fieldValue.getAttribute().equals(FieldValue.Attribute.REPEATED)) { // Case for Arrays + return fieldValue.getValue().toString(); + } else { + return fieldValue.getStringValue(); + } + } else { // Data received from Read API (Arrow) + Object currentVal = getCurrentValueForReadApiData(fieldName); + if (currentVal == null) { + wasNull = true; + return null; + } + wasNull = false; + if (currentVal instanceof JsonStringArrayList) { // arrays + JsonStringArrayList jsnAry = (JsonStringArrayList) currentVal; + return jsnAry.toString(); + } else if (currentVal instanceof LocalDateTime) { + LocalDateTime dateTime = (LocalDateTime) currentVal; + return dateTime.toString(); + } else { + Text textVal = (Text) currentVal; + return textVal.toString(); + } + } + } + + @Override + public String getString(int columnIndex) throws SQLException { + if (cursor == null) { + return null; + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); + if (fieldValue == null || fieldValue.getValue() == null) { + wasNull = true; + return null; + } + wasNull = false; + return fieldValue.getStringValue(); + } else { // Data received from Read API (Arrow) + return getString(schemaFieldList.get(columnIndex).getName()); + } + } + + @Override + public int getInt(String fieldName) throws SQLException { + if (fieldName == null) { + throw new SQLException("fieldName can't be null"); + } + if (cursor == null) { + return 0; // the column value; if the value is SQL NULL, the value returned is 0 as per + // java.sql.ResultSet definition + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return 0; + } + wasNull = false; + return fieldValue.getNumericValue().intValue(); + } else { // Data received from Read API (Arrow) + Object currentVal = getCurrentValueForReadApiData(fieldName); + if (currentVal == null) { + wasNull = true; + return 0; + } + wasNull = false; + if (currentVal instanceof Text) { // parse from text to int + return Integer.parseInt((currentVal).toString()); + } else if (currentVal + instanceof + Long) { // incase getInt is called for a Long value. 
 Loss of precision might occur + return ((Long) currentVal).intValue(); + } + return ((BigDecimal) currentVal).intValue(); + } + } + + @Override + public int getInt(int columnIndex) throws SQLException { + if (cursor == null) { + return 0; // the column value; if the value is SQL NULL, the value returned is 0 as per + // java.sql.ResultSet definition + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); + if (fieldValue == null || fieldValue.getValue() == null) { + wasNull = true; + return 0; + } + wasNull = false; + return fieldValue.getNumericValue().intValue(); + } else { // Data received from Read API (Arrow) + return getInt(schemaFieldList.get(columnIndex).getName()); + } + } + + @Override + public long getLong(String fieldName) throws SQLException { + if (fieldName == null) { + throw new SQLException("fieldName can't be null"); + } + if (cursor == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return 0L; + } + wasNull = false; + return fieldValue.getNumericValue().longValue(); + } else { // Data received from Read API (Arrow) + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return 0L; + } + wasNull = false; + // value will be a Long or a BigDecimal; both are Numbers + return ((Number) curVal).longValue(); + } + } + + @Override + public long getLong(int columnIndex) throws SQLException { + if (cursor == null) { + return 0L; // the column value; if the value is SQL NULL, the value returned is 0 as per + // java.sql.ResultSet definition + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return 0L; + } + wasNull = false; + return fieldValue.getNumericValue().longValue(); + } else { // Data received from Read API (Arrow) + // Delegate to getLong(String) rather than getInt(String) to avoid truncating long values + return getLong(schemaFieldList.get(columnIndex).getName()); + } + } + + @Override + public double getDouble(String fieldName) throws SQLException { + if (fieldName == null) { + throw new SQLException("fieldName can't be null"); + } + if (cursor == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return 0.0d; + } + wasNull = false; + return fieldValue.getNumericValue().doubleValue(); + } else { // Data received from Read API (Arrow) + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return 0.0d; + } + wasNull = false; + return new BigDecimal(curVal.toString()).doubleValue(); + } + } + + @Override + public double getDouble(int columnIndex) throws SQLException { + if (cursor == null) { + return 0d; // the column value; if the value is SQL NULL, the value returned is 0 as per + // java.sql.ResultSet definition + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return 0.0d; + } + wasNull = false; + return fieldValue.getNumericValue().doubleValue(); + } else { // Data received from Read API (Arrow) + return
getDouble(schemaFieldList.get(columnIndex).getName()); + } + } + + @Override + public BigDecimal getBigDecimal(String fieldName) throws SQLException { + if (fieldName == null) { + throw new SQLException("fieldName can't be null"); + } + if (cursor == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return BigDecimal.valueOf(fieldValue.getNumericValue().doubleValue()); + } else { // Data received from Read API (Arrow) + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return null; + } + wasNull = false; + return BigDecimal.valueOf(getDouble(fieldName)); + } + } + + @Override + public BigDecimal getBigDecimal(int columnIndex) throws SQLException { + if (cursor == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return BigDecimal.valueOf(fieldValue.getNumericValue().doubleValue()); + } else { // Data received from Read API (Arrow) + return getBigDecimal(schemaFieldList.get(columnIndex).getName()); + } + } + + @Override + public boolean getBoolean(String fieldName) throws SQLException { + if (fieldName == null) { + throw new SQLException("fieldName can't be null"); + } + if (cursor == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return false; + } + wasNull = false; + return fieldValue.getBooleanValue(); + } else { // Data received from Read API (Arrow) + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return false; + } + wasNull = false; + return (Boolean) curVal; + } + } + + @Override + public boolean getBoolean(int columnIndex) throws SQLException { + if (cursor == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return false; + } + wasNull = false; + return fieldValue.getBooleanValue(); + } else { // Data received from Read API (Arrow) + return getBoolean(schemaFieldList.get(columnIndex).getName()); + } + } + + @Override + public byte[] getBytes(String fieldName) throws SQLException { + if (fieldName == null) { + throw new SQLException("fieldName can't be null"); + } + if (cursor == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return fieldValue.getBytesValue(); + } else { // Data received from Read API (Arrow) + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return null; + } + wasNull = false; + return (byte[]) curVal; + } + } + + @Override + public byte[] getBytes(int 
columnIndex) throws SQLException { + if (cursor == null) { + return null; // if the value is SQL NULL, the value returned is null + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return fieldValue.getBytesValue(); + } else { // Data received from Read API (Arrow) + return getBytes(schemaFieldList.get(columnIndex).getName()); + } + } + + @Override + public Timestamp getTimestamp(String fieldName) throws SQLException { + if (fieldName == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } + if (cursor == null) { + return null; // if the value is SQL NULL, the value returned is null + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return new Timestamp( + fieldValue.getTimestampValue() + / 1000); // getTimestampValue returns time in microseconds, and TimeStamp expects it + // in millis + } else { + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return null; + } + wasNull = false; + return new Timestamp((Long) curVal / 1000); // Timestamp is represented as a Long + } + } + + @Override + public Timestamp getTimestamp(int columnIndex) throws SQLException { + if (cursor == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return new Timestamp( + fieldValue.getTimestampValue() + / 1000); // getTimestampValue returns time in microseconds, and TimeStamp expects it + // in millis + } else { // Data received from Read API (Arrow) + return getTimestamp(schemaFieldList.get(columnIndex).getName()); + } + } + + @Override + public Time getTime(String fieldName) throws SQLException { + if (fieldName == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } + if (cursor == null) { + return null; // if the value is SQL NULL, the value returned is null + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return getTimeFromFieldVal(fieldValue); + } else { // Data received from Read API (Arrow) + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return null; + } + wasNull = false; + return new Time( + ((Long) curVal) + / 1000); // Time.toString() will return 12:11:35 in GMT as 17:41:35 in (GMT+5:30). 
+ // This can be offset using getTimeZoneOffset + } + } + + @Override + public Time getTime(int columnIndex) throws SQLException { + if (cursor == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return getTimeFromFieldVal(fieldValue); + } else { // Data received from Read API (Arrow) + return getTime(schemaFieldList.get(columnIndex).getName()); + } + } + + // Expects fieldValue.getValue() != null. + private Time getTimeFromFieldVal(FieldValue fieldValue) throws SQLException { + // Time ranges from 00:00:00 to 23:59:59.99999. in BigQuery. Parsing it to java.sql.Time + String strTime = fieldValue.getStringValue(); + String[] timeSplt = strTime.split(":"); + if (timeSplt.length != 3) { + throw new SQLException("Can not parse the value " + strTime + " to java.sql.Time"); + } + int hr = Integer.parseInt(timeSplt[0]); + int min = Integer.parseInt(timeSplt[1]); + int sec, nanoSec = 0; + if (timeSplt[2].contains(".")) { + String[] secSplt = timeSplt[2].split("\\."); + sec = Integer.parseInt(secSplt[0]); + nanoSec = Integer.parseInt(secSplt[1]); + } else { + sec = Integer.parseInt(timeSplt[2]); + } + return Time.valueOf(LocalTime.of(hr, min, sec, nanoSec)); + } + + @Override + public Date getDate(String fieldName) throws SQLException { + if (fieldName == null) { + throw new SQLException("fieldName can't be null"); + } + if (cursor == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(fieldName); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return Date.valueOf(fieldValue.getStringValue()); + } else { // Data received from Read API (Arrow) + Object curVal = getCurrentValueForReadApiData(fieldName); + if (curVal == null) { + wasNull = true; + return null; + } + wasNull = false; + Integer dateInt = (Integer) curVal; + long dateInMillis = + TimeUnit.DAYS.toMillis( + Long.valueOf( + dateInt)); // For example int 18993 represents 2022-01-01, converting time to + // milli seconds + return new Date(dateInMillis); + } + } + + @Override + public Date getDate(int columnIndex) throws SQLException { + if (cursor == null) { + throw new BigQuerySQLException(NULL_CURSOR_MSG); + } else if (cursor instanceof FieldValueList) { + FieldValue fieldValue = ((FieldValueList) cursor).get(columnIndex); + if ((fieldValue == null || fieldValue.getValue() == null)) { + wasNull = true; + return null; + } + wasNull = false; + return Date.valueOf(fieldValue.getStringValue()); + } else { // Data received from Read API (Arrow) + return getDate(schemaFieldList.get(columnIndex).getName()); + } + } + + /** + * Returns whether the last column read had a value of SQL NULL. Note that you must first call + * one of the getter methods on a column to try to read its value and then call the method + * wasNull to see if the value read was SQL NULL. 
* + */ + @Override + public boolean wasNull() { + return wasNull; + } + } + + @Override + public BigQueryResultStats getBigQueryResultStats() { + return bigQueryResultStats; + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultStats.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultStats.java new file mode 100644 index 0000000000..a4c37a9b66 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultStats.java @@ -0,0 +1,36 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.api.core.BetaApi; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.JobStatistics.SessionInfo; + +public interface BigQueryResultStats { + + /** Returns query statistics of a query job */ + @BetaApi + QueryStatistics getQueryStatistics(); + + /** + * Returns SessionInfo contains information about the session if this job is part of one. + * JobStatistics2 model class does not allow setSessionInfo so this cannot be set as part of + * QueryStatistics when we use jobs.query API. + */ + @BetaApi + SessionInfo getSessionInfo(); +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultStatsImpl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultStatsImpl.java new file mode 100644 index 0000000000..53d67f8f3a --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryResultStatsImpl.java @@ -0,0 +1,41 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.JobStatistics.SessionInfo; + +public class BigQueryResultStatsImpl implements BigQueryResultStats { + + private final QueryStatistics queryStatistics; + private final SessionInfo sessionInfo; + + public BigQueryResultStatsImpl(QueryStatistics queryStatistics, SessionInfo sessionInfo) { + this.queryStatistics = queryStatistics; + this.sessionInfo = sessionInfo; + } + + @Override + public QueryStatistics getQueryStatistics() { + return queryStatistics; + } + + @Override + public SessionInfo getSessionInfo() { + return sessionInfo; + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryAlgorithm.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryAlgorithm.java new file mode 100644 index 0000000000..140f2c6eba --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryAlgorithm.java @@ -0,0 +1,232 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.api.gax.retrying.ResultRetryAlgorithm; +import com.google.api.gax.retrying.ResultRetryAlgorithmWithContext; +import com.google.api.gax.retrying.RetryAlgorithm; +import com.google.api.gax.retrying.RetryingContext; +import com.google.api.gax.retrying.TimedAttemptSettings; +import com.google.api.gax.retrying.TimedRetryAlgorithm; +import com.google.api.gax.retrying.TimedRetryAlgorithmWithContext; +import com.google.api.services.bigquery.model.ErrorProto; +import com.google.api.services.bigquery.model.Job; +import java.time.Duration; +import java.util.Iterator; +import java.util.UUID; +import java.util.concurrent.CancellationException; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.regex.Pattern; + +public class BigQueryRetryAlgorithm extends RetryAlgorithm { + private final BigQueryRetryConfig bigQueryRetryConfig; + private final ResultRetryAlgorithm resultAlgorithm; + private final TimedRetryAlgorithm timedAlgorithm; + private final ResultRetryAlgorithmWithContext resultAlgorithmWithContext; + private final TimedRetryAlgorithmWithContext timedAlgorithmWithContext; + + private static final Logger LOG = Logger.getLogger(BigQueryRetryAlgorithm.class.getName()); + private static final UUID RETRY_UUID = UUID.randomUUID(); + + public BigQueryRetryAlgorithm( + ResultRetryAlgorithm resultAlgorithm, + TimedRetryAlgorithm timedAlgorithm, + BigQueryRetryConfig bigQueryRetryConfig) { + super(resultAlgorithm, timedAlgorithm); + this.bigQueryRetryConfig = checkNotNull(bigQueryRetryConfig); + this.resultAlgorithm = checkNotNull(resultAlgorithm); + this.timedAlgorithm = checkNotNull(timedAlgorithm); + this.resultAlgorithmWithContext = null; + this.timedAlgorithmWithContext = null; + } + + @Override + public boolean 
shouldRetry( + RetryingContext context, + Throwable previousThrowable, + ResponseT previousResponse, + TimedAttemptSettings nextAttemptSettings) + throws CancellationException { + // Log retry info + int attemptCount = nextAttemptSettings == null ? 0 : nextAttemptSettings.getAttemptCount(); + Duration retryDelay = + nextAttemptSettings == null ? Duration.ZERO : nextAttemptSettings.getRetryDelayDuration(); + String errorMessage = previousThrowable != null ? previousThrowable.getMessage() : ""; + + // Implementing shouldRetryBasedOnBigQueryRetryConfig so that we can retry exceptions based on + // the exception messages + boolean shouldRetry = + (shouldRetryBasedOnResult(context, previousThrowable, previousResponse) + || shouldRetryBasedOnBigQueryRetryConfig( + previousThrowable, bigQueryRetryConfig, previousResponse)) + && shouldRetryBasedOnTiming(context, nextAttemptSettings); + + if (LOG.isLoggable(Level.FINEST)) { + LOG.log( + Level.FINEST, + "Retrying with:\n{0}\n{1}\n{2}\n{3}\n{4}\n{5}", + new Object[] { + "BigQuery attemptCount: " + attemptCount, + "BigQuery delay: " + retryDelay, + "BigQuery retriableException: " + previousThrowable, + "BigQuery shouldRetry: " + shouldRetry, + "BigQuery previousThrowable.getMessage: " + errorMessage, + "BigQuery retry identifier: " + RETRY_UUID + }); + } + return shouldRetry; + } + + private boolean shouldRetryBasedOnBigQueryRetryConfig( + Throwable previousThrowable, + BigQueryRetryConfig bigQueryRetryConfig, + ResponseT previousResponse) { + /* + We are deciding if a given error should be retried on the basis of error message. + Cannot rely on Error/Status code as for example error code 400 (which is not retriable) could be thrown due to rateLimitExceed, which is retriable + */ + String errorDesc = null; + if (previousThrowable != null) { + errorDesc = previousThrowable.getMessage(); + } else if (previousResponse != null) { + /* + In some cases error messages may come without an exception + e.g. status code 200 with a rate limit exceeded for job create + in these cases there is no previousThrowable so we need + to check for error messages in previousResponse + */ + errorDesc = getErrorDescFromResponse(previousResponse); + } + + if (errorDesc != null) { + errorDesc = errorDesc.toLowerCase(); // for case insensitive comparison + for (Iterator retriableMessages = + bigQueryRetryConfig.getRetriableErrorMessages().iterator(); + retriableMessages.hasNext(); ) { + if (errorDesc.contains( + retriableMessages + .next() + .toLowerCase())) { // Error message should be retried, implementing cases + // insensitive match + return true; + } + } + // Check if there's a regex which matches the error message. 
 The message check above runs first because regex + // matching is comparatively expensive + for (Iterator<String> retriableRegExes = bigQueryRetryConfig.getRetriableRegExes().iterator(); + retriableRegExes.hasNext(); ) { + if (matchRegEx(retriableRegExes.next(), errorDesc)) { + return true; + } + } + } + return false; + } + + public static boolean matchRegEx( + String retriableRegEx, String errorDesc) { // case-insensitive regex matching + return Pattern.matches(retriableRegEx.toLowerCase(), errorDesc.toLowerCase()); + } + + /*Duplicating this method as it cannot be inherited from the RetryAlgorithm due to the default access modifier*/ + boolean shouldRetryBasedOnResult( + RetryingContext context, Throwable previousThrowable, ResponseT previousResponse) { + if (resultAlgorithmWithContext != null && context != null) { + return resultAlgorithmWithContext.shouldRetry(context, previousThrowable, previousResponse); + } + return getResultAlgorithm().shouldRetry(previousThrowable, previousResponse); + } + + /*Duplicating this method as it cannot be inherited from the RetryAlgorithm due to the private access modifier*/ + private boolean shouldRetryBasedOnTiming( + RetryingContext context, TimedAttemptSettings nextAttemptSettings) { + if (nextAttemptSettings == null) { + return false; + } + if (timedAlgorithmWithContext != null && context != null) { + return timedAlgorithmWithContext.shouldRetry(context, nextAttemptSettings); + } + return getTimedAlgorithm().shouldRetry(nextAttemptSettings); + } + + @Override + public TimedAttemptSettings createNextAttempt( + RetryingContext context, + Throwable previousThrowable, + ResponseT previousResponse, + TimedAttemptSettings previousSettings) { + // A small optimization that avoids calling relatively heavy methods + // like timedAlgorithm.createNextAttempt(), when it is not necessary.
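+ // Illustrative sketch of how the two predicates combine, assuming a hypothetical config built + // as BigQueryRetryConfig.newBuilder().retryOnMessage("rateLimitExceeded").build(): an + // exception whose message contains "rateLimitExceeded" can be rejected by the default result + // algorithm but matched case-insensitively by shouldRetryBasedOnBigQueryRetryConfig, so the + // check below still falls through to computing the next attempt settings rather than + // returning null.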
+ + if (!((shouldRetryBasedOnResult(context, previousThrowable, previousResponse) + || shouldRetryBasedOnBigQueryRetryConfig( + previousThrowable, + bigQueryRetryConfig, + previousResponse)))) { // Calling shouldRetryBasedOnBigQueryRetryConfig to check if + // the error message could be retried + return null; + } + + TimedAttemptSettings newSettings = + createNextAttemptBasedOnResult( + context, previousThrowable, previousResponse, previousSettings); + if (newSettings == null) { + newSettings = createNextAttemptBasedOnTiming(context, previousSettings); + } + return newSettings; + } + + /*Duplicating this method as it can not be inherited from the RetryAlgorithm due to the private access modifier*/ + private TimedAttemptSettings createNextAttemptBasedOnResult( + RetryingContext context, + Throwable previousThrowable, + ResponseT previousResponse, + TimedAttemptSettings previousSettings) { + if (resultAlgorithmWithContext != null && context != null) { + return resultAlgorithmWithContext.createNextAttempt( + context, previousThrowable, previousResponse, previousSettings); + } + return getResultAlgorithm() + .createNextAttempt(previousThrowable, previousResponse, previousSettings); + } + + /*Duplicating this method as it can not be inherited from the RetryAlgorithm due to the private access modifier*/ + private TimedAttemptSettings createNextAttemptBasedOnTiming( + RetryingContext context, TimedAttemptSettings previousSettings) { + if (timedAlgorithmWithContext != null && context != null) { + return timedAlgorithmWithContext.createNextAttempt(context, previousSettings); + } + return getTimedAlgorithm().createNextAttempt(previousSettings); + } + + private String getErrorDescFromResponse(ResponseT previousResponse) { + /* + error messages may come without an exception and must be extracted from response + following logic based on response body of jobs.insert method, so far the only + known case where a response with status code 200 may contain an error message + */ + if (previousResponse instanceof Job) { + Job job = (Job) previousResponse; + ErrorProto error = job.getStatus() != null ? job.getStatus().getErrorResult() : null; + return error != null ? error.getMessage() : null; + } + return null; + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryConfig.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryConfig.java new file mode 100644 index 0000000000..2e1f7c0bda --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryConfig.java @@ -0,0 +1,68 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.cloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.common.collect.ImmutableSet; + +public class BigQueryRetryConfig { + private final ImmutableSet retriableErrorMessages; + private final ImmutableSet retriableRegExes; + + private BigQueryRetryConfig(Builder builder) { + retriableErrorMessages = builder.retriableErrorMessages.build(); + retriableRegExes = builder.retriableRegExes.build(); + } + + public ImmutableSet getRetriableErrorMessages() { + return retriableErrorMessages; + } + + public ImmutableSet getRetriableRegExes() { + return retriableRegExes; + } + + // BigQueryRetryConfig builder + public static class Builder { + private final ImmutableSet.Builder retriableErrorMessages = ImmutableSet.builder(); + private final ImmutableSet.Builder retriableRegExes = ImmutableSet.builder(); + + private Builder() {} + + public final Builder retryOnMessage(String... errorMessages) { + for (String errorMessage : errorMessages) { + retriableErrorMessages.add(checkNotNull(errorMessage)); + } + return this; + } + + public final Builder retryOnRegEx(String... regExPatterns) { + for (String regExPattern : regExPatterns) { + retriableRegExes.add(checkNotNull(regExPattern)); + } + return this; + } + + public BigQueryRetryConfig build() { + return new BigQueryRetryConfig(this); + } + } + + public static Builder newBuilder() { + return new Builder(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryHelper.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryHelper.java new file mode 100644 index 0000000000..9c70830465 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryRetryHelper.java @@ -0,0 +1,120 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+package com.google.cloud.bigquery;
+
+import com.google.api.core.ApiClock;
+import com.google.api.gax.retrying.DirectRetryingExecutor;
+import com.google.api.gax.retrying.ExponentialRetryAlgorithm;
+import com.google.api.gax.retrying.ResultRetryAlgorithm;
+import com.google.api.gax.retrying.RetryAlgorithm;
+import com.google.api.gax.retrying.RetrySettings;
+import com.google.api.gax.retrying.RetryingExecutor;
+import com.google.api.gax.retrying.RetryingFuture;
+import com.google.api.gax.retrying.TimedRetryAlgorithm;
+import com.google.cloud.RetryHelper;
+import io.opentelemetry.api.trace.Span;
+import io.opentelemetry.api.trace.Tracer;
+import io.opentelemetry.context.Scope;
+import java.io.IOException;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+public class BigQueryRetryHelper extends RetryHelper {
+
+  private static final Logger LOG = Logger.getLogger(BigQueryRetryHelper.class.getName());
+
+  public static <V> V runWithRetries(
+      Callable<V> callable,
+      RetrySettings retrySettings,
+      ResultRetryAlgorithm<?> resultRetryAlgorithm,
+      ApiClock clock,
+      BigQueryRetryConfig bigQueryRetryConfig,
+      boolean isOpenTelemetryEnabled,
+      Tracer openTelemetryTracer)
+      throws RetryHelperException {
+    Span runWithRetries = null;
+    if (isOpenTelemetryEnabled && openTelemetryTracer != null) {
+      runWithRetries =
+          openTelemetryTracer
+              .spanBuilder("com.google.cloud.bigquery.BigQueryRetryHelper.runWithRetries")
+              .startSpan();
+    }
+    try (Scope runWithRetriesScope = runWithRetries != null ? runWithRetries.makeCurrent() : null) {
+      // Suppressing should be ok as a workaround. The current and only ResultRetryAlgorithm
+      // implementation does not use the response at all, so ignoring its type is ok.
+      @SuppressWarnings("unchecked")
+      ResultRetryAlgorithm<V> algorithm = (ResultRetryAlgorithm<V>) resultRetryAlgorithm;
+      return run(
+          callable,
+          new ExponentialRetryAlgorithm(retrySettings, clock),
+          algorithm,
+          bigQueryRetryConfig);
+    } catch (Exception e) {
+      // Checks for IOException and translates it into a BigQueryException. The BigQueryException
+      // constructor parses the IOException and translates it into an internal code.
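+      // (run(...) below surfaces failures as ExecutionException, so the root cause is
+      // unwrapped via e.getCause() before translation.)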
+      if (e.getCause() instanceof IOException) {
+        throw new BigQueryRetryHelperException(new BigQueryException((IOException) e.getCause()));
+      }
+      throw new BigQueryRetryHelperException(e.getCause());
+    } finally {
+      if (runWithRetries != null) {
+        runWithRetries.end();
+      }
+    }
+  }
+
+  private static <V> V run(
+      Callable<V> callable,
+      TimedRetryAlgorithm timedAlgorithm,
+      ResultRetryAlgorithm<V> resultAlgorithm,
+      BigQueryRetryConfig bigQueryRetryConfig)
+      throws ExecutionException, InterruptedException {
+    RetryAlgorithm<V> retryAlgorithm =
+        new BigQueryRetryAlgorithm<>(
+            resultAlgorithm,
+            timedAlgorithm,
+            bigQueryRetryConfig); // using BigQueryRetryAlgorithm in place of
+    // com.google.api.gax.retrying.RetryAlgorithm, as BigQueryRetryAlgorithm retries
+    // considering bigQueryRetryConfig
+    RetryingExecutor<V> executor = new DirectRetryingExecutor<>(retryAlgorithm);
+
+    // Log retry info
+    if (LOG.isLoggable(Level.FINEST)) {
+      LOG.log(
+          Level.FINEST,
+          "Retrying with:\n{0}\n{1}",
+          new Object[] {
+            "BigQuery retried method: " + callable.getClass().getEnclosingMethod().getName(),
+            "BigQuery retry settings: " + timedAlgorithm.createFirstAttempt().getGlobalSettings()
+          });
+    }
+
+    RetryingFuture<V> retryingFuture = executor.createFuture(callable);
+    executor.submit(retryingFuture);
+    return retryingFuture.get();
+  }
+
+  public static class BigQueryRetryHelperException extends RuntimeException {
+
+    private static final long serialVersionUID = -8519852520090965314L;
+
+    BigQueryRetryHelperException(Throwable cause) {
+      super(cause);
+    }
+  }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuerySQLException.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuerySQLException.java
new file mode 100644
index 0000000000..672c6ad3fa
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuerySQLException.java
@@ -0,0 +1,86 @@
+/*
+ * Copyright 2021 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import java.sql.SQLException;
+import java.util.List;
+
+/**
+ * BigQuery service exception.
+ *
+ * @see <a href="https://cloud.google.com/bigquery/troubleshooting-errors">Google Cloud BigQuery
+ *     error codes</a>
+ */
+public final class BigQuerySQLException extends SQLException {
+
+  private static final long serialVersionUID = -5006625989225438209L;
+  private final List<BigQueryError> errors;
+
+  public BigQuerySQLException() {
+    this.errors = null;
+  }
+
+  public BigQuerySQLException(
+      String msg) { // overloaded constructor with just the message as an argument
+    super(msg);
+    this.errors = null;
+  }
+
+  public BigQuerySQLException(List<BigQueryError> errors) {
+    this.errors = errors;
+  }
+
+  public BigQuerySQLException(List<BigQueryError> errors, Throwable cause) {
+    super(cause != null ? cause.toString() : null);
+    this.errors = errors;
+  }
+
+  public BigQuerySQLException(String reason, List<BigQueryError> errors) {
+    super(reason);
+    this.errors = errors;
+  }
+
+  public BigQuerySQLException(String reason, Throwable cause, List<BigQueryError> errors) {
+    super(reason, cause);
+    this.errors = errors;
+  }
+
+  public BigQuerySQLException(String reason, String sqlState, List<BigQueryError> errors) {
+    super(reason, sqlState);
+    this.errors = errors;
+  }
+
+  public BigQuerySQLException(
+      String reason, String sqlState, int errorCode, List<BigQueryError> errors) {
+    super(reason, sqlState, errorCode);
+    this.errors = errors;
+  }
+
+  public BigQuerySQLException(
+      String reason, String sqlState, int errorCode, Throwable cause, List<BigQueryError> errors) {
+    super(reason, sqlState, errorCode, cause);
+    this.errors = errors;
+  }
+
+  /**
+   * Returns a list of {@link BigQueryError}s that caused this exception. Returns {@code null} if
+   * none exists.
+   */
+  public List<BigQueryError> getErrors() {
+    return errors;
+  }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CloneDefinition.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CloneDefinition.java
new file mode 100644
index 0000000000..58594fe321
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CloneDefinition.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import com.google.api.client.util.DateTime;
+import com.google.api.core.BetaApi;
+import com.google.auto.value.AutoValue;
+import com.google.common.annotations.VisibleForTesting;
+import java.io.Serializable;
+import javax.annotation.Nullable;
+
+@AutoValue
+@BetaApi
+public abstract class CloneDefinition implements Serializable {
+
+  private static final long serialVersionUID = 1460853787400450649L;
+
+  public static Builder newBuilder() {
+    return new AutoValue_CloneDefinition.Builder();
+  }
+
+  static CloneDefinition fromPb(
+      com.google.api.services.bigquery.model.CloneDefinition cloneDefinition) {
+    Builder builder = newBuilder();
+
+    if (cloneDefinition.getCloneTime() != null) {
+      builder.setCloneTime(cloneDefinition.getCloneTime().toString());
+    }
+    if (cloneDefinition.getBaseTableReference() != null) {
+      builder.setBaseTableId(TableId.fromPb(cloneDefinition.getBaseTableReference()));
+    }
+
+    return builder.build();
+  }
+
+  @Nullable
+  public abstract TableId getBaseTableId();
+
+  @Nullable
+  public abstract String getCloneTime();
+
+  /** Returns a builder for a Clone table definition.
*/ + @VisibleForTesting + public abstract Builder toBuilder(); + + com.google.api.services.bigquery.model.CloneDefinition toPb() { + + com.google.api.services.bigquery.model.CloneDefinition cloneDefinition = + new com.google.api.services.bigquery.model.CloneDefinition(); + cloneDefinition.setBaseTableReference(getBaseTableId().toPb()); + cloneDefinition.setCloneTime(DateTime.parseRfc3339(getCloneTime())); + + return cloneDefinition; + } + + @AutoValue.Builder + public abstract static class Builder { + + /** Reference describing the ID of the table that was Cloned. * */ + public abstract Builder setBaseTableId(TableId baseTableId); + + /** + * The time at which the base table was Cloned. This value is reported in the JSON response + * using RFC3339 format. * + */ + public abstract Builder setCloneTime(String dateTime); + + /** Creates a {@code CloneDefinition} object. */ + public abstract CloneDefinition build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ColumnReference.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ColumnReference.java new file mode 100644 index 0000000000..3dc688be5f --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ColumnReference.java @@ -0,0 +1,79 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import com.google.common.annotations.VisibleForTesting; +import javax.annotation.Nullable; + +@AutoValue +public abstract class ColumnReference { + public static ColumnReference.Builder newBuilder() { + return new AutoValue_ColumnReference.Builder(); + } + + static ColumnReference fromPb( + com.google.api.services.bigquery.model.TableConstraints.ForeignKeys.ColumnReferences + columnReference) { + ColumnReference.Builder builder = newBuilder(); + + if (columnReference.getReferencedColumn() != null) { + builder.setReferencedColumn(columnReference.getReferencedColumn()); + } + + if (columnReference.getReferencingColumn() != null) { + builder.setReferencingColumn(columnReference.getReferencingColumn()); + } + + return builder.build(); + } + + com.google.api.services.bigquery.model.TableConstraints.ForeignKeys.ColumnReferences toPb() { + + com.google.api.services.bigquery.model.TableConstraints.ForeignKeys.ColumnReferences + columnReference = + new com.google.api.services.bigquery.model.TableConstraints.ForeignKeys + .ColumnReferences(); + columnReference.setReferencedColumn(getReferencedColumn()); + columnReference.setReferencingColumn(getReferencingColumn()); + + return columnReference; + } + + @Nullable + public abstract String getReferencedColumn(); + + @Nullable + public abstract String getReferencingColumn(); + + /** Returns a builder for column reference. */ + @VisibleForTesting + public abstract ColumnReference.Builder toBuilder(); + + @AutoValue.Builder + public abstract static class Builder { + + /** The target column of this reference. 
* */ + public abstract ColumnReference.Builder setReferencedColumn(String referencedColumn); + + /** The source column of this reference. * */ + public abstract ColumnReference.Builder setReferencingColumn(String referencingColumn); + + /** Creates a {@code ColumnReference} object. */ + public abstract ColumnReference build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Connection.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Connection.java new file mode 100644 index 0000000000..83ea0fc0dc --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Connection.java @@ -0,0 +1,192 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.api.core.BetaApi; +import com.google.common.util.concurrent.ListenableFuture; +import java.util.List; +import java.util.Map; + +/** + * A Connection is a session between a Java application and BigQuery. SQL statements are executed + * and results are returned within the context of a connection. + */ +public interface Connection { + + /** Sends a query cancel request. This call will return immediately */ + @BetaApi + boolean close() throws BigQuerySQLException; + + /** + * Execute a query dry run that returns information on the schema and query parameters of the + * query results. + * + * @param sql typically a static SQL SELECT statement + * @exception BigQuerySQLException if a database access error occurs + */ + @BetaApi + BigQueryDryRunResult dryRun(String sql) throws BigQuerySQLException; + + /** + * Execute a SQL statement that returns a single ResultSet. + * + *

+   * <p>Example of running a query.
+   *
+   * <pre>{@code
+   * ConnectionSettings connectionSettings =
+   *     ConnectionSettings.newBuilder()
+   *         .setRequestTimeout(10L)
+   *         .setMaxResults(100L)
+   *         .setUseQueryCache(true)
+   *         .build();
+   * Connection connection = bigquery.createConnection(connectionSettings);
+   * String selectQuery = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
+   * BigQueryResult bqResultSet = connection.executeSelect(selectQuery);
+   * ResultSet rs = bqResultSet.getResultSet();
+   * while (rs.next()) {
+   *   System.out.printf("%s,", rs.getString("corpus"));
+   * }
+   * }</pre>
    + * + * @param sql a static SQL SELECT statement + * @return a ResultSet that contains the data produced by the query + * @exception BigQuerySQLException if a database access error occurs + */ + @BetaApi + BigQueryResult executeSelect(String sql) throws BigQuerySQLException; + + /** + * This method executes a SQL SELECT query + * + * @param sql SQL SELECT query + * @param parameters named or positional parameters. The set of query parameters must either be + * all positional or all named parameters. + * @param labels (optional) the labels associated with this query. You can use these to organize + * and group your query jobs. Label keys and values can be no longer than 63 characters, can + * only contain lowercase letters, numeric characters, underscores and dashes. International + * characters are allowed. Label values are optional and Label is a Varargs. You should pass + * all the Labels in a single Map .Label keys must start with a letter and each label in the + * list must have a different key. + * @return BigQueryResult containing the output of the query + * @throws BigQuerySQLException + */ + @BetaApi + BigQueryResult executeSelect( + String sql, List parameters, Map... labels) + throws BigQuerySQLException; + + /** + * Execute a SQL statement that returns a single ResultSet and returns a ListenableFuture to + * process the response asynchronously. + * + *

+   * <p>Example of running a query.
+   *
+   * <pre>{@code
+   * ConnectionSettings connectionSettings =
+   *     ConnectionSettings.newBuilder()
+   *         .setUseReadAPI(true)
+   *         .build();
+   * Connection connection = bigquery.createConnection(connectionSettings);
+   * String selectQuery = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
+   * ListenableFuture<ExecuteSelectResponse> executeSelectFuture =
+   *     connection.executeSelectAsync(selectQuery);
+   * ExecuteSelectResponse executeSelectRes = executeSelectFuture.get();
+   * if (!executeSelectRes.getIsSuccessful()) {
+   *   throw executeSelectRes.getBigQuerySQLException();
+   * }
+   * BigQueryResult bigQueryResult = executeSelectRes.getBigQueryResult();
+   * ResultSet rs = bigQueryResult.getResultSet();
+   * while (rs.next()) {
+   *   System.out.println(rs.getString(1));
+   * }
+   * }</pre>
    + * + * @param sql a static SQL SELECT statement + * @return a ListenableFuture that is used to get the data produced by the query + * @throws BigQuerySQLException upon failure + */ + @BetaApi + ListenableFuture executeSelectAsync(String sql) + throws BigQuerySQLException; + + /** + * Execute a SQL statement that returns a single ResultSet and returns a ListenableFuture to + * process the response asynchronously. + * + *

+   * <p>Example of running a query.
+   *
+   * <pre>{@code
+   * ConnectionSettings connectionSettings =
+   *     ConnectionSettings.newBuilder()
+   *         .setUseReadAPI(true)
+   *         .build();
+   * Connection connection = bigquery.createConnection(connectionSettings);
+   * String selectQuery =
+   *     "SELECT TimestampField, StringField, BooleanField FROM "
+   *         + MY_TABLE
+   *         + " WHERE StringField = @stringParam"
+   *         + " AND IntegerField IN UNNEST(@integerList)";
+   * QueryParameterValue stringParameter = QueryParameterValue.string("stringValue");
+   * QueryParameterValue intArrayParameter =
+   *     QueryParameterValue.array(new Integer[] {3, 4}, Integer.class);
+   * Parameter stringParam =
+   *     Parameter.newBuilder().setName("stringParam").setValue(stringParameter).build();
+   * Parameter intArrayParam =
+   *     Parameter.newBuilder().setName("integerList").setValue(intArrayParameter).build();
+   * List<Parameter> parameters = ImmutableList.of(stringParam, intArrayParam);
+   *
+   * ListenableFuture<ExecuteSelectResponse> executeSelectFuture =
+   *     connection.executeSelectAsync(selectQuery, parameters);
+   * ExecuteSelectResponse executeSelectRes = executeSelectFuture.get();
+   * if (!executeSelectRes.getIsSuccessful()) {
+   *   throw executeSelectRes.getBigQuerySQLException();
+   * }
+   * BigQueryResult bigQueryResult = executeSelectRes.getBigQueryResult();
+   * ResultSet rs = bigQueryResult.getResultSet();
+   * while (rs.next()) {
+   *   System.out.println(rs.getString(1));
+   * }
+   * }</pre>
    + * + * @param sql SQL SELECT query + * @param parameters named or positional parameters. The set of query parameters must either be + * all positional or all named parameters. + * @param labels (optional) the labels associated with this query. You can use these to organize + * and group your query jobs. Label keys and values can be no longer than 63 characters, can + * only contain lowercase letters, numeric characters, underscores and dashes. International + * characters are allowed. Label values are optional and Label is a Varargs. You should pass + * all the Labels in a single Map .Label keys must start with a letter and each label in the + * list must have a different key. + * @return a ListenableFuture that is used to get the data produced by the query + * @throws BigQuerySQLException upon failure + */ + @BetaApi + ListenableFuture executeSelectAsync( + String sql, List parameters, Map... labels) + throws BigQuerySQLException; +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionImpl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionImpl.java new file mode 100644 index 0000000000..d31a406e40 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionImpl.java @@ -0,0 +1,1570 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import static java.net.HttpURLConnection.HTTP_NOT_FOUND; + +import com.google.api.core.BetaApi; +import com.google.api.core.InternalApi; +import com.google.api.gax.core.FixedCredentialsProvider; +import com.google.api.services.bigquery.model.GetQueryResultsResponse; +import com.google.api.services.bigquery.model.JobConfigurationQuery; +import com.google.api.services.bigquery.model.QueryParameter; +import com.google.api.services.bigquery.model.QueryRequest; +import com.google.api.services.bigquery.model.TableDataList; +import com.google.api.services.bigquery.model.TableRow; +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.BigQueryRetryHelper.BigQueryRetryHelperException; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.JobStatistics.SessionInfo; +import com.google.cloud.bigquery.spi.v2.BigQueryRpc; +import com.google.cloud.bigquery.storage.v1.ArrowRecordBatch; +import com.google.cloud.bigquery.storage.v1.ArrowSchema; +import com.google.cloud.bigquery.storage.v1.BigQueryReadClient; +import com.google.cloud.bigquery.storage.v1.BigQueryReadSettings; +import com.google.cloud.bigquery.storage.v1.CreateReadSessionRequest; +import com.google.cloud.bigquery.storage.v1.DataFormat; +import com.google.cloud.bigquery.storage.v1.ReadRowsRequest; +import com.google.cloud.bigquery.storage.v1.ReadRowsResponse; +import com.google.cloud.bigquery.storage.v1.ReadSession; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.base.Function; +import com.google.common.base.Strings; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Iterables; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.util.concurrent.FutureCallback; +import com.google.common.util.concurrent.Futures; +import com.google.common.util.concurrent.ListenableFuture; +import com.google.common.util.concurrent.ListeningExecutorService; +import com.google.common.util.concurrent.MoreExecutors; +import java.io.IOException; +import java.math.BigInteger; +import java.util.AbstractList; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Queue; +import java.util.UUID; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.TimeUnit; +import java.util.logging.Level; +import java.util.logging.Logger; +import java.util.stream.Collectors; +import org.apache.arrow.memory.BufferAllocator; +import org.apache.arrow.memory.RootAllocator; +import org.apache.arrow.vector.FieldVector; +import org.apache.arrow.vector.VectorLoader; +import org.apache.arrow.vector.VectorSchemaRoot; +import org.apache.arrow.vector.ipc.ReadChannel; +import org.apache.arrow.vector.ipc.message.MessageSerializer; +import org.apache.arrow.vector.types.pojo.Field; +import org.apache.arrow.vector.util.ByteArrayReadableSeekableByteChannel; + +/** Implementation for {@link Connection}, the generic BigQuery connection API (not JDBC). 
*/
+class ConnectionImpl implements Connection {
+
+  private final ConnectionSettings connectionSettings;
+  private final BigQueryOptions bigQueryOptions;
+  private final BigQueryRpc bigQueryRpc;
+  private final BigQueryRetryConfig retryConfig;
+  private final int bufferSize; // buffer size in the producer thread
+  private final int MAX_PROCESS_QUERY_THREADS_CNT = 5;
+  private final ExecutorService queryTaskExecutor =
+      Executors.newFixedThreadPool(MAX_PROCESS_QUERY_THREADS_CNT);
+  private final Logger logger = Logger.getLogger(this.getClass().getName());
+  private BigQueryReadClient bqReadClient;
+  private static final long EXECUTOR_TIMEOUT_SEC = 10;
+  private static final long BIGQUERY_TIMEOUT_SEC = 10;
+  private BlockingQueue<AbstractList<FieldValue>>
+      bufferFvl; // initialized lazily iff we end up using the tabledata.list endpoint
+  private BlockingQueue<BigQueryResultImpl.Row>
+      bufferRow; // initialized lazily iff we end up using the Read API
+  private static final BigQueryRetryConfig EMPTY_RETRY_CONFIG =
+      BigQueryRetryConfig.newBuilder().build();
+
+  ConnectionImpl(
+      ConnectionSettings connectionSettings,
+      BigQueryOptions bigQueryOptions,
+      BigQueryRpc bigQueryRpc,
+      BigQueryRetryConfig retryConfig) {
+    this.connectionSettings = connectionSettings;
+    this.bigQueryOptions = bigQueryOptions;
+    this.bigQueryRpc = bigQueryRpc;
+    this.retryConfig = retryConfig;
+    // Sets a reasonable buffer size (a blocking queue) if the user input is suboptimal
+    this.bufferSize =
+        (connectionSettings == null
+                || connectionSettings.getNumBufferedRows() == null
+                || connectionSettings.getNumBufferedRows() < 10000
+            ? 20000
+            : Math.min(connectionSettings.getNumBufferedRows() * 2, 100000));
+  }
+
+  /**
+   * This method returns the number of records to be stored in the buffer, ensuring that it stays
+   * within a reasonable range.
+   *
+   * @return The max number of records to be stored in the buffer
+   */
+  private int getBufferSize() {
+    return (connectionSettings == null
+            || connectionSettings.getNumBufferedRows() == null
+            || connectionSettings.getNumBufferedRows() < 10000
+        ? 20000
+        : Math.min(connectionSettings.getNumBufferedRows() * 2, 100000));
+  }
+
+  /**
+   * The cancel method shuts down the pageFetcher and producerWorker threads gracefully using an
+   * interrupt. The pageFetcher thread will not request any subsequent pages after being
+   * interrupted and will shut down as soon as any ongoing RPC call returns. The producerWorker
+   * will not populate the buffer with any further records; it will clear the buffer, put an EoF
+   * marker and shut down.
+ * + * @return Boolean value true if the threads were interrupted + * @throws BigQuerySQLException + */ + @BetaApi + @Override + public synchronized boolean close() throws BigQuerySQLException { + flagEndOfStream(); // an End of Stream flag in the buffer so that the `ResultSet.next()` stops + // advancing the cursor + queryTaskExecutor.shutdownNow(); + boolean isBqReadClientTerminated = true; + try { + if (bqReadClient != null) { + bqReadClient.shutdownNow(); + isBqReadClientTerminated = + bqReadClient.awaitTermination(BIGQUERY_TIMEOUT_SEC, TimeUnit.SECONDS); + } + if (queryTaskExecutor.awaitTermination(EXECUTOR_TIMEOUT_SEC, TimeUnit.SECONDS) + && isBqReadClientTerminated) { + return true; + } // else queryTaskExecutor.isShutdown() will be returned outside this try block + } catch (InterruptedException e) { + logger.log( + Level.WARNING, + "\n" + Thread.currentThread().getName() + " Exception while awaitTermination", + e); // Logging InterruptedException instead of throwing the exception back, close method + // will return queryTaskExecutor.isShutdown() + } + + return queryTaskExecutor.isShutdown() + && isBqReadClientTerminated; // check if the executor has been shutdown + } + + /** + * This method runs a dry run query + * + * @param sql SQL SELECT statement + * @return BigQueryDryRunResult containing List and Schema + * @throws BigQuerySQLException + */ + @BetaApi + @Override + public BigQueryDryRunResult dryRun(String sql) throws BigQuerySQLException { + com.google.api.services.bigquery.model.Job dryRunJob = createDryRunJob(sql); + Schema schema = Schema.fromPb(dryRunJob.getStatistics().getQuery().getSchema()); + List queryParametersPb = + dryRunJob.getStatistics().getQuery().getUndeclaredQueryParameters(); + List queryParameters = + queryParametersPb == null + ? Collections.emptyList() + : Lists.transform(queryParametersPb, QUERY_PARAMETER_FROM_PB_FUNCTION); + QueryStatistics queryStatistics = JobStatistics.fromPb(dryRunJob); + SessionInfo sessionInfo = + queryStatistics.getSessionInfo() == null ? null : queryStatistics.getSessionInfo(); + BigQueryResultStats bigQueryResultStats = + new BigQueryResultStatsImpl(queryStatistics, sessionInfo); + return new BigQueryDryRunResultImpl(schema, queryParameters, bigQueryResultStats); + } + + /** + * This method executes a SQL SELECT query + * + * @param sql SQL SELECT statement + * @return BigQueryResult containing the output of the query + * @throws BigQuerySQLException + */ + @BetaApi + @Override + public BigQueryResult executeSelect(String sql) throws BigQuerySQLException { + return getExecuteSelectResponse(sql, null, null); + } + + /** + * This method executes a SQL SELECT query + * + * @param sql SQL SELECT query + * @param parameters named or positional parameters. The set of query parameters must either be + * all positional or all named parameters. + * @param labels the labels associated with this query. You can use these to organize and group + * your query jobs. Label keys and values can be no longer than 63 characters, can only + * contain lowercase letters, numeric characters, underscores and dashes. International + * characters are allowed. Label values are optional and Label is a Varargs. You should pass + * all the Labels in a single Map .Label keys must start with a letter and each label in the + * list must have a different key. + * @return BigQueryResult containing the output of the query + * @throws BigQuerySQLException + */ + @BetaApi + @Override + public BigQueryResult executeSelect( + String sql, List parameters, Map... 
labels) + throws BigQuerySQLException { + return getExecuteSelectResponse(sql, parameters, labels); + } + + private BigQueryResult getExecuteSelectResponse( + String sql, List parameters, Map... labels) + throws BigQuerySQLException { + Map labelMap = null; + if (labels != null + && labels.length == 1) { // We expect label as a key value pair in a single Map + labelMap = labels[0]; + } + try { + // use jobs.query if possible + if (isFastQuerySupported()) { + logger.log(Level.INFO, "\n Using Fast Query Path"); + final String projectId = bigQueryOptions.getProjectId(); + final QueryRequest queryRequest = + createQueryRequest(connectionSettings, sql, parameters, labelMap); + return queryRpc(projectId, queryRequest, sql, parameters != null); + } + // use jobs.insert otherwise + logger.log(Level.INFO, "\n Not Using Fast Query Path, using jobs.insert"); + com.google.api.services.bigquery.model.Job queryJob = + createQueryJob(sql, connectionSettings, parameters, labelMap); + JobId jobId = JobId.fromPb(queryJob.getJobReference()); + GetQueryResultsResponse firstPage = getQueryResultsFirstPage(jobId); + return getResultSet(firstPage, jobId, sql, parameters != null); + } catch (BigQueryException e) { + throw new BigQuerySQLException(e.getMessage(), e, e.getErrors()); + } + } + + /** + * Execute a SQL statement that returns a single ResultSet and returns a ListenableFuture to + * process the response asynchronously. + * + *

+   * <p>Example of running a query.
+   *
+   * <pre>{@code
+   * ConnectionSettings connectionSettings =
+   *     ConnectionSettings.newBuilder()
+   *         .setUseReadAPI(true)
+   *         .build();
+   * Connection connection = bigquery.createConnection(connectionSettings);
+   * String selectQuery = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
+   * ListenableFuture<ExecuteSelectResponse> executeSelectFuture =
+   *     connection.executeSelectAsync(selectQuery);
+   * ExecuteSelectResponse executeSelectRes = executeSelectFuture.get();
+   * if (!executeSelectRes.getIsSuccessful()) {
+   *   throw executeSelectRes.getBigQuerySQLException();
+   * }
+   * BigQueryResult bigQueryResult = executeSelectRes.getBigQueryResult();
+   * ResultSet rs = bigQueryResult.getResultSet();
+   * while (rs.next()) {
+   *   System.out.println(rs.getString(1));
+   * }
+   * }</pre>
    + * + * @param sql a static SQL SELECT statement + * @return a ListenableFuture that is used to get the data produced by the query + * @throws BigQuerySQLException upon failure + */ + @BetaApi + @Override + public ListenableFuture executeSelectAsync(String sql) + throws BigQuerySQLException { + return getExecuteSelectFuture(sql, null); + } + + /** This method calls the overloaded executeSelect(...) methods and returns a Future */ + private ListenableFuture getExecuteSelectFuture( + String sql, List parameters, Map... labels) + throws BigQuerySQLException { + ExecutorService execService = + Executors.newFixedThreadPool( + 2); // two fixed threads. One for the async operation and the other for processing the + // callback + ListeningExecutorService lExecService = MoreExecutors.listeningDecorator(execService); + ListenableFuture executeSelectFuture = + lExecService.submit( + () -> { + try { + return ExecuteSelectResponse.newBuilder() + .setResultSet( + this.executeSelect( + sql, + parameters, + labels)) // calling the overloaded executeSelect method, it takes care + // of null parameters and labels + .setIsSuccessful(true) + .build(); + } catch (BigQuerySQLException ex) { + return ExecuteSelectResponse + .newBuilder() // passing back the null result with isSuccessful set to false + .setIsSuccessful(false) + .setBigQuerySQLException(ex) + .build(); + } + }); + + Futures.addCallback( + executeSelectFuture, + new FutureCallback() { + public void onSuccess(ExecuteSelectResponse result) { + execService.shutdownNow(); // shutdown the executor service as we do not need it + } + + public void onFailure(Throwable t) { + logger.log( + Level.WARNING, + "\n" + + String.format( + "Async task failed or cancelled with error %s", t.getMessage())); + try { + close(); // attempt to stop the execution as the developer might have called + // Future.cancel() + } catch (BigQuerySQLException e) { + logger.log( + Level.WARNING, + "\n" + + String.format("Exception while closing the connection %s", e.getMessage())); + } + execService.shutdownNow(); // shutdown the executor service as we do not need it + } + }, + execService); + + return executeSelectFuture; + } + + /** + * Execute a SQL statement that returns a single ResultSet and returns a ListenableFuture to + * process the response asynchronously. + * + *

+   * <p>Example of running a query.
+   *
+   * <pre>{@code
+   * ConnectionSettings connectionSettings =
+   *     ConnectionSettings.newBuilder()
+   *         .setUseReadAPI(true)
+   *         .build();
+   * Connection connection = bigquery.createConnection(connectionSettings);
+   * String selectQuery =
+   *     "SELECT TimestampField, StringField, BooleanField FROM "
+   *         + MY_TABLE
+   *         + " WHERE StringField = @stringParam"
+   *         + " AND IntegerField IN UNNEST(@integerList)";
+   * QueryParameterValue stringParameter = QueryParameterValue.string("stringValue");
+   * QueryParameterValue intArrayParameter =
+   *     QueryParameterValue.array(new Integer[] {3, 4}, Integer.class);
+   * Parameter stringParam =
+   *     Parameter.newBuilder().setName("stringParam").setValue(stringParameter).build();
+   * Parameter intArrayParam =
+   *     Parameter.newBuilder().setName("integerList").setValue(intArrayParameter).build();
+   * List<Parameter> parameters = ImmutableList.of(stringParam, intArrayParam);
+   *
+   * ListenableFuture<ExecuteSelectResponse> executeSelectFuture =
+   *     connection.executeSelectAsync(selectQuery, parameters);
+   * ExecuteSelectResponse executeSelectRes = executeSelectFuture.get();
+   * if (!executeSelectRes.getIsSuccessful()) {
+   *   throw executeSelectRes.getBigQuerySQLException();
+   * }
+   * BigQueryResult bigQueryResult = executeSelectRes.getBigQueryResult();
+   * ResultSet rs = bigQueryResult.getResultSet();
+   * while (rs.next()) {
+   *   System.out.println(rs.getString(1));
+   * }
+   * }</pre>
    + * + * @param sql SQL SELECT query + * @param parameters named or positional parameters. The set of query parameters must either be + * all positional or all named parameters. + * @param labels (optional) the labels associated with this query. You can use these to organize + * and group your query jobs. Label keys and values can be no longer than 63 characters, can + * only contain lowercase letters, numeric characters, underscores and dashes. International + * characters are allowed. Label values are optional and Label is a Varargs. You should pass + * all the Labels in a single Map .Label keys must start with a letter and each label in the + * list must have a different key. + * @return a ListenableFuture that is used to get the data produced by the query + * @throws BigQuerySQLException upon failure + */ + @BetaApi + @Override + public ListenableFuture executeSelectAsync( + String sql, List parameters, Map... labels) + throws BigQuerySQLException { + return getExecuteSelectFuture(sql, parameters, labels); + } + + @VisibleForTesting + BigQueryResult getResultSet( + GetQueryResultsResponse firstPage, JobId jobId, String sql, Boolean hasQueryParameters) { + if (firstPage.getTotalRows().compareTo(BigInteger.ZERO) > 0) { + return getSubsequentQueryResultsWithJob( + firstPage.getTotalRows().longValue(), + (long) firstPage.getRows().size(), + jobId, + firstPage, + hasQueryParameters); + } + return new BigQueryResultImpl(Schema.fromPb(firstPage.getSchema()), 0, null, null); + } + + static class EndOfFieldValueList + extends AbstractList< + FieldValue> { // A reference of this class is used as a token to inform the thread + // consuming `buffer` BigQueryResultImpl that we have run out of records + @Override + public FieldValue get(int index) { + return null; + } + + @Override + public int size() { + return 0; + } + } + + private BigQueryResult queryRpc( + final String projectId, + final QueryRequest queryRequest, + String sql, + Boolean hasQueryParameters) { + com.google.api.services.bigquery.model.QueryResponse results; + try { + results = + BigQueryRetryHelper.runWithRetries( + () -> + bigQueryOptions + .getBigQueryRpcV2() + .queryRpcSkipExceptionTranslation(projectId, queryRequest), + bigQueryOptions.getRetrySettings(), + bigQueryOptions.getResultRetryAlgorithm(), + bigQueryOptions.getClock(), + retryConfig, + false, + null); + } catch (BigQueryRetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + + if (results.getErrors() != null) { + List bigQueryErrors = + results.getErrors().stream() + .map(BigQueryError.FROM_PB_FUNCTION) + .collect(Collectors.toList()); + // Throwing BigQueryException since there may be no JobId, and we want to stay consistent + // with the case where there is an HTTP error + throw new BigQueryException(bigQueryErrors); + } + + // Query finished running and we can paginate all the results + // Results should be read using the high throughput read API if sufficiently large. + boolean resultsLargeEnoughForReadApi = + connectionSettings.getUseReadAPI() + && results.getTotalRows() != null + && results.getTotalRows().longValue() > connectionSettings.getMinResultSize(); + if (results.getJobComplete() && results.getSchema() != null && !resultsLargeEnoughForReadApi) { + return processQueryResponseResults(results); + } else { + // Query is long-running (> 10s) and hasn't completed yet, query completed but didn't + // return the schema, or results are sufficiently large to use the high throughput read API, + // fallback to jobs.insert path. 
Some operations don't return the schema and can be optimized
+      // here, but this is left as future work.
+      JobId jobId = JobId.fromPb(results.getJobReference());
+      GetQueryResultsResponse firstPage = getQueryResultsFirstPage(jobId);
+      Long totalRows =
+          firstPage.getTotalRows() == null ? null : firstPage.getTotalRows().longValue();
+      Long pageRows = firstPage.getRows() == null ? null : (long) (firstPage.getRows().size());
+      logger.log(
+          Level.WARNING,
+          "\n"
+              + String.format(
+                  "results.getJobComplete(): %s, isSchemaNull: %s , totalRows: %s, pageRows: %s",
+                  results.getJobComplete(), results.getSchema() == null, totalRows, pageRows));
+      return getSubsequentQueryResultsWithJob(
+          totalRows, pageRows, jobId, firstPage, hasQueryParameters);
+    }
+  }
+
+  @VisibleForTesting
+  BigQueryResultStats getBigQueryResultSetStats(JobId jobId) {
+    // Create GetQueryResultsResponse query statistics
+    Job queryJob = getQueryJobRpc(jobId);
+    QueryStatistics queryStatistics = queryJob.getStatistics();
+    SessionInfo sessionInfo =
+        queryStatistics.getSessionInfo() == null ? null : queryStatistics.getSessionInfo();
+    return new BigQueryResultStatsImpl(queryStatistics, sessionInfo);
+  }
+
+  /* This method processes the first page of GetQueryResultsResponse and then uses tabledata.list */
+  @VisibleForTesting
+  BigQueryResult tableDataList(GetQueryResultsResponse firstPage, JobId jobId) {
+    Schema schema;
+    long numRows;
+    schema = Schema.fromPb(firstPage.getSchema());
+    numRows = firstPage.getTotalRows().longValue();
+
+    BigQueryResultStats bigQueryResultStats = getBigQueryResultSetStats(jobId);
+
+    // Keeps the deserialized records at the row level, which is consumed by BigQueryResult
+    bufferFvl = new LinkedBlockingDeque<>(getBufferSize());
+
+    // Keeps the parsed FieldValueLists
+    BlockingQueue<Tuple<Iterable<FieldValueList>, Boolean>> pageCache =
+        new LinkedBlockingDeque<>(
+            getPageCacheSize(connectionSettings.getNumBufferedRows(), schema));
+
+    // Keeps the raw RPC responses
+    BlockingQueue<Tuple<TableDataList, Boolean>> rpcResponseQueue =
+        new LinkedBlockingDeque<>(
+            getPageCacheSize(connectionSettings.getNumBufferedRows(), schema));
+
+    runNextPageTaskAsync(firstPage.getPageToken(), getDestinationTable(jobId), rpcResponseQueue);
+
+    parseRpcDataAsync(
+        firstPage.getRows(),
+        schema,
+        pageCache,
+        rpcResponseQueue); // parses data on a separate thread, thus maximising processing
+    // throughput
+
+    populateBufferAsync(
+        rpcResponseQueue, pageCache, bufferFvl); // spawns a thread to populate the buffer
+
+    // This will work for pagination as well, as the buffer is getting updated asynchronously
+    return new BigQueryResultImpl<AbstractList<FieldValue>>(
+        schema, numRows, bufferFvl, bigQueryResultStats);
+  }
+
+  @VisibleForTesting
+  BigQueryResult processQueryResponseResults(
+      com.google.api.services.bigquery.model.QueryResponse results) {
+    Schema schema;
+    long numRows;
+    schema = Schema.fromPb(results.getSchema());
+    numRows =
+        results.getTotalRows() == null
+            ? 0
+            : results.getTotalRows().longValue(); // in case of DML or DDL
+    // QueryResponse only provides cache hits, dmlStats, and sessionInfo as query processing
+    // statistics
+    DmlStats dmlStats =
+        results.getDmlStats() == null ? null : DmlStats.fromPb(results.getDmlStats());
+    Boolean cacheHit = results.getCacheHit();
+    QueryStatistics queryStatistics =
+        QueryStatistics.newBuilder().setDmlStats(dmlStats).setCacheHit(cacheHit).build();
+    // We cannot directly set sessionInfo in QueryStatistics
+    SessionInfo sessionInfo =
+        results.getSessionInfo() == null
+            ? null
+            : JobStatistics.SessionInfo.fromPb(results.getSessionInfo());
+    BigQueryResultStats bigQueryResultStats =
+        new BigQueryResultStatsImpl(queryStatistics, sessionInfo);
+
+    bufferFvl = new LinkedBlockingDeque<>(getBufferSize());
+    BlockingQueue<Tuple<Iterable<FieldValueList>, Boolean>> pageCache =
+        new LinkedBlockingDeque<>(
+            getPageCacheSize(connectionSettings.getNumBufferedRows(), schema));
+    BlockingQueue<Tuple<TableDataList, Boolean>> rpcResponseQueue =
+        new LinkedBlockingDeque<>(
+            getPageCacheSize(connectionSettings.getNumBufferedRows(), schema));
+
+    JobId jobId = JobId.fromPb(results.getJobReference());
+
+    // Thread to make rpc calls to fetch data from the server
+    runNextPageTaskAsync(results.getPageToken(), getDestinationTable(jobId), rpcResponseQueue);
+
+    // Thread to parse data received from the server into client library objects
+    parseRpcDataAsync(results.getRows(), schema, pageCache, rpcResponseQueue);
+
+    // Thread to populate the buffer (a blocking queue) shared with the consumer
+    populateBufferAsync(rpcResponseQueue, pageCache, bufferFvl);
+
+    return new BigQueryResultImpl<AbstractList<FieldValue>>(
+        schema, numRows, bufferFvl, bigQueryResultStats);
+  }
+
+  @VisibleForTesting
+  void runNextPageTaskAsync(
+      String firstPageToken,
+      TableId destinationTable,
+      BlockingQueue<Tuple<TableDataList, Boolean>> rpcResponseQueue) {
+    // This thread makes the RPC calls and paginates
+    Runnable nextPageTask =
+        () -> {
+          String pageToken = firstPageToken; // results.getPageToken();
+          try {
+            while (pageToken != null) { // paginate for a non-null token
+              if (Thread.currentThread().isInterrupted()
+                  || queryTaskExecutor.isShutdown()) { // do not process further pages and shutdown
+                logger.log(
+                    Level.WARNING,
+                    "\n"
+                        + Thread.currentThread().getName()
+                        + " Interrupted @ runNextPageTaskAsync");
+                break;
+              }
+              TableDataList tabledataList = tableDataListRpc(destinationTable, pageToken);
+              pageToken = tabledataList.getPageToken();
+              rpcResponseQueue.put(
+                  Tuple.of(
+                      tabledataList,
+                      true)); // this will be parsed asynchronously without blocking the current
+              // thread
+            }
+            rpcResponseQueue.put(
+                Tuple.of(
+                    null, false)); // this will stop the parseDataTask as well when the pagination
+            // completes
+          } catch (Exception e) {
+            throw new BigQueryException(0, e.getMessage(), e);
+          } // We cannot do queryTaskExecutor.shutdownNow() here as the populate buffer method may
+          // not have finished processing the records, and even that would be interrupted
+        };
+    queryTaskExecutor.execute(nextPageTask);
+  }
+
+  /*
+  This method takes TableDataList from rpcResponseQueue and populates pageCache with FieldValueList
+  */
+  @VisibleForTesting
+  void parseRpcDataAsync(
+      List<TableRow> tableRows,
+      Schema schema,
+      BlockingQueue<Tuple<Iterable<FieldValueList>, Boolean>> pageCache,
+      BlockingQueue<Tuple<TableDataList, Boolean>> rpcResponseQueue) {
+
+    // parse and put the first page in the pageCache before the other pages are parsed from the
+    // RPC calls
+    Iterable<FieldValueList> firstFieldValueLists = getIterableFieldValueList(tableRows, schema);
+    try {
+      pageCache.put(
+          Tuple.of(firstFieldValueLists, true)); // this is the first page which we have received.
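+      // Subsequent pages are parsed by the background parseDataTask below as they arrive on
+      // rpcResponseQueue; only this first page is parsed inline.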
+    } catch (InterruptedException e) {
+      logger.log(
+          Level.WARNING,
+          "\n" + Thread.currentThread().getName() + " Interrupted @ parseRpcDataAsync");
+    }
+
+    // rpcResponseQueue will get a null tuple if the cancel method is called, so there is no need
+    // to explicitly use a thread interrupt here
+    Runnable parseDataTask =
+        () -> {
+          try {
+            boolean hasMorePages = true;
+            while (hasMorePages) {
+              if (Thread.currentThread().isInterrupted()
+                  || queryTaskExecutor.isShutdown()) { // do not process further data and shutdown
+                logger.log(
+                    Level.WARNING,
+                    "\n" + Thread.currentThread().getName() + " Interrupted @ parseRpcDataAsync");
+                break;
+              }
+              // no interrupt received till this point, continue processing
+              Tuple<TableDataList, Boolean> rpcResponse = rpcResponseQueue.take();
+              TableDataList tabledataList = rpcResponse.x();
+              hasMorePages = rpcResponse.y();
+              if (tabledataList != null) {
+                Iterable<FieldValueList> fieldValueLists =
+                    getIterableFieldValueList(tabledataList.getRows(), schema); // Parse
+                pageCache.put(Tuple.of(fieldValueLists, true));
+              }
+            }
+          } catch (InterruptedException e) {
+            logger.log(
+                Level.WARNING,
+                "\n" + Thread.currentThread().getName() + " Interrupted @ parseRpcDataAsync",
+                e); // The thread might get interrupted while the cancel method is called, which is
+            // expected, so log this instead of throwing the exception back
+          }
+          try {
+            pageCache.put(Tuple.of(null, false)); // no further pages, graceful exit scenario
+          } catch (InterruptedException e) {
+            logger.log(
+                Level.WARNING,
+                "\n" + Thread.currentThread().getName() + " Interrupted @ parseRpcDataAsync",
+                e); // The thread might get interrupted while the cancel method is called, which is
+            // expected, so log this instead of throwing the exception back
+          } // We cannot do queryTaskExecutor.shutdownNow() here as the populate buffer method may
+          // not have finished processing the records, and even that would be interrupted
+        };
+    queryTaskExecutor.execute(parseDataTask);
+  }
+
+  @VisibleForTesting
+  void populateBufferAsync(
+      BlockingQueue<Tuple<TableDataList, Boolean>> rpcResponseQueue,
+      BlockingQueue<Tuple<Iterable<FieldValueList>, Boolean>> pageCache,
+      BlockingQueue<AbstractList<FieldValue>> buffer) {
+    Runnable populateBufferRunnable =
+        () -> { // producer thread populating the buffer
+          Iterable<FieldValueList> fieldValueLists = null;
+          boolean hasRows = true; // as we have to process the first page
+          while (hasRows) {
+            try {
+              Tuple<Iterable<FieldValueList>, Boolean> nextPageTuple = pageCache.take();
+              hasRows = nextPageTuple.y();
+              fieldValueLists = nextPageTuple.x();
+            } catch (InterruptedException e) {
+              logger.log(
+                  Level.WARNING,
+                  "\n" + Thread.currentThread().getName() + " Interrupted",
+                  e); // The thread might get interrupted while the cancel method is called, which
+              // is expected, so log this instead of throwing the exception back
+              break;
+            }
+
+            if (Thread.currentThread().isInterrupted()
+                || queryTaskExecutor.isShutdown()
+                || fieldValueLists
+                    == null) { // do not process further pages and shutdown (outer loop)
+              break;
+            }
+
+            for (FieldValueList fieldValueList : fieldValueLists) {
+              try {
+                if (Thread.currentThread().isInterrupted()
+                    || queryTaskExecutor
+                        .isShutdown()) { // do not process further pages and shutdown (inner loop)
+                  break;
+                }
+                buffer.put(fieldValueList);
+              } catch (InterruptedException e) {
+                throw new BigQueryException(0, e.getMessage(), e);
+              }
+            }
+          }
+          try {
+            buffer.put(
+                new EndOfFieldValueList()); // All the pages have been processed, put this marker
+          } catch (InterruptedException e) {
+            logger.log(
+                Level.WARNING,
+                "\n" + Thread.currentThread().getName() + " Interrupted @ populateBufferAsync",
+                e);
+          } finally {
+            queryTaskExecutor
+                .shutdownNow(); // Shutdown the thread pool. All the records are now processed
+          }
+        };
+
+    queryTaskExecutor.execute(populateBufferRunnable);
+  }
+
+  /**
+   * In an interrupt scenario, like when the background threads are still working and the user
+   * calls `connection.close()`, we need to add an End of Stream flag in the buffer so that
+   * `ResultSet.next()` stops advancing the cursor. We cannot rely on the `populateBufferAsync`
+   * method to do this, as the `BlockingQueue.put()` call will error out after the interrupt is
+   * triggered.
+   */
+  @InternalApi
+  void flagEndOfStream() { // package-private
+    try {
+      if (bufferFvl != null) { // that is, the tabledata.list endpoint is used
+        bufferFvl.put(
+            new EndOfFieldValueList()); // All the pages have been processed, put this marker
+      } else if (bufferRow != null) {
+        bufferRow.put(
+            new BigQueryResultImpl.Row(
+                null, true)); // All the pages have been processed, put this marker
+      } else {
+        logger.log(
+            Level.WARNING,
+            "\n"
+                + Thread.currentThread().getName()
+                + " Could not flag End of Stream, both the buffer types are null. This might happen when the connection is closed without executing a query");
+      }
+    } catch (InterruptedException e) {
+      logger.log(
+          Level.WARNING,
+          "\n" + Thread.currentThread().getName() + " Interrupted @ flagEndOfStream",
+          e);
+    }
+  }
+
+  /* Helper method that parses and populates a page with TableRows */
+  private static Iterable<FieldValueList> getIterableFieldValueList(
+      Iterable<TableRow> tableDataPb, final Schema schema) {
+    return ImmutableList.copyOf(
+        Iterables.transform(
+            tableDataPb != null ? tableDataPb : ImmutableList.of(),
+            new Function<TableRow, FieldValueList>() {
+              final FieldList fields = schema != null ? schema.getFields() : null;
+
+              @Override
+              public FieldValueList apply(TableRow rowPb) {
+                return FieldValueList.fromPb(rowPb.getF(), fields);
+              }
+            }));
+  }
+
+  /* Helper method that determines the optimal number of cached pages to improve read performance */
+  @VisibleForTesting
+  int getPageCacheSize(Integer numBufferedRows, Schema schema) {
+    final int MIN_CACHE_SIZE = 3; // Min number of pages to cache
+    final int MAX_CACHE_SIZE = 20; // Max number of pages to cache
+    int numColumns = schema.getFields().size();
+    int numCachedPages;
+    long numCachedRows = numBufferedRows == null ? 0 : numBufferedRows.longValue();
+
+    // TODO: Further enhance this logic depending on customer feedback on memory consumption
+    if (numCachedRows > 10000) {
+      numCachedPages =
+          2; // numBufferedRows is quite large, and as per our tests we should be able to do
+      // enough even with a small number of cached pages
+    } else if (numColumns > 15
+        && numCachedRows
+            > 5000) { // too many fields are being read, setting the page size on the lower end
+      numCachedPages = 3;
+    } else if (numCachedRows < 2000
+        && numColumns < 15) { // low page size with fewer columns, we can cache more pages
+      numCachedPages = 20;
+    } else { // default - under 10K numCachedRows with any number of columns
+      numCachedPages = 5;
+    }
+    return numCachedPages < MIN_CACHE_SIZE
+        ? MIN_CACHE_SIZE
+        : (Math.min(
+            numCachedPages,
+            MAX_CACHE_SIZE)); // numCachedPages should be between the defined min and max
+  }
+
+  /* Returns query results using either tabledata.list or the high throughput Read API */
+  @VisibleForTesting
+  BigQueryResult getSubsequentQueryResultsWithJob(
+      Long totalRows,
+      Long pageRows,
+      JobId jobId,
+      GetQueryResultsResponse firstPage,
+      Boolean hasQueryParameters) {
+    TableId destinationTable = getDestinationTable(jobId);
+    return useReadAPI(totalRows, pageRows, Schema.fromPb(firstPage.getSchema()), hasQueryParameters)
+        ? highThroughPutRead(
+            destinationTable,
+            firstPage.getTotalRows().longValue(),
+            Schema.fromPb(firstPage.getSchema()),
+            getBigQueryResultSetStats(
+                jobId)) // discard the first page and stream the entire BigQueryResult using
+        // the Read API
+        : tableDataList(firstPage, jobId);
+  }
+
+  /* Returns query results using either tabledata.list or the high throughput Read API */
+  @VisibleForTesting
+  BigQueryResult getSubsequentQueryResultsWithJob(
+      Long totalRows,
+      Long pageRows,
+      JobId jobId,
+      GetQueryResultsResponse firstPage,
+      Schema schema,
+      Boolean hasQueryParameters) {
+    TableId destinationTable = getDestinationTable(jobId);
+    return useReadAPI(totalRows, pageRows, schema, hasQueryParameters)
+        ? highThroughPutRead(
+            destinationTable,
+            totalRows == null
+                ? -1L
+                : totalRows, // totalRows is null when the job is still running. TODO: Check if
+            // any workaround is possible
+            schema,
+            getBigQueryResultSetStats(
+                jobId)) // discard the first page and stream the entire BigQueryResult using
+        // the Read API
+        : tableDataList(firstPage, jobId);
+  }
+
+  /* Returns Job from jobId by calling the jobs.get API */
+  private Job getQueryJobRpc(JobId jobId) {
+    final JobId completeJobId =
+        jobId
+            .setProjectId(bigQueryOptions.getProjectId())
+            .setLocation(
+                jobId.getLocation() == null && bigQueryOptions.getLocation() != null
+                    ? bigQueryOptions.getLocation()
+                    : jobId.getLocation());
+    com.google.api.services.bigquery.model.Job jobPb;
+    try {
+      jobPb =
+          BigQueryRetryHelper.runWithRetries(
+              () ->
+                  bigQueryOptions
+                      .getBigQueryRpcV2()
+                      .getQueryJobSkipExceptionTranslation(
+                          completeJobId.getProject(),
+                          completeJobId.getJob(),
+                          completeJobId.getLocation()),
+              bigQueryOptions.getRetrySettings(),
+              bigQueryOptions.getResultRetryAlgorithm(),
+              bigQueryOptions.getClock(),
+              EMPTY_RETRY_CONFIG,
+              false,
+              null);
+    } catch (BigQueryRetryHelperException e) {
+      if (e.getCause() instanceof BigQueryException) {
+        if (((BigQueryException) e.getCause()).getCode() == HTTP_NOT_FOUND) {
+          if (bigQueryOptions.getThrowNotFound()) {
+            throw new BigQueryException(HTTP_NOT_FOUND, "Query job not found");
+          }
+          return null;
+        }
+      }
+      throw BigQueryException.translateAndThrow(e);
+    }
+    // getQueryJobSkipExceptionTranslation will never return null so this is safe.
+    return Job.fromPb(bigQueryOptions.getService(), jobPb);
+  }
+
+  /* Returns the destinationTable from jobId by calling the jobs.get API */
+  @VisibleForTesting
+  TableId getDestinationTable(JobId jobId) {
+    Job job = getQueryJobRpc(jobId);
+    return ((QueryJobConfiguration) job.getConfiguration()).getDestinationTable();
+  }
+
+  @VisibleForTesting
+  TableDataList tableDataListRpc(TableId destinationTable, String pageToken) {
+    try {
+      final TableId completeTableId =
+          destinationTable.setProjectId(
+              Strings.isNullOrEmpty(destinationTable.getProject())
+
+  @VisibleForTesting
+  TableDataList tableDataListRpc(TableId destinationTable, String pageToken) {
+    try {
+      final TableId completeTableId =
+          destinationTable.setProjectId(
+              Strings.isNullOrEmpty(destinationTable.getProject())
+                  ? bigQueryOptions.getProjectId()
+                  : destinationTable.getProject());
+      TableDataList results =
+          BigQueryRetryHelper.runWithRetries(
+              () ->
+                  bigQueryOptions
+                      .getBigQueryRpcV2()
+                      .listTableDataWithRowLimitSkipExceptionTranslation(
+                          completeTableId.getProject(),
+                          completeTableId.getDataset(),
+                          completeTableId.getTable(),
+                          connectionSettings.getMaxResultPerPage(),
+                          pageToken),
+              bigQueryOptions.getRetrySettings(),
+              bigQueryOptions.getResultRetryAlgorithm(),
+              bigQueryOptions.getClock(),
+              EMPTY_RETRY_CONFIG,
+              false,
+              null);
+
+      return results;
+    } catch (BigQueryRetryHelperException e) {
+      throw BigQueryException.translateAndThrow(e);
+    }
+  }
+
+  @VisibleForTesting
+  BigQueryResult highThroughPutRead(
+      TableId destinationTable, long totalRows, Schema schema, BigQueryResultStats stats) {
+
+    try {
+      if (bqReadClient == null) { // if the read client isn't already initialized. Not thread safe.
+        BigQueryReadSettings settings =
+            BigQueryReadSettings.newBuilder()
+                .setCredentialsProvider(
+                    FixedCredentialsProvider.create(bigQueryOptions.getCredentials()))
+                .build();
+        bqReadClient = BigQueryReadClient.create(settings);
+      }
+      String parent = String.format("projects/%s", destinationTable.getProject());
+      String srcTable =
+          String.format(
+              "projects/%s/datasets/%s/tables/%s",
+              destinationTable.getProject(),
+              destinationTable.getDataset(),
+              destinationTable.getTable());
+
+      // Read all the columns of the source table (temp table) and stream the data back in Arrow
+      // format
+      ReadSession.Builder sessionBuilder =
+          ReadSession.newBuilder().setTable(srcTable).setDataFormat(DataFormat.ARROW);
+
+      CreateReadSessionRequest.Builder builder =
+          CreateReadSessionRequest.newBuilder()
+              .setParent(parent)
+              .setReadSession(sessionBuilder)
+              .setMaxStreamCount(1) // Currently just one stream is allowed
+          // TODO: detect ORDER BY with a regex check and use multiple streams when ordering is
+          // not required
+          ;
+      ReadSession readSession = bqReadClient.createReadSession(builder.build());
+      bufferRow = new LinkedBlockingDeque<>(getBufferSize());
+      Map<String, Integer> arrowNameToIndex = new HashMap<>();
+      // deserialize and populate the buffer async, so that the client isn't blocked
+      processArrowStreamAsync(
+          readSession,
+          bufferRow,
+          new ArrowRowReader(readSession.getArrowSchema(), arrowNameToIndex),
+          schema);
+
+      logger.log(Level.INFO, "\n Using BigQuery Read API");
+      stats.getQueryStatistics().setUseReadApi(true);
+      return new BigQueryResultImpl(schema, totalRows, bufferRow, stats);
+
+    } catch (IOException e) {
+      throw BigQueryException.translateAndThrow(e);
+    }
+  }
+
+  private void processArrowStreamAsync(
+      ReadSession readSession,
+      BlockingQueue<BigQueryResultImpl.Row> buffer,
+      ArrowRowReader reader,
+      Schema schema) {
+
+    Runnable arrowStreamProcessor =
+        () -> {
+          try {
+            // Use the first stream to perform reading.
+            String streamName = readSession.getStreams(0).getName();
+            ReadRowsRequest readRowsRequest =
+                ReadRowsRequest.newBuilder().setReadStream(streamName).build();
+
+            // Process each block of rows as they arrive and decode using our simple row reader.
+            com.google.api.gax.rpc.ServerStream<ReadRowsResponse> stream =
+                bqReadClient.readRowsCallable().call(readRowsRequest);
+            for (ReadRowsResponse response : stream) {
+              if (Thread.currentThread().isInterrupted()
+                  || queryTaskExecutor.isShutdown()) { // do not process and shutdown
+                break;
+              }
+              reader.processRows(response.getArrowRecordBatch(), buffer, schema);
+            }
+
+          } catch (Exception e) {
+            if (e instanceof InterruptedException || e.getCause() instanceof InterruptedException) {
+              // Log silently and let it fall through to 'finally' for cleanup.
+              // This is the "graceful shutdown".
+              logger.log(
+                  Level.INFO, "Background thread interrupted (Connection Closed). Stopping.");
+              Thread.currentThread().interrupt();
+            } else {
+              throw BigQueryException.translateAndThrow(e);
+            }
+          } finally { // logic needed for graceful shutdown
+            // marking end of stream
+            try {
+              buffer.put(
+                  new BigQueryResultImpl.Row(
+                      null, true)); // All the pages have been processed, put this marker
+            } catch (InterruptedException e) {
+              logger.log(
+                  Level.WARNING,
+                  "\n" + Thread.currentThread().getName() + " Interrupted @ markLast",
+                  e);
+            }
+            bqReadClient.shutdownNow(); // Shutdown the read client
+            queryTaskExecutor.shutdownNow(); // Shutdown the thread pool
+          }
+        };
+
+    queryTaskExecutor.execute(arrowStreamProcessor);
+  }
+
+  private class ArrowRowReader
+      implements AutoCloseable { // TODO: Update to a recent version of Arrow to avoid a memory leak
+
+    BufferAllocator allocator = new RootAllocator(Long.MAX_VALUE);
+
+    // Decoder object will be reused to avoid re-allocation and too much garbage collection.
+    private final VectorSchemaRoot root;
+    private final VectorLoader loader;
+
+    private ArrowRowReader(ArrowSchema arrowSchema, Map<String, Integer> arrowNameToIndex)
+        throws IOException {
+      org.apache.arrow.vector.types.pojo.Schema schema =
+          MessageSerializer.deserializeSchema(
+              new org.apache.arrow.vector.ipc.ReadChannel(
+                  new ByteArrayReadableSeekableByteChannel(
+                      arrowSchema.getSerializedSchema().toByteArray())));
+      List<FieldVector> vectors = new ArrayList<>();
+      List<org.apache.arrow.vector.types.pojo.Field> fields = schema.getFields();
+      for (int i = 0; i < fields.size(); i++) {
+        vectors.add(fields.get(i).createVector(allocator));
+        arrowNameToIndex.put(
+            fields.get(i).getName(),
+            i); // map each field name to its index, for lookups from the result set
+      }
+      root = new VectorSchemaRoot(vectors);
+      loader = new VectorLoader(root);
+    }
+
+    /**
+     * @param batch object returned from the ReadRowsResponse.
+     */
+    private void processRows(
+        ArrowRecordBatch batch, BlockingQueue<BigQueryResultImpl.Row> buffer, Schema schema)
+        throws IOException { // deserialize the values and consume them as a map keyed by field name
+      try {
+        org.apache.arrow.vector.ipc.message.ArrowRecordBatch deserializedBatch =
+            MessageSerializer.deserializeRecordBatch(
+                new ReadChannel(
+                    new ByteArrayReadableSeekableByteChannel(
+                        batch.getSerializedRecordBatch().toByteArray())),
+                allocator);
+
+        loader.load(deserializedBatch);
+        // Release buffers from batch (they are still held in the vectors in root).
+        deserializedBatch.close();
+
+        // Parse the vectors using the BQ Schema.
+        // Deserialize the data at the row level and add it to the buffer
+        FieldList fields = schema.getFields();
+        for (int rowNum = 0;
+            rowNum < root.getRowCount();
+            rowNum++) { // for the given number of rows in the batch
+
+          if (Thread.currentThread().isInterrupted()
+              || queryTaskExecutor.isShutdown()) { // do not process and shutdown
+            break; // exit the loop, root will be cleared in the finally block
+          }
+
+          Map<String, Object> curRow = new HashMap<>();
+          for (int col = 0; col < fields.size(); col++) { // iterate all the vectors for a given row
+            com.google.cloud.bigquery.Field field = fields.get(col);
+            FieldVector curFieldVec =
+                root.getVector(
+                    field.getName()); // can be accessed using the index or Vector/column name
+            curRow.put(field.getName(), curFieldVec.getObject(rowNum)); // Add the raw value
+          }
+          buffer.put(new BigQueryResultImpl.Row(curRow));
+        }
+        root.clear();
+      } catch (RuntimeException | InterruptedException e) {
+        throw BigQueryException.translateAndThrow(e);
+      } finally {
+        try {
+          root.clear();
+        } catch (RuntimeException e) {
+          logger.log(Level.WARNING, "\n Error while clearing VectorSchemaRoot ", e);
+        }
+      }
+    }
+
+    @Override
+    public void close() {
+      root.close();
+      allocator.close();
+    }
+  }
+
+  /* Returns just the first page of GetQueryResultsResponse using the jobId */
+  @VisibleForTesting
+  GetQueryResultsResponse getQueryResultsFirstPage(JobId jobId) {
+    JobId completeJobId =
+        jobId
+            .setProjectId(bigQueryOptions.getProjectId())
+            .setLocation(
+                jobId.getLocation() == null && bigQueryOptions.getLocation() != null
+                    ? bigQueryOptions.getLocation()
+                    : jobId.getLocation());
+
+    // Implementing logic to poll the Job's status using getQueryResults, as
+    // we do not get rows, row count and schema unless the job is complete
+    // Ref: b/241134681
+    // This logic relies on the backend to poll and wait. BigQuery guarantees that jobs make
+    // forward progress (a job won't get stuck in pending forever).
+    boolean jobComplete = false;
+    GetQueryResultsResponse results = null;
+    long timeoutMs = 10000; // defaulting to 10 seconds.
+
+    while (!jobComplete) {
+      try {
+        results =
+            BigQueryRetryHelper.runWithRetries(
+                () ->
+                    bigQueryOptions
+                        .getBigQueryRpcV2()
+                        .getQueryResultsWithRowLimitSkipExceptionTranslation(
+                            completeJobId.getProject(),
+                            completeJobId.getJob(),
+                            completeJobId.getLocation(),
+                            connectionSettings.getMaxResultPerPage(),
+                            timeoutMs),
+                bigQueryOptions.getRetrySettings(),
+                bigQueryOptions.getResultRetryAlgorithm(),
+                bigQueryOptions.getClock(),
+                retryConfig,
+                false,
+                null);
+
+        if (results.getErrors() != null) {
+          List<BigQueryError> bigQueryErrors =
+              results.getErrors().stream()
+                  .map(BigQueryError.FROM_PB_FUNCTION)
+                  .collect(Collectors.toList());
+          // Throwing BigQueryException since there may be no JobId, and we want to stay consistent
+          // with the case where there is an HTTP error
+          throw new BigQueryException(bigQueryErrors);
+        }
+      } catch (BigQueryRetryHelperException e) {
+        logger.log(Level.WARNING, "\n Error occurred while calling getQueryResultsWithRowLimit", e);
+        throw BigQueryException.translateAndThrow(e);
+      }
+      jobComplete = results.getJobComplete();
+
+      // For very long-running jobs, this Level.FINE log indicates that the job is still making
+      // progress and is not stuck.
+ logger.log( + Level.FINE, + String.format( + "jobComplete: %s , Polling getQueryResults with timeoutMs: %s", + jobComplete, timeoutMs)); + } + + return results; + } + + @VisibleForTesting + boolean isFastQuerySupported() { + // TODO: add regex logic to check for scripting + return connectionSettings.getClustering() == null + && connectionSettings.getCreateDisposition() == null + && connectionSettings.getDestinationEncryptionConfiguration() == null + && connectionSettings.getDestinationTable() == null + && connectionSettings.getJobTimeoutMs() == null + && connectionSettings.getMaximumBillingTier() == null + && connectionSettings.getPriority() == null + && connectionSettings.getRangePartitioning() == null + && connectionSettings.getSchemaUpdateOptions() == null + && connectionSettings.getTableDefinitions() == null + && connectionSettings.getTimePartitioning() == null + && connectionSettings.getUserDefinedFunctions() == null + && connectionSettings.getWriteDisposition() == null; + } + + @VisibleForTesting + boolean useReadAPI(Long totalRows, Long pageRows, Schema schema, Boolean hasQueryParameters) { + // Read API does not yet support Interval Type or QueryParameters + if (containsIntervalType(schema) || hasQueryParameters) { + logger.log(Level.INFO, "\n Schema has IntervalType, or QueryParameters. Disabling ReadAPI"); + return false; + } + + if (totalRows == null || pageRows == null) { + return connectionSettings.getUseReadAPI(); + } + + if (Boolean.TRUE.equals(connectionSettings.getUseReadAPI())) { + long resultRatio = totalRows / pageRows; + return resultRatio >= connectionSettings.getTotalToPageRowCountRatio() + && totalRows > connectionSettings.getMinResultSize(); + } else { + return false; + } + } + + // Does a BFS iteration to find out if there's an interval type in the schema. 
Implementation to + // be used until ReadAPI supports IntervalType + private boolean containsIntervalType(Schema schema) { + Queue fields = + new LinkedList(schema.getFields()); + while (!fields.isEmpty()) { + com.google.cloud.bigquery.Field curField = fields.poll(); + if (curField.getType().getStandardType() == StandardSQLTypeName.INTERVAL) { + return true; + } else if (curField.getType().getStandardType() == StandardSQLTypeName.STRUCT + || curField.getType().getStandardType() == StandardSQLTypeName.ARRAY) { + fields.addAll(curField.getSubFields()); + } + } + return false; + } + + // Used for job.query API endpoint + @VisibleForTesting + QueryRequest createQueryRequest( + ConnectionSettings connectionSettings, + String sql, + List queryParameters, + Map labels) { + QueryRequest content = new QueryRequest(); + String requestId = UUID.randomUUID().toString(); + + if (connectionSettings.getConnectionProperties() != null) { + content.setConnectionProperties( + connectionSettings.getConnectionProperties().stream() + .map(ConnectionProperty.TO_PB_FUNCTION) + .collect(Collectors.toList())); + } + if (connectionSettings.getDefaultDataset() != null) { + content.setDefaultDataset(connectionSettings.getDefaultDataset().toPb()); + } + if (connectionSettings.getMaximumBytesBilled() != null) { + content.setMaximumBytesBilled(connectionSettings.getMaximumBytesBilled()); + } + if (connectionSettings.getMaxResults() != null) { + content.setMaxResults(connectionSettings.getMaxResults()); + } + if (queryParameters != null) { + // content.setQueryParameters(queryParameters); + if (queryParameters.get(0).getName() == null) { + // If query parameter name is unset, then assume mode is positional + content.setParameterMode("POSITIONAL"); + // pass query parameters + List queryParametersPb = + Lists.transform(queryParameters, POSITIONAL_PARAMETER_TO_PB_FUNCTION); + content.setQueryParameters(queryParametersPb); + } else { + content.setParameterMode("NAMED"); + // pass query parameters + List queryParametersPb = + Lists.transform(queryParameters, NAMED_PARAMETER_TO_PB_FUNCTION); + content.setQueryParameters(queryParametersPb); + } + } + if (connectionSettings.getCreateSession() != null) { + content.setCreateSession(connectionSettings.getCreateSession()); + } + if (labels != null) { + content.setLabels(labels); + } + content.setQuery(sql); + content.setRequestId(requestId); + // The new Connection interface only supports StandardSQL dialect + content.setUseLegacySql(false); + return content; + } + + // Used by jobs.getQueryResults API endpoint + @VisibleForTesting + com.google.api.services.bigquery.model.Job createQueryJob( + String sql, + ConnectionSettings connectionSettings, + List queryParameters, + Map labels) { + com.google.api.services.bigquery.model.JobConfiguration configurationPb = + new com.google.api.services.bigquery.model.JobConfiguration(); + JobConfigurationQuery queryConfigurationPb = new JobConfigurationQuery(); + queryConfigurationPb.setQuery(sql); + if (queryParameters != null) { + if (queryParameters.get(0).getName() == null) { + // If query parameter name is unset, then assume mode is positional + queryConfigurationPb.setParameterMode("POSITIONAL"); + // pass query parameters + List queryParametersPb = + Lists.transform(queryParameters, POSITIONAL_PARAMETER_TO_PB_FUNCTION); + queryConfigurationPb.setQueryParameters(queryParametersPb); + } else { + queryConfigurationPb.setParameterMode("NAMED"); + // pass query parameters + List queryParametersPb = + Lists.transform(queryParameters, 
NAMED_PARAMETER_TO_PB_FUNCTION); + queryConfigurationPb.setQueryParameters(queryParametersPb); + } + } + if (connectionSettings.getDestinationTable() != null) { + queryConfigurationPb.setDestinationTable(connectionSettings.getDestinationTable().toPb()); + } + if (connectionSettings.getTableDefinitions() != null) { + queryConfigurationPb.setTableDefinitions( + Maps.transformValues( + connectionSettings.getTableDefinitions(), + ExternalTableDefinition.TO_EXTERNAL_DATA_FUNCTION)); + } + if (connectionSettings.getUserDefinedFunctions() != null) { + queryConfigurationPb.setUserDefinedFunctionResources( + connectionSettings.getUserDefinedFunctions().stream() + .map(UserDefinedFunction.TO_PB_FUNCTION) + .collect(Collectors.toList())); + } + if (connectionSettings.getCreateDisposition() != null) { + queryConfigurationPb.setCreateDisposition( + connectionSettings.getCreateDisposition().toString()); + } + if (connectionSettings.getWriteDisposition() != null) { + queryConfigurationPb.setWriteDisposition(connectionSettings.getWriteDisposition().toString()); + } + if (connectionSettings.getDefaultDataset() != null) { + queryConfigurationPb.setDefaultDataset(connectionSettings.getDefaultDataset().toPb()); + } + if (connectionSettings.getPriority() != null) { + queryConfigurationPb.setPriority(connectionSettings.getPriority().toString()); + } + if (connectionSettings.getAllowLargeResults() != null) { + queryConfigurationPb.setAllowLargeResults(connectionSettings.getAllowLargeResults()); + } + if (connectionSettings.getUseQueryCache() != null) { + queryConfigurationPb.setUseQueryCache(connectionSettings.getUseQueryCache()); + } + if (connectionSettings.getFlattenResults() != null) { + queryConfigurationPb.setFlattenResults(connectionSettings.getFlattenResults()); + } + if (connectionSettings.getMaximumBillingTier() != null) { + queryConfigurationPb.setMaximumBillingTier(connectionSettings.getMaximumBillingTier()); + } + if (connectionSettings.getMaximumBytesBilled() != null) { + queryConfigurationPb.setMaximumBytesBilled(connectionSettings.getMaximumBytesBilled()); + } + if (connectionSettings.getSchemaUpdateOptions() != null) { + ImmutableList.Builder schemaUpdateOptionsBuilder = new ImmutableList.Builder<>(); + for (JobInfo.SchemaUpdateOption schemaUpdateOption : + connectionSettings.getSchemaUpdateOptions()) { + schemaUpdateOptionsBuilder.add(schemaUpdateOption.name()); + } + queryConfigurationPb.setSchemaUpdateOptions(schemaUpdateOptionsBuilder.build()); + } + if (connectionSettings.getDestinationEncryptionConfiguration() != null) { + queryConfigurationPb.setDestinationEncryptionConfiguration( + connectionSettings.getDestinationEncryptionConfiguration().toPb()); + } + if (connectionSettings.getTimePartitioning() != null) { + queryConfigurationPb.setTimePartitioning(connectionSettings.getTimePartitioning().toPb()); + } + if (connectionSettings.getClustering() != null) { + queryConfigurationPb.setClustering(connectionSettings.getClustering().toPb()); + } + if (connectionSettings.getRangePartitioning() != null) { + queryConfigurationPb.setRangePartitioning(connectionSettings.getRangePartitioning().toPb()); + } + if (connectionSettings.getConnectionProperties() != null) { + queryConfigurationPb.setConnectionProperties( + connectionSettings.getConnectionProperties().stream() + .map(ConnectionProperty.TO_PB_FUNCTION) + .collect(Collectors.toList())); + } + if (connectionSettings.getCreateSession() != null) { + queryConfigurationPb.setCreateSession(connectionSettings.getCreateSession()); + } + if 
(connectionSettings.getJobTimeoutMs() != null) { + configurationPb.setJobTimeoutMs(connectionSettings.getJobTimeoutMs()); + } + if (labels != null) { + configurationPb.setLabels(labels); + } + // The new Connection interface only supports StandardSQL dialect + queryConfigurationPb.setUseLegacySql(false); + configurationPb.setQuery(queryConfigurationPb); + + com.google.api.services.bigquery.model.Job jobPb = + JobInfo.of(QueryJobConfiguration.fromPb(configurationPb)).toPb(); + com.google.api.services.bigquery.model.Job queryJob; + try { + queryJob = + BigQueryRetryHelper.runWithRetries( + () -> + bigQueryOptions + .getBigQueryRpcV2() + .createJobForQuerySkipExceptionTranslation(jobPb), + bigQueryOptions.getRetrySettings(), + bigQueryOptions.getResultRetryAlgorithm(), + bigQueryOptions.getClock(), + retryConfig, + false, + null); + } catch (BigQueryRetryHelper.BigQueryRetryHelperException e) { + logger.log(Level.WARNING, "\n Error occurred while calling createJobForQuery", e); + throw BigQueryException.translateAndThrow(e); + } + logger.log(Level.INFO, "\n Query job created"); + return queryJob; + } + + // Used by dryRun + @VisibleForTesting + com.google.api.services.bigquery.model.Job createDryRunJob(String sql) { + com.google.api.services.bigquery.model.JobConfiguration configurationPb = + new com.google.api.services.bigquery.model.JobConfiguration(); + configurationPb.setDryRun(true); + JobConfigurationQuery queryConfigurationPb = new JobConfigurationQuery(); + String parameterMode = sql.contains("?") ? "POSITIONAL" : "NAMED"; + queryConfigurationPb.setParameterMode(parameterMode); + queryConfigurationPb.setQuery(sql); + // UndeclaredQueryParameter is only supported in StandardSQL + queryConfigurationPb.setUseLegacySql(false); + if (connectionSettings.getDefaultDataset() != null) { + queryConfigurationPb.setDefaultDataset(connectionSettings.getDefaultDataset().toPb()); + } + if (connectionSettings.getCreateSession() != null) { + queryConfigurationPb.setCreateSession(connectionSettings.getCreateSession()); + } + configurationPb.setQuery(queryConfigurationPb); + + com.google.api.services.bigquery.model.Job jobPb = + JobInfo.of(QueryJobConfiguration.fromPb(configurationPb)).toPb(); + + com.google.api.services.bigquery.model.Job dryRunJob; + try { + dryRunJob = + BigQueryRetryHelper.runWithRetries( + () -> + bigQueryOptions + .getBigQueryRpcV2() + .createJobForQuerySkipExceptionTranslation(jobPb), + bigQueryOptions.getRetrySettings(), + bigQueryOptions.getResultRetryAlgorithm(), + bigQueryOptions.getClock(), + retryConfig, + false, + null); + } catch (BigQueryRetryHelper.BigQueryRetryHelperException e) { + throw BigQueryException.translateAndThrow(e); + } + return dryRunJob; + } + + // Convert from Parameter wrapper class to positional QueryParameter generated class + private static final Function POSITIONAL_PARAMETER_TO_PB_FUNCTION = + value -> { + QueryParameter queryParameterPb = new QueryParameter(); + queryParameterPb.setParameterValue(value.getValue().toValuePb()); + queryParameterPb.setParameterType(value.getValue().toTypePb()); + return queryParameterPb; + }; + + // Convert from Parameter wrapper class to name QueryParameter generated class + private static final Function NAMED_PARAMETER_TO_PB_FUNCTION = + value -> { + QueryParameter queryParameterPb = new QueryParameter(); + queryParameterPb.setName(value.getName()); + queryParameterPb.setParameterValue(value.getValue().toValuePb()); + queryParameterPb.setParameterType(value.getValue().toTypePb()); + return queryParameterPb; + }; + + 
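The two converters above encode the dispatch rule used by createQueryRequest and createQueryJob: a parameter without a name selects POSITIONAL mode, otherwise NAMED. A hedged sketch of that rule using the Parameter and QueryParameterValue types referenced in this file (the value is illustrative):

import com.google.cloud.bigquery.Parameter;
import com.google.cloud.bigquery.QueryParameterValue;
import com.google.common.collect.ImmutableList;
import java.util.List;

public class ParameterModeSketch {
  public static void main(String[] args) {
    List<Parameter> parameters =
        ImmutableList.of(Parameter.newBuilder().setValue(QueryParameterValue.int64(7L)).build());
    // An unnamed first parameter => parameterMode=POSITIONAL ("WHERE id = ?");
    // naming every parameter instead selects parameterMode=NAMED ("WHERE id = @id").
    String parameterMode = parameters.get(0).getName() == null ? "POSITIONAL" : "NAMED";
    System.out.println(parameterMode); // POSITIONAL
  }
}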
// Convert from QueryParameter class to the Parameter wrapper class + private static final Function QUERY_PARAMETER_FROM_PB_FUNCTION = + pb -> + Parameter.newBuilder() + .setName(pb.getName() == null ? "" : pb.getName()) + .setValue(QueryParameterValue.fromPb(pb.getParameterValue(), pb.getParameterType())) + .build(); +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionProperty.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionProperty.java index a6206712ed..30e0cf0141 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionProperty.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionProperty.java @@ -16,8 +16,6 @@ package com.google.cloud.bigquery; -import static com.google.common.base.Preconditions.checkNotNull; - import com.google.common.base.Function; import com.google.common.base.MoreObjects; import com.google.common.base.Objects; @@ -55,7 +53,8 @@ public static final class Builder { private String key; private String value; - private Builder() {}; + private Builder() {} + ; private Builder(ConnectionProperty properties) { this.key = properties.key; @@ -81,8 +80,8 @@ public ConnectionProperty build() { } private ConnectionProperty(Builder builder) { - this.key = checkNotNull(builder.key, "Required key is null or empty"); - this.value = checkNotNull(builder.value, "Required value is null or empty"); + this.key = builder.key; + this.value = builder.value; } /** Return the key of property. */ diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionSettings.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionSettings.java new file mode 100644 index 0000000000..79bc3aac9f --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ConnectionSettings.java @@ -0,0 +1,452 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import com.google.cloud.bigquery.JobInfo.CreateDisposition; +import com.google.cloud.bigquery.JobInfo.SchemaUpdateOption; +import com.google.cloud.bigquery.JobInfo.WriteDisposition; +import com.google.cloud.bigquery.QueryJobConfiguration.Priority; +import java.util.List; +import java.util.Map; +import javax.annotation.Nullable; + +/** ConnectionSettings for setting up a BigQuery query connection. */ +@AutoValue +public abstract class ConnectionSettings { + ConnectionSettings() { + // Package private so users can't subclass it but AutoValue can. + } + + /** + * Returns useReadAPI flag, enabled by default. 
Read API will be used if the underlying
+   * conditions are satisfied and this flag is enabled.
+   */
+  @Nullable
+  public abstract Boolean getUseReadAPI();
+
+  /** Returns the synchronous response timeoutMs associated with this query */
+  @Nullable
+  public abstract Long getRequestTimeout();
+
+  /** Returns the connection properties used with this query */
+  @Nullable
+  public abstract List<ConnectionProperty> getConnectionProperties();
+
+  /** Returns the default dataset */
+  @Nullable
+  public abstract DatasetId getDefaultDataset();
+
+  /** Returns the limit on the bytes billed for this job */
+  @Nullable
+  public abstract Long getMaximumBytesBilled();
+
+  /** Returns the maximum number of rows of data */
+  @Nullable
+  public abstract Long getMaxResults();
+
+  /** Returns the number of rows of data to pre-fetch */
+  @Nullable
+  public abstract Integer getNumBufferedRows();
+
+  @Nullable
+  public abstract Integer getTotalToPageRowCountRatio();
+
+  @Nullable
+  public abstract Integer getMinResultSize();
+
+  @Nullable
+  public abstract Integer getMaxResultPerPage();
+
+  /** Returns whether to look for the result in the query cache */
+  @Nullable
+  public abstract Boolean getUseQueryCache();
+
+  /**
+   * Returns whether nested and repeated fields should be flattened. If set to {@code false} {@link
+   * ConnectionSettings.Builder#setAllowLargeResults(Boolean)} must be {@code true}.
+   *
+   * @see Flatten
+   */
+  @Nullable
+  public abstract Boolean getFlattenResults();
+
+  /**
+   * Returns the BigQuery Storage read API configuration @Nullable public abstract
+   * ReadClientConnectionConfiguration getReadClientConnectionConfiguration();
+   */
+
+  /**
+   * Below properties are only supported by the jobs.insert API and not yet supported by the
+   * jobs.query API
+   */
+
+  /** Returns the clustering specification for the destination table. */
+  @Nullable
+  public abstract Clustering getClustering();
+
+  /**
+   * Returns whether the job is allowed to create new tables.
+   *
+   * @see Create Disposition
+   */
+  @Nullable
+  public abstract CreateDisposition getCreateDisposition();
+
+  /** Returns the custom encryption configuration (e.g., Cloud KMS keys) */
+  @Nullable
+  public abstract EncryptionConfiguration getDestinationEncryptionConfiguration();
+
+  /**
+   * Returns the table where to put query results. If not provided a new table is created. This
+   * value is required if {@link #getAllowLargeResults()} is {@code true}.
+   */
+  @Nullable
+  public abstract TableId getDestinationTable();
+
+  /** Returns the timeout associated with this job */
+  @Nullable
+  public abstract Long getJobTimeoutMs();
+
+  /** Returns the optional billing tier limit for this job. */
+  @Nullable
+  public abstract Integer getMaximumBillingTier();
+
+  /** Returns the query priority. */
+  @Nullable
+  public abstract Priority getPriority();
+
+  /**
+   * Returns whether the job is enabled to create arbitrarily large results. If {@code true} the
+   * query is allowed to create large results at a slight cost in performance.
+   *
+   * @see Returning Large Query Results
+   */
+  @Nullable
+  public abstract Boolean getAllowLargeResults();
+
+  /**
+   * Returns whether to create a new session.
+   *
+   * @see Create Sessions
+   */
+  @Nullable
+  public abstract Boolean getCreateSession();
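Taken together, the getters above and the builder below describe one per-connection query profile. A hedged usage sketch (the project and dataset ids are hypothetical, and createConnection is assumed to be the BigQuery entry point this settings object feeds, introduced alongside this change):

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Connection;
import com.google.cloud.bigquery.ConnectionSettings;
import com.google.cloud.bigquery.DatasetId;

public class ConnectionSettingsSketch {
  public static void main(String[] args) {
    ConnectionSettings settings =
        ConnectionSettings.newBuilder()
            .setUseReadAPI(true) // allow the Storage Read API once the ratio/size thresholds are met
            .setRequestTimeout(10_000L) // synchronous wait for the query, in milliseconds
            .setDefaultDataset(DatasetId.of("my_project", "my_dataset")) // hypothetical ids
            .setNumBufferedRows(10_000) // rows kept in the blocking-queue buffer
            .build();

    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    Connection connection = bigquery.createConnection(settings);
    // connection.executeSelect("SELECT ..."), then connection.close() when done.
  }
}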
+
+  /** Returns the range partitioning specification for the table */
+  @Nullable
+  public abstract RangePartitioning getRangePartitioning();
+
+  /**
+   * [Experimental] Returns options allowing the schema of the destination table to be updated as a
+   * side effect of the query job. Schema update options are supported in two cases: when
+   * writeDisposition is WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination
+   * table is a partition of a table, specified by partition decorators. For normal tables,
+   * WRITE_TRUNCATE will always overwrite the schema.
+   */
+  @Nullable
+  public abstract List<SchemaUpdateOption> getSchemaUpdateOptions();
+
+  /**
+   * Returns the external tables definitions. If querying external data sources outside of
+   * BigQuery, this value describes the data format, location and other properties of the data
+   * sources. By defining these properties, the data sources can be queried as if they were
+   * standard BigQuery tables.
+   */
+  @Nullable
+  public abstract Map<String, ExternalTableDefinition> getTableDefinitions();
+
+  /** Returns the time partitioning specification for the destination table. */
+  @Nullable
+  public abstract TimePartitioning getTimePartitioning();
+
+  /**
+   * Returns user defined function resources that can be used by this query. Function resources can
+   * either be defined inline ({@link UserDefinedFunction.Type#INLINE}) or loaded from a Google
+   * Cloud Storage URI ({@link UserDefinedFunction.Type#FROM_URI}).
+   */
+  @Nullable
+  public abstract List<UserDefinedFunction> getUserDefinedFunctions();
+
+  /**
+   * Returns the action that should occur if the destination table already exists.
+   *
+   * @see Write Disposition
+   */
+  @Nullable
+  public abstract WriteDisposition getWriteDisposition();
+
+  /** Returns a builder pre-populated using the current values of this object. */
+  public abstract Builder toBuilder();
+
+  /** Returns a builder for a {@code ConnectionSettings} object. */
+  public static Builder newBuilder() {
+    return new AutoValue_ConnectionSettings.Builder().withDefaultValues();
+  }
+
+  @AutoValue.Builder
+  public abstract static class Builder {
+
+    Builder withDefaultValues() {
+      return setUseReadAPI(true) // Read API is enabled by default
+          .setNumBufferedRows(10000) // 10K records will be kept in the buffer (Blocking Queue)
+          .setMinResultSize(200000) // Read API will be enabled when there are at least 200K records
+          .setTotalToPageRowCountRatio(3) // there should be at least 3 pages of records
+          .setMaxResultPerPage(100000); // page size for pagination
+    }
+
+    /**
+     * Sets the useReadAPI flag, enabled by default. The Read API will be used if the underlying
+     * conditions are satisfied and this flag is enabled.
+     *
+     * @param useReadAPI useReadAPI or {@code null} for none
+     */
+    public abstract Builder setUseReadAPI(Boolean useReadAPI);
+
+    /**
+     * Sets how long to wait for the query to complete, in milliseconds, before the request times
+     * out and returns. Note that this is only a timeout for the request, not the query. If the
+     * query takes longer to run than the timeout value, the call returns without any results and
+     * with the 'jobComplete' flag set to false. You can call GetQueryResults() to wait for the
+     * query to complete and read the results. The default value is 10000 milliseconds (10
+     * seconds).
+     *
+     * @param timeoutMs timeoutMs or {@code null} for none
+     */
+    public abstract Builder setRequestTimeout(Long timeoutMs);
+
+    /**
+     * Sets a connection-level property to customize query behavior.
+ * + * @param connectionProperties connectionProperties or {@code null} for none + */ + public abstract Builder setConnectionProperties(List connectionProperties); + + /** + * Sets the default dataset. This dataset is used for all unqualified table names used in the + * query. + */ + public abstract Builder setDefaultDataset(DatasetId datasetId); + + /** + * Limits the bytes billed for this job. Queries that will have bytes billed beyond this limit + * will fail (without incurring a charge). If unspecified, this will be set to your project + * default. + * + * @param maximumBytesBilled maximum bytes billed for this job + */ + public abstract Builder setMaximumBytesBilled(Long maximumBytesBilled); + + /** + * Sets the maximum number of rows of data to return per page of results. Setting this flag to a + * small value such as 1000 and then paging through results might improve reliability when the + * query result set is large. In addition to this limit, responses are also limited to 10 MB. By + * default, there is no maximum row count, and only the byte limit applies. + * + * @param maxResults maxResults or {@code null} for none + */ + public abstract Builder setMaxResults(Long maxResults); + + /** + * Sets the number of rows in the buffer (a blocking queue) that query results are consumed + * from. + * + * @param numBufferedRows numBufferedRows or {@code null} for none + */ + public abstract Builder setNumBufferedRows(Integer numBufferedRows); + + /** + * Sets a ratio of the total number of records and the records returned in the current page. + * This value is checked before calling the Read API + * + * @param totalToPageRowCountRatio totalToPageRowCountRatio + */ + public abstract Builder setTotalToPageRowCountRatio(Integer totalToPageRowCountRatio); + + /** + * Sets the minimum result size for which the Read API will be enabled + * + * @param minResultSize minResultSize + */ + public abstract Builder setMinResultSize(Integer minResultSize); + + /** + * Sets the maximum records per page to be used for pagination. This is used as an input for the + * tabledata.list and jobs.getQueryResults RPC calls + * + * @param maxResultPerPage + */ + public abstract Builder setMaxResultPerPage(Integer maxResultPerPage); + + /** + * Sets whether to look for the result in the query cache. The query cache is a best-effort + * cache that will be flushed whenever tables in the query are modified. Moreover, the query + * cache is only available when {@link ConnectionSettings.Builder#setDestinationTable(TableId)} + * is not set. + * + * @see Query Caching + */ + public abstract Builder setUseQueryCache(Boolean useQueryCache); + + /** + * Sets whether nested and repeated fields should be flattened. If set to {@code false} {@link + * ConnectionSettings.Builder#setAllowLargeResults(Boolean)} must be {@code true}. By default + * results are flattened. + * + * @see Flatten + */ + public abstract Builder setFlattenResults(Boolean flattenResults); + + /* */ + /**/ + /** + * Sets the values necessary to determine whether table result will be read using the BigQuery + * Storage client Read API. The BigQuery Storage client Read API will be used to read the query + * result when the totalToFirstPageSizeRatio (default 3) and minimumTableSize (default 100 rows) + * conditions set are met. A ReadSession will be created using the Apache Arrow data format for + * serialization. + * + *
<p>
    It also sets the maximum number of table rows allowed in buffer before streaming them to + * the BigQueryResult. + * + * @param readClientConnectionConfiguration or {@code null} for none + */ + /* + public abstract Builder setReadClientConnectionConfiguration( + ReadClientConnectionConfiguration readClientConnectionConfiguration);*/ + + /** Sets the clustering specification for the destination table. */ + public abstract Builder setClustering(Clustering clustering); + + /** + * Sets whether the job is allowed to create tables. + * + * @see + * Create Disposition + */ + public abstract Builder setCreateDisposition(CreateDisposition createDisposition); + + /** + * Sets the custom encryption configuration (e.g., Cloud KMS keys). + * + * @param destinationEncryptionConfiguration destinationEncryptionConfiguration or {@code null} + * for none + */ + public abstract Builder setDestinationEncryptionConfiguration( + EncryptionConfiguration destinationEncryptionConfiguration); + + /** + * Sets the table where to put query results. If not provided a new table is created. This value + * is required if {@link ConnectionSettings.Builder#setAllowLargeResults(Boolean)} is set to + * {@code true}. + */ + public abstract Builder setDestinationTable(TableId destinationTable); + + /** + * [Optional] Job timeout in milliseconds. If this time limit is exceeded, BigQuery may attempt + * to terminate the job. + * + * @param jobTimeoutMs jobTimeoutMs or {@code null} for none + */ + public abstract Builder setJobTimeoutMs(Long jobTimeoutMs); + + /** + * Limits the billing tier for this job. Queries that have resource usage beyond this tier will + * fail (without incurring a charge). If unspecified, this will be set to your project default. + * + * @param maximumBillingTier maximum billing tier for this job + */ + public abstract Builder setMaximumBillingTier(Integer maximumBillingTier); + + /** + * Sets a priority for the query. If not specified the priority is assumed to be {@link + * Priority#INTERACTIVE}. + */ + public abstract Builder setPriority(Priority priority); + + /** + * Sets whether the job is enabled to create arbitrarily large results. If {@code true} the + * query is allowed to create large results at a slight cost in performance. If {@code true} + * {@link ConnectionSettings.Builder#setDestinationTable(TableId)} must be provided. + * + * @see Returning + * Large Query Results + */ + public abstract Builder setAllowLargeResults(Boolean allowLargeResults); + + /** + * Sets whether to create a new session. If {@code true} a random session id will be generated + * by BigQuery. If false, runs query with an existing session_id passed in ConnectionProperty, + * otherwise runs query in non-session mode." + */ + public abstract Builder setCreateSession(Boolean createSession); + + /** + * Range partitioning specification for this table. Only one of timePartitioning and + * rangePartitioning should be specified. + * + * @param rangePartitioning rangePartitioning or {@code null} for none + */ + public abstract Builder setRangePartitioning(RangePartitioning rangePartitioning); + + /** + * [Experimental] Sets options allowing the schema of the destination table to be updated as a + * side effect of the query job. Schema update options are supported in two cases: when + * writeDisposition is WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination + * table is a partition of a table, specified by partition decorators. For normal tables, + * WRITE_TRUNCATE will always overwrite the schema. 
+ */ + public abstract Builder setSchemaUpdateOptions(List schemaUpdateOptions); + + /** + * Sets the external tables definitions. If querying external data sources outside of BigQuery, + * this value describes the data format, location and other properties of the data sources. By + * defining these properties, the data sources can be queried as if they were standard BigQuery + * tables. + */ + public abstract Builder setTableDefinitions( + Map tableDefinitions); + + /** Sets the time partitioning specification for the destination table. */ + public abstract Builder setTimePartitioning(TimePartitioning timePartitioning); + + /** + * Sets user defined function resources that can be used by this query. Function resources can + * either be defined inline ({@link UserDefinedFunction#inline(String)}) or loaded from a Google + * Cloud Storage URI ({@link UserDefinedFunction#fromUri(String)}. + */ + public abstract Builder setUserDefinedFunctions(List userDefinedFunctions); + + /** + * Sets the action that should occur if the destination table already exists. + * + * @see + * Write Disposition + */ + public abstract Builder setWriteDisposition(WriteDisposition writeDisposition); + + /** Creates a {@code ConnectionSettings} object. */ + public abstract ConnectionSettings build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CopyJobConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CopyJobConfiguration.java index d0e15e49ee..54e612271f 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CopyJobConfiguration.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CopyJobConfiguration.java @@ -38,22 +38,28 @@ public final class CopyJobConfiguration extends JobConfiguration { private final List sourceTables; private final TableId destinationTable; + private final String operationType; + private final String destinationExpirationTime; private final JobInfo.CreateDisposition createDisposition; private final JobInfo.WriteDisposition writeDisposition; private final EncryptionConfiguration destinationEncryptionConfiguration; private final Map labels; private final Long jobTimeoutMs; + private final String reservation; public static final class Builder extends JobConfiguration.Builder { private List sourceTables; private TableId destinationTable; + private String operationType; + private String destinationExpirationTime; private JobInfo.CreateDisposition createDisposition; private JobInfo.WriteDisposition writeDisposition; private EncryptionConfiguration destinationEncryptionConfiguration; private Map labels; private Long jobTimeoutMs; + private String reservation; private Builder() { super(Type.COPY); @@ -63,17 +69,27 @@ private Builder(CopyJobConfiguration jobConfiguration) { this(); this.sourceTables = jobConfiguration.sourceTables; this.destinationTable = jobConfiguration.destinationTable; + this.operationType = jobConfiguration.operationType; + this.destinationExpirationTime = jobConfiguration.destinationExpirationTime; this.createDisposition = jobConfiguration.createDisposition; this.writeDisposition = jobConfiguration.writeDisposition; this.destinationEncryptionConfiguration = jobConfiguration.destinationEncryptionConfiguration; this.labels = jobConfiguration.labels; this.jobTimeoutMs = jobConfiguration.jobTimeoutMs; + this.reservation = jobConfiguration.reservation; } private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { this(); JobConfigurationTableCopy 
copyConfigurationPb = configurationPb.getCopy(); this.destinationTable = TableId.fromPb(copyConfigurationPb.getDestinationTable()); + if (copyConfigurationPb.getOperationType() != null) { + this.operationType = copyConfigurationPb.getOperationType(); + } + if (copyConfigurationPb.getDestinationExpirationTime() != null) { + this.destinationExpirationTime = + copyConfigurationPb.getDestinationExpirationTime().toString(); + } if (copyConfigurationPb.getSourceTables() != null) { this.sourceTables = Lists.transform(copyConfigurationPb.getSourceTables(), TableId.FROM_PB_FUNCTION); @@ -100,6 +116,9 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur if (configurationPb.getJobTimeoutMs() != null) { this.jobTimeoutMs = configurationPb.getJobTimeoutMs(); } + if (configurationPb.getReservation() != null) { + this.reservation = configurationPb.getReservation(); + } } /** Sets the source tables to copy. */ @@ -114,6 +133,25 @@ public Builder setDestinationTable(TableId destinationTable) { return this; } + /** + * Sets the supported operation types (COPY, CLONE, SNAPSHOT or RESTORE) in table copy job. More + * info: https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#operationtype + */ + public Builder setOperationType(String operationType) { + this.operationType = operationType; + return this; + } + + /** + * Sets the time when the destination table expires. Expired tables will be deleted and their + * storage reclaimed. More info: + * https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#jobconfigurationtablecopy + */ + public Builder setDestinationExpirationTime(String destinationExpirationTime) { + this.destinationExpirationTime = destinationExpirationTime; + return this; + } + public Builder setDestinationEncryptionConfiguration( EncryptionConfiguration encryptionConfiguration) { this.destinationEncryptionConfiguration = encryptionConfiguration; @@ -169,6 +207,19 @@ public Builder setJobTimeoutMs(Long jobTimeoutMs) { return this; } + /** + * [Optional] The reservation that job would use. User can specify a reservation to execute the + * job. If reservation is not set, reservation is determined based on the rules defined by the + * reservation assignments. The expected format is + * `projects/{project}/locations/{location}/reservations/{reservation}`. + * + * @param reservation reservation or {@code null} for none + */ + public Builder setReservation(String reservation) { + this.reservation = reservation; + return this; + } + public CopyJobConfiguration build() { return new CopyJobConfiguration(this); } @@ -178,11 +229,14 @@ private CopyJobConfiguration(Builder builder) { super(builder); this.sourceTables = checkNotNull(builder.sourceTables); this.destinationTable = checkNotNull(builder.destinationTable); + this.operationType = builder.operationType; + this.destinationExpirationTime = builder.destinationExpirationTime; this.createDisposition = builder.createDisposition; this.writeDisposition = builder.writeDisposition; this.destinationEncryptionConfiguration = builder.destinationEncryptionConfiguration; this.labels = builder.labels; this.jobTimeoutMs = builder.jobTimeoutMs; + this.reservation = builder.reservation; } /** Returns the source tables to copy. 
*/ @@ -195,6 +249,16 @@ public TableId getDestinationTable() { return destinationTable; } + /** Returns the table copy job type */ + public String getOperationType() { + return operationType; + } + + /** Returns the time when the destination table expires */ + public String getDestinationExpirationTime() { + return destinationExpirationTime; + } + public EncryptionConfiguration getDestinationEncryptionConfiguration() { return destinationEncryptionConfiguration; } @@ -231,6 +295,11 @@ public Long getJobTimeoutMs() { return jobTimeoutMs; } + /** Returns the reservation associated with this job */ + public String getReservation() { + return reservation; + } + @Override public Builder toBuilder() { return new Builder(this); @@ -241,11 +310,14 @@ ToStringHelper toStringHelper() { return super.toStringHelper() .add("sourceTables", sourceTables) .add("destinationTable", destinationTable) + .add("operationType", operationType) + .add("destinationExpirationTime", destinationExpirationTime) .add("destinationEncryptionConfiguration", destinationEncryptionConfiguration) .add("createDisposition", createDisposition) .add("writeDisposition", writeDisposition) .add("labels", labels) - .add("jobTimeoutMs", jobTimeoutMs); + .add("jobTimeoutMs", jobTimeoutMs) + .add("reservation", reservation); } @Override @@ -260,10 +332,13 @@ public int hashCode() { baseHashCode(), sourceTables, destinationTable, + operationType, + destinationExpirationTime, createDisposition, writeDisposition, labels, - jobTimeoutMs); + jobTimeoutMs, + reservation); } @Override @@ -293,11 +368,15 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { com.google.api.services.bigquery.model.JobConfiguration jobConfiguration = new com.google.api.services.bigquery.model.JobConfiguration(); configurationPb.setDestinationTable(destinationTable.toPb()); - if (sourceTables.size() == 1) { - configurationPb.setSourceTable(sourceTables.get(0).toPb()); - } else { + if (sourceTables != null) { configurationPb.setSourceTables(Lists.transform(sourceTables, TableId.TO_PB_FUNCTION)); } + if (operationType != null) { + configurationPb.setOperationType(operationType); + } + if (destinationExpirationTime != null) { + configurationPb.setDestinationExpirationTime(destinationExpirationTime); + } if (createDisposition != null) { configurationPb.setCreateDisposition(createDisposition.toString()); } @@ -314,6 +393,9 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { if (jobTimeoutMs != null) { jobConfiguration.setJobTimeoutMs(jobTimeoutMs); } + if (reservation != null) { + jobConfiguration.setReservation(reservation); + } jobConfiguration.setCopy(configurationPb); return jobConfiguration; } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CsvOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CsvOptions.java index 6ef9e77fc1..b39c82a7ed 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CsvOptions.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CsvOptions.java @@ -26,14 +26,16 @@ */ public final class CsvOptions extends FormatOptions { - private static final long serialVersionUID = 2193570529308612708L; + private static final long serialVersionUID = 2193570529308612709L; private final Boolean allowJaggedRows; private final Boolean allowQuotedNewLines; private final String encoding; private final String fieldDelimiter; + private final String nullMarker; private final String quote; private final Long skipLeadingRows; + private final 
Boolean preserveAsciiControlCharacters; public static final class Builder { @@ -41,8 +43,10 @@ public static final class Builder { private Boolean allowQuotedNewLines; private String encoding; private String fieldDelimiter; + private String nullMarker; private String quote; private Long skipLeadingRows; + private Boolean preserveAsciiControlCharacters; private Builder() {} @@ -51,8 +55,10 @@ private Builder(CsvOptions csvOptions) { this.allowQuotedNewLines = csvOptions.allowQuotedNewLines; this.encoding = csvOptions.encoding; this.fieldDelimiter = csvOptions.fieldDelimiter; + this.nullMarker = csvOptions.nullMarker; this.quote = csvOptions.quote; this.skipLeadingRows = csvOptions.skipLeadingRows; + this.preserveAsciiControlCharacters = csvOptions.preserveAsciiControlCharacters; } /** @@ -107,6 +113,18 @@ public Builder setFieldDelimiter(String fieldDelimiter) { return this; } + /** + * [Optional] Specifies a string that represents a null value in a CSV file. For example, if you + * specify \"\\N\", BigQuery interprets \"\\N\" as a null value when querying a CSV file. The + * default value is the empty string. If you set this property to a custom value, BigQuery + * throws an error if an empty string is present for all data types except for STRING and BYTE. + * For STRING and BYTE columns, BigQuery interprets the empty string as an empty value. + */ + public Builder setNullMarker(String nullMarker) { + this.nullMarker = nullMarker; + return this; + } + /** * Sets the value that is used to quote data sections in a CSV file. BigQuery converts the * string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split @@ -130,6 +148,15 @@ public Builder setSkipLeadingRows(long skipLeadingRows) { return this; } + /** + * Sets whether BigQuery should allow ASCII control characters in a CSV file. By default ASCII + * control characters are not allowed. + */ + public Builder setPreserveAsciiControlCharacters(boolean preserveAsciiControlCharacters) { + this.preserveAsciiControlCharacters = preserveAsciiControlCharacters; + return this; + } + /** Creates a {@code CsvOptions} object. */ public CsvOptions build() { return new CsvOptions(this); @@ -142,8 +169,10 @@ private CsvOptions(Builder builder) { this.allowQuotedNewLines = builder.allowQuotedNewLines; this.encoding = builder.encoding; this.fieldDelimiter = builder.fieldDelimiter; + this.nullMarker = builder.nullMarker; this.quote = builder.quote; this.skipLeadingRows = builder.skipLeadingRows; + this.preserveAsciiControlCharacters = builder.preserveAsciiControlCharacters; } /** @@ -179,6 +208,11 @@ public String getFieldDelimiter() { return fieldDelimiter; } + /** Returns the string that represents a null value in a CSV file. */ + public String getNullMarker() { + return nullMarker; + } + /** Returns the value that is used to quote data sections in a CSV file. */ public String getQuote() { return quote; @@ -192,6 +226,14 @@ public Long getSkipLeadingRows() { return skipLeadingRows; } + /** + * Returns whether BigQuery should allow ascii control characters in a CSV file. By default ascii + * control characters are not allowed. + */ + public Boolean getPreserveAsciiControlCharacters() { + return preserveAsciiControlCharacters; + } + /** Returns a builder for the {@code CsvOptions} object. 
*/ public Builder toBuilder() { return new Builder(this); @@ -205,8 +247,10 @@ public String toString() { .add("allowQuotedNewLines", allowQuotedNewLines) .add("encoding", encoding) .add("fieldDelimiter", fieldDelimiter) + .add("nullMarker", nullMarker) .add("quote", quote) .add("skipLeadingRows", skipLeadingRows) + .add("preserveAsciiControlCharacters", preserveAsciiControlCharacters) .toString(); } @@ -218,8 +262,10 @@ public int hashCode() { allowQuotedNewLines, encoding, fieldDelimiter, + nullMarker, quote, - skipLeadingRows); + skipLeadingRows, + preserveAsciiControlCharacters); } @Override @@ -235,8 +281,10 @@ com.google.api.services.bigquery.model.CsvOptions toPb() { csvOptions.setAllowQuotedNewlines(allowQuotedNewLines); csvOptions.setEncoding(encoding); csvOptions.setFieldDelimiter(fieldDelimiter); + csvOptions.setNullMarker(nullMarker); csvOptions.setQuote(quote); csvOptions.setSkipLeadingRows(skipLeadingRows); + csvOptions.setPreserveAsciiControlCharacters(preserveAsciiControlCharacters); return csvOptions; } @@ -259,12 +307,18 @@ static CsvOptions fromPb(com.google.api.services.bigquery.model.CsvOptions csvOp if (csvOptions.getFieldDelimiter() != null) { builder.setFieldDelimiter(csvOptions.getFieldDelimiter()); } + if (csvOptions.getNullMarker() != null) { + builder.setNullMarker(csvOptions.getNullMarker()); + } if (csvOptions.getQuote() != null) { builder.setQuote(csvOptions.getQuote()); } if (csvOptions.getSkipLeadingRows() != null) { builder.setSkipLeadingRows(csvOptions.getSkipLeadingRows()); } + if (csvOptions.getPreserveAsciiControlCharacters() != null) { + builder.setPreserveAsciiControlCharacters(csvOptions.getPreserveAsciiControlCharacters()); + } return builder.build(); } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DataFormatOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DataFormatOptions.java new file mode 100644 index 0000000000..beaadf32cf --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DataFormatOptions.java @@ -0,0 +1,73 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import java.io.Serializable; + +/** + * Google BigQuery DataFormatOptions. Configures the output format for data types returned from + * BigQuery. 
+ */ +@AutoValue +public abstract class DataFormatOptions implements Serializable { + public enum TimestampFormatOptions { + TIMESTAMP_OUTPUT_FORMAT_UNSPECIFIED("TIMESTAMP_OUTPUT_FORMAT_UNSPECIFIED"), + FLOAT64("FLOAT64"), + INT64("INT64"), + ISO8601_STRING("ISO8601_STRING"); + + private final String format; + + TimestampFormatOptions(String format) { + this.format = format; + } + + @Override + public String toString() { + return format; + } + } + + public abstract boolean useInt64Timestamp(); + + public abstract TimestampFormatOptions timestampFormatOptions(); + + public static Builder newBuilder() { + return new AutoValue_DataFormatOptions.Builder() + .useInt64Timestamp(false) + .timestampFormatOptions(TimestampFormatOptions.TIMESTAMP_OUTPUT_FORMAT_UNSPECIFIED); + } + + public abstract Builder toBuilder(); + + @AutoValue.Builder + public abstract static class Builder { + public abstract Builder useInt64Timestamp(boolean useInt64Timestamp); + + public abstract Builder timestampFormatOptions(TimestampFormatOptions timestampFormatOptions); + + public abstract DataFormatOptions build(); + } + + com.google.api.services.bigquery.model.DataFormatOptions toPb() { + com.google.api.services.bigquery.model.DataFormatOptions request = + new com.google.api.services.bigquery.model.DataFormatOptions(); + request.setUseInt64Timestamp(useInt64Timestamp()); + request.setTimestampOutputFormat(timestampFormatOptions().toString()); + return request; + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java index 05a7e85662..18606e7013 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java @@ -23,6 +23,7 @@ import com.google.cloud.bigquery.BigQuery.DatasetOption; import com.google.cloud.bigquery.BigQuery.TableListOption; import com.google.cloud.bigquery.BigQuery.TableOption; +import com.google.common.base.Strings; import java.io.IOException; import java.io.ObjectInputStream; import java.util.List; @@ -145,6 +146,36 @@ public Builder setDefaultPartitionExpirationMs(Long defaultPartitionExpirationMs return this; } + @Override + public Builder setDefaultCollation(String defaultCollation) { + infoBuilder.setDefaultCollation(defaultCollation); + return this; + } + + @Override + public Builder setExternalDatasetReference(ExternalDatasetReference externalDatasetReference) { + infoBuilder.setExternalDatasetReference(externalDatasetReference); + return this; + } + + @Override + public Builder setStorageBillingModel(String storageBillingModel) { + infoBuilder.setStorageBillingModel(storageBillingModel); + return this; + } + + @Override + public Builder setMaxTimeTravelHours(Long maxTimeTravelHours) { + infoBuilder.setMaxTimeTravelHours(maxTimeTravelHours); + return this; + } + + @Override + public Builder setResourceTags(Map resourceTags) { + infoBuilder.setResourceTags(resourceTags); + return this; + } + @Override public Dataset build() { return new Dataset(bigquery, infoBuilder); @@ -275,7 +306,13 @@ public Page
<Table>
    list(TableListOption... options) { * @throws BigQueryException upon failure */ public Table get(String tableId, TableOption... options) { - return bigquery.getTable(TableId.of(getDatasetId().getDataset(), tableId), options); + // Use the projectId from this dataset's DatasetId when resolving the table; see: + // https://github.com/googleapis/java-bigquery/issues/1369 + TableId tabId = + Strings.isNullOrEmpty(getDatasetId().getProject()) + ? TableId.of(getDatasetId().getDataset(), tableId) + : TableId.of(getDatasetId().getProject(), getDatasetId().getDataset(), tableId); + return bigquery.getTable(tabId, options); } /** diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetId.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetId.java index 0e2ad02b21..ff42d5d360 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetId.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetId.java @@ -16,10 +16,8 @@ package com.google.cloud.bigquery; -import static com.google.common.base.Preconditions.checkArgument; -import static com.google.common.base.Strings.isNullOrEmpty; - import com.google.api.services.bigquery.model.DatasetReference; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.Objects; @@ -42,7 +40,6 @@ public String getDataset() { } private DatasetId(String project, String dataset) { - checkArgument(!isNullOrEmpty(dataset), "Provided dataset is null or empty"); this.project = project; this.dataset = dataset; } @@ -84,4 +81,11 @@ DatasetReference toPb() { static DatasetId fromPb(DatasetReference datasetRef) { return new DatasetId(datasetRef.getProjectId(), datasetRef.getDatasetId()); } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .put("bq.dataset.project", this.getProject()) + .put("bq.dataset.id", this.getDataset()) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java index c7c6d0c19f..9187619951 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java @@ -27,6 +27,7 @@ import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.List; import java.util.Map; @@ -69,9 +70,14 @@ public Dataset apply(DatasetInfo datasetInfo) { private final Long lastModified; private final String location; private final String selfLink; - private final Labels labels; + private final Annotations labels; private final EncryptionConfiguration defaultEncryptionConfiguration; private final Long defaultPartitionExpirationMs; + private final String defaultCollation; + private final ExternalDatasetReference externalDatasetReference; + private final String storageBillingModel; + private final Long maxTimeTravelHours; + private final Annotations resourceTags; /** A builder for {@code DatasetInfo} objects. */ public abstract static class Builder { @@ -126,6 +132,25 @@ public abstract static class Builder { public abstract Builder setLabels(Map labels); + /** + * Optional. Information about the external metadata storage where the dataset is defined. 
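The Dataset.get change above matters when a Dataset handle points at another project; a sketch of the behavior it fixes (project, dataset, and table names are hypothetical):

    // Previously dataset.get("my_table") dropped the dataset's project and resolved
    // the table against the client's default project (issue #1369 above).
    Dataset dataset = bigquery.getDataset(DatasetId.of("other-project", "my_dataset"));
    Table table = dataset.get("my_table"); // now resolves other-project.my_dataset.my_table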
+ * Filled out when the dataset type is EXTERNAL + */ + public abstract Builder setExternalDatasetReference( + ExternalDatasetReference externalDatasetReference); + + /** + * Optional. Storage billing model to be used for all tables in the dataset. Can be set to + * PHYSICAL. Default is LOGICAL. + */ + public abstract Builder setStorageBillingModel(String storageBillingModel); + + /** + * Optional. Defines the time travel window in hours. The value can be from 48 to 168 hours (2 + * to 7 days). The default value is 168 hours if this is not set. The value may be {@code null}. + */ + public abstract Builder setMaxTimeTravelHours(Long maxTimeTravelHours); + /** * The default encryption key for all tables in the dataset. Once this property is set, all * newly-created partitioned tables in the dataset will have encryption key set to this value, @@ -148,6 +173,32 @@ public abstract Builder setDefaultEncryptionConfiguration( */ public abstract Builder setDefaultPartitionExpirationMs(Long defaultPartitionExpirationMs); + /** + * Optional. Defines the default collation specification of future tables created in the + * dataset. If a table is created in this dataset without table-level default collation, then + * the table inherits the dataset default collation, which is applied to the string fields that + * do not have explicit collation specified. A change to this field affects only tables created + * afterwards, and does not alter the existing tables. The following values are supported: + * + *

    * 'und:ci': undetermined locale, case insensitive. * '': empty string. Default to + * case-sensitive behavior. (-- A wrapper is used here because it is possible to set the value + * to the empty string. --) (-- api-linter: standard-fields=disabled --) + */ + public abstract Builder setDefaultCollation(String defaultCollation); + + /** + * Optional. The tags attached to this + * dataset. Tag keys are globally unique. Tag key is expected to be in the namespaced format, + * for example "123456789012/environment" where 123456789012 is the ID of the parent + * organization or project resource for this tag key. Tag value is expected to be the short + * name, for example "Production". + * + * @see Tag + * definitions for more details. + * @param resourceTags resourceTags or {@code null} for none + */ + public abstract Builder setResourceTags(Map resourceTags); + /** Creates a {@code DatasetInfo} object. */ public abstract DatasetInfo build(); } @@ -165,9 +216,14 @@ static final class BuilderImpl extends Builder { private Long lastModified; private String location; private String selfLink; - private Labels labels = Labels.ZERO; + private Annotations labels = Annotations.ZERO; private EncryptionConfiguration defaultEncryptionConfiguration; private Long defaultPartitionExpirationMs; + private String defaultCollation; + private ExternalDatasetReference externalDatasetReference; + private String storageBillingModel; + private Long maxTimeTravelHours; + private Annotations resourceTags = Annotations.ZERO; BuilderImpl() {} @@ -186,6 +242,11 @@ static final class BuilderImpl extends Builder { this.labels = datasetInfo.labels; this.defaultEncryptionConfiguration = datasetInfo.defaultEncryptionConfiguration; this.defaultPartitionExpirationMs = datasetInfo.defaultPartitionExpirationMs; + this.defaultCollation = datasetInfo.defaultCollation; + this.externalDatasetReference = datasetInfo.externalDatasetReference; + this.storageBillingModel = datasetInfo.storageBillingModel; + this.maxTimeTravelHours = datasetInfo.maxTimeTravelHours; + this.resourceTags = datasetInfo.resourceTags; } BuilderImpl(com.google.api.services.bigquery.model.Dataset datasetPb) { @@ -212,13 +273,21 @@ public Acl apply(Dataset.Access accessPb) { this.lastModified = datasetPb.getLastModifiedTime(); this.location = datasetPb.getLocation(); this.selfLink = datasetPb.getSelfLink(); - this.labels = Labels.fromPb(datasetPb.getLabels()); + this.labels = Annotations.fromPb(datasetPb.getLabels()); if (datasetPb.getDefaultEncryptionConfiguration() != null) { this.defaultEncryptionConfiguration = new EncryptionConfiguration.Builder(datasetPb.getDefaultEncryptionConfiguration()) .build(); } this.defaultPartitionExpirationMs = datasetPb.getDefaultPartitionExpirationMs(); + this.defaultCollation = datasetPb.getDefaultCollation(); + if (datasetPb.getExternalDatasetReference() != null) { + this.externalDatasetReference = + ExternalDatasetReference.fromPb(datasetPb.getExternalDatasetReference()); + } + this.storageBillingModel = datasetPb.getStorageBillingModel(); + this.maxTimeTravelHours = datasetPb.getMaxTimeTravelHours(); + this.resourceTags = Annotations.fromPb(datasetPb.getResourceTags()); } @Override @@ -296,7 +365,7 @@ Builder setSelfLink(String selfLink) { */ @Override public Builder setLabels(Map labels) { - this.labels = Labels.fromUser(labels); + this.labels = Annotations.fromUser(labels); return this; } @@ -313,6 +382,36 @@ public Builder setDefaultPartitionExpirationMs(Long defaultPartitionExpirationMs return this; } + @Override + 
public Builder setDefaultCollation(String defaultCollation) { + this.defaultCollation = defaultCollation; + return this; + } + + @Override + public Builder setExternalDatasetReference(ExternalDatasetReference externalDatasetReference) { + this.externalDatasetReference = externalDatasetReference; + return this; + } + + @Override + public Builder setStorageBillingModel(String storageBillingModel) { + this.storageBillingModel = storageBillingModel; + return this; + } + + @Override + public Builder setMaxTimeTravelHours(Long maxTimeTravelHours) { + this.maxTimeTravelHours = maxTimeTravelHours; + return this; + } + + @Override + public Builder setResourceTags(Map resourceTags) { + this.resourceTags = Annotations.fromUser(resourceTags); + return this; + } + @Override public DatasetInfo build() { return new DatasetInfo(this); @@ -334,6 +433,11 @@ public DatasetInfo build() { labels = builder.labels; defaultEncryptionConfiguration = builder.defaultEncryptionConfiguration; defaultPartitionExpirationMs = builder.defaultPartitionExpirationMs; + defaultCollation = builder.defaultCollation; + externalDatasetReference = builder.externalDatasetReference; + storageBillingModel = builder.storageBillingModel; + maxTimeTravelHours = builder.maxTimeTravelHours; + resourceTags = builder.resourceTags; } /** Returns the dataset identity. */ @@ -459,6 +563,45 @@ public Long getDefaultPartitionExpirationMs() { return defaultPartitionExpirationMs; } + public String getDefaultCollation() { + return defaultCollation; + } + + public String getStorageBillingModel() { + return storageBillingModel; + } + + /** + * Returns the number of hours that deleted or updated data will be available to be queried for + * all tables in the dataset. + */ + public Long getMaxTimeTravelHours() { + return maxTimeTravelHours; + } + + /** + * Optional. The tags attached to this + * dataset. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for + * example "123456789012/environment" where 123456789012 is the ID of the parent organization or + * project resource for this tag key. Tag value is expected to be the short name, for example + * "Production". + * + * @see Tag + * definitions for more details. + * @return value or {@code null} for none + */ + public Map getResourceTags() { + return resourceTags.userMap(); + } + + /** + * Returns information about the external metadata storage where the dataset is defined. Filled + * out when the dataset type is EXTERNAL. + */ + public ExternalDatasetReference getExternalDatasetReference() { + return externalDatasetReference; + } + /** Returns a builder for the dataset object. 
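The new dataset-level options can be combined through the builder shown above; a sketch with illustrative values (ImmutableMap is Guava's com.google.common.collect.ImmutableMap):

    DatasetInfo datasetInfo =
        DatasetInfo.newBuilder("my_dataset")
            .setDefaultCollation("und:ci") // case-insensitive string fields by default
            .setStorageBillingModel("PHYSICAL") // default is LOGICAL
            .setMaxTimeTravelHours(72L) // must be between 48 and 168 hours
            .setResourceTags(ImmutableMap.of("123456789012/environment", "Production"))
            .build();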
*/ public Builder toBuilder() { return new BuilderImpl(this); @@ -481,6 +624,11 @@ public String toString() { .add("labels", labels) .add("defaultEncryptionConfiguration", defaultEncryptionConfiguration) .add("defaultPartitionExpirationMs", defaultPartitionExpirationMs) + .add("defaultCollation", defaultCollation) + .add("externalDatasetReference", externalDatasetReference) + .add("storageBillingModel", storageBillingModel) + .add("maxTimeTravelHours", maxTimeTravelHours) + .add("resourceTags", resourceTags) .toString(); } @@ -556,6 +704,19 @@ public Dataset.Access apply(Acl acl) { if (defaultPartitionExpirationMs != null) { datasetPb.setDefaultPartitionExpirationMs(defaultPartitionExpirationMs); } + if (defaultCollation != null) { + datasetPb.setDefaultCollation(defaultCollation); + } + if (externalDatasetReference != null) { + datasetPb.setExternalDatasetReference(externalDatasetReference.toPb()); + } + if (storageBillingModel != null) { + datasetPb.setStorageBillingModel(storageBillingModel); + } + if (maxTimeTravelHours != null) { + datasetPb.setMaxTimeTravelHours(maxTimeTravelHours); + } + datasetPb.setResourceTags(resourceTags.toPb()); return datasetPb; } @@ -586,6 +747,18 @@ public static DatasetInfo of(String datasetId) { return newBuilder(datasetId).build(); } + private static String getFieldAsString(Object field) { + return field == null ? "null" : field.toString(); + } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .putAll(this.getDatasetId().getOtelAttributes()) + .put("bq.dataset.last_modified", getFieldAsString(this.getLastModified())) + .put("bq.dataset.location", getFieldAsString(this.getLocation())) + .build(); + } + static DatasetInfo fromPb(Dataset datasetPb) { return new BuilderImpl(datasetPb).build(); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DmlStats.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DmlStats.java new file mode 100644 index 0000000000..0ce97dc0cb --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DmlStats.java @@ -0,0 +1,112 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.api.services.bigquery.model.DmlStatistics; +import com.google.auto.value.AutoValue; +import java.io.Serializable; +import javax.annotation.Nullable; + +/** Represents DML statistics information. */ +@AutoValue +public abstract class DmlStats implements Serializable { + + @AutoValue.Builder + public abstract static class Builder { + /** + * Number of deleted Rows. populated by DML DELETE, MERGE and TRUNCATE statements. + * + * @param deletedRowCount deletedRowCount or {@code null} for none + */ + public abstract Builder setDeletedRowCount(Long deletedRowCount); + + /** + * Number of inserted Rows. Populated by DML INSERT and MERGE statements. 
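A sketch of reading the DmlStats counters after a DML query; this assumes DmlStats is surfaced via the job's QueryStatistics (the getDmlStats accessor is an assumption, not shown in this diff):

    Job job =
        bigquery
            .create(JobInfo.of(QueryJobConfiguration.of("DELETE FROM my_dataset.my_table WHERE id < 100")))
            .waitFor();
    JobStatistics.QueryStatistics stats = job.getStatistics();
    DmlStats dmlStats = stats.getDmlStats(); // assumed accessor; null for non-DML jobs
    Long deletedRows = dmlStats.getDeletedRowCount();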
+ * + * @param insertedRowCount insertedRowCount or {@code null} for none + */ + public abstract Builder setInsertedRowCount(Long insertedRowCount); + + /** + * Number of updated Rows. Populated by DML UPDATE and MERGE statements. + * + * @param updatedRowCount updatedRowCount or {@code null} for none + */ + public abstract Builder setUpdatedRowCount(Long updatedRowCount); + + /** Creates a {@code DmlStats} object. */ + public abstract DmlStats build(); + } + + /** + * Returns number of deleted Rows. populated by DML DELETE, MERGE and TRUNCATE statements. + * + * @return value or {@code null} for none + */ + @Nullable + public abstract Long getDeletedRowCount(); + + /** + * Returns number of inserted Rows. Populated by DML INSERT and MERGE statements. + * + * @return value or {@code null} for none + */ + @Nullable + public abstract Long getInsertedRowCount(); + + /** + * Returns number of updated Rows. Populated by DML UPDATE and MERGE statements. + * + * @return value or {@code null} for none + */ + @Nullable + public abstract Long getUpdatedRowCount(); + + public abstract Builder toBuilder(); + + public static Builder newBuilder() { + return new AutoValue_DmlStats.Builder(); + } + + DmlStatistics toPb() { + DmlStatistics dmlStatisticsPb = new DmlStatistics(); + if (getDeletedRowCount() != null) { + dmlStatisticsPb.setDeletedRowCount(getDeletedRowCount()); + } + if (getInsertedRowCount() != null) { + dmlStatisticsPb.setInsertedRowCount(getInsertedRowCount()); + } + if (getUpdatedRowCount() != null) { + dmlStatisticsPb.setUpdatedRowCount(getUpdatedRowCount()); + } + return dmlStatisticsPb; + } + + static DmlStats fromPb(DmlStatistics dmlStatisticsPb) { + Builder builder = newBuilder(); + if (dmlStatisticsPb.getDeletedRowCount() != null) { + builder.setDeletedRowCount(dmlStatisticsPb.getDeletedRowCount()); + } + if (dmlStatisticsPb.getInsertedRowCount() != null) { + builder.setInsertedRowCount(dmlStatisticsPb.getInsertedRowCount()); + } + if (dmlStatisticsPb.getUpdatedRowCount() != null) { + builder.setUpdatedRowCount(dmlStatisticsPb.getUpdatedRowCount()); + } + return builder.build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExecuteSelectResponse.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExecuteSelectResponse.java new file mode 100644 index 0000000000..59745020f5 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExecuteSelectResponse.java @@ -0,0 +1,47 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import java.io.Serializable; +import javax.annotation.Nullable; + +@AutoValue +public abstract class ExecuteSelectResponse implements Serializable { + @Nullable + public abstract BigQueryResult getResultSet(); + + public abstract boolean getIsSuccessful(); + + @Nullable + public abstract BigQuerySQLException getBigQuerySQLException(); + + public static Builder newBuilder() { + return new AutoValue_ExecuteSelectResponse.Builder(); + } + + @AutoValue.Builder + public abstract static class Builder { + public abstract ExecuteSelectResponse build(); + + public abstract Builder setResultSet(BigQueryResult bigQueryResult); + + public abstract Builder setIsSuccessful(boolean isSuccessful); + + public abstract Builder setBigQuerySQLException(BigQuerySQLException bigQuerySQLException); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExternalDatasetReference.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExternalDatasetReference.java new file mode 100644 index 0000000000..ecfe54c50c --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExternalDatasetReference.java @@ -0,0 +1,79 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import com.google.common.annotations.VisibleForTesting; +import java.io.Serializable; +import javax.annotation.Nullable; + +/** Configures the access a dataset defined in an external metadata storage. */ +@AutoValue +public abstract class ExternalDatasetReference implements Serializable { + + public static ExternalDatasetReference.Builder newBuilder() { + return new AutoValue_ExternalDatasetReference.Builder(); + } + + static ExternalDatasetReference fromPb( + com.google.api.services.bigquery.model.ExternalDatasetReference externalDatasetReference) { + ExternalDatasetReference.Builder builder = newBuilder(); + + if (externalDatasetReference.getConnection() != null) { + builder.setConnection(externalDatasetReference.getConnection()); + } + if (externalDatasetReference.getExternalSource() != null) { + builder.setExternalSource(externalDatasetReference.getExternalSource()); + } + + return builder.build(); + } + + public com.google.api.services.bigquery.model.ExternalDatasetReference toPb() { + com.google.api.services.bigquery.model.ExternalDatasetReference externalDatasetReference = + new com.google.api.services.bigquery.model.ExternalDatasetReference(); + + externalDatasetReference.setConnection(getConnection()); + externalDatasetReference.setExternalSource(getExternalSource()); + return externalDatasetReference; + } + + @Nullable + public abstract String getConnection(); + + @Nullable + public abstract String getExternalSource(); + + /** Returns a builder for an ExternalDatasetReference. 
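A sketch of building the reference (its setters appear just below) and attaching it to a dataset; the connection path and source name are placeholders:

    ExternalDatasetReference ref =
        ExternalDatasetReference.newBuilder()
            .setConnection("projects/my-project/locations/us/connections/my-connection")
            .setExternalSource("my_external_source")
            .build();
    DatasetInfo federated =
        DatasetInfo.newBuilder("my_federated_dataset").setExternalDatasetReference(ref).build();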
*/ + @VisibleForTesting + public abstract ExternalDatasetReference.Builder toBuilder(); + + @AutoValue.Builder + public abstract static class Builder { + /** + * The connection id that is used to access the external_source. Format: + * projects/{project_id}/locations/{location_id}/connections/{connection_id} * + */ + public abstract ExternalDatasetReference.Builder setConnection(String connection); + + /** External source that backs this dataset * */ + public abstract ExternalDatasetReference.Builder setExternalSource(String externalSource); + + /** Creates a {@code ExternalDatasetReference} object. */ + public abstract ExternalDatasetReference build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExternalTableDefinition.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExternalTableDefinition.java index 5eb0f4c575..c1859c3f9f 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExternalTableDefinition.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExternalTableDefinition.java @@ -19,9 +19,12 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Strings.isNullOrEmpty; +import com.google.api.core.ApiFunction; import com.google.api.services.bigquery.model.ExternalDataConfiguration; import com.google.api.services.bigquery.model.Table; import com.google.auto.value.AutoValue; +import com.google.cloud.StringEnumType; +import com.google.cloud.StringEnumValue; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import java.util.List; @@ -57,6 +60,46 @@ public ExternalDataConfiguration apply(ExternalTableDefinition tableInfo) { private static final long serialVersionUID = -5951580238459622025L; + public static final class SourceColumnMatch extends StringEnumValue { + private static final long serialVersionUID = 818920627219751207L; + private static final ApiFunction CONSTRUCTOR = + new ApiFunction() { + @Override + public SourceColumnMatch apply(String constant) { + return new SourceColumnMatch(constant); + } + }; + + private static final StringEnumType type = + new StringEnumType(SourceColumnMatch.class, CONSTRUCTOR); + + public static final SourceColumnMatch POSITION = type.createAndRegister("POSITION"); + + public static final SourceColumnMatch NAME = type.createAndRegister("NAME"); + + private SourceColumnMatch(String constant) { + super(constant); + } + + /** + * Get the SourceColumnMatch for the given String constant, and throw an exception if the + * constant is not recognized. + */ + public static SourceColumnMatch valueOfStrict(String constant) { + return type.valueOfStrict(constant); + } + + /** Get the SourceColumnMatch for the given String constant, and allow unrecognized values. */ + public static SourceColumnMatch valueOf(String constant) { + return type.valueOf(constant); + } + + /** Return the known values for SourceColumnMatch. */ + public static SourceColumnMatch[] values() { + return type.values(); + } + } + @AutoValue.Builder public abstract static class Builder extends TableDefinition.Builder { @@ -79,8 +122,20 @@ public Builder setSourceUris(List sourceUris) { return setSourceUrisImmut(ImmutableList.copyOf(sourceUris)); } + abstract Builder setFileSetSpecTypeInner(String spec); + abstract Builder setSourceUrisImmut(ImmutableList sourceUris); + /** + * Defines how to interpret files denoted by URIs. 
By default the files are assumed to be data + * files (this can be specified explicitly via FILE_SET_SPEC_TYPE_FILE_SYSTEM_MATCH). A second + * option is "FILE_SET_SPEC_TYPE_NEW_LINE_DELIMITED_MANIFEST" which interprets each file as a + * manifest file, where each line is a reference to a file. + */ + public Builder setFileSetSpecType(String fileSetSpecType) { + return setFileSetSpecTypeInner(fileSetSpecType); + } + /** * Sets the source format, and possibly some parsing options, of the external data. Supported * formats are {@code CSV} and {@code NEWLINE_DELIMITED_JSON}. @@ -93,6 +148,17 @@ public Builder setFormatOptions(FormatOptions formatOptions) { return setFormatOptionsInner(formatOptions); } + /** + * Defines the list of possible SQL data types to which the source decimal values are converted. + * This list and the precision and the scale parameters of the decimal field determine the + * target type. In the order of NUMERIC, BIGNUMERIC, and STRING, a type is picked if it is in + * the specified list and if it supports the precision and the scale. STRING supports all + * precision and scale values. + * + * @param decimalTargetTypes decimalTargetType or {@code null} for none + */ + public abstract Builder setDecimalTargetTypes(List decimalTargetTypes); + abstract Builder setFormatOptionsInner(FormatOptions formatOptions); /** @@ -144,11 +210,93 @@ public Builder setFormatOptions(FormatOptions formatOptions) { /** Sets the table Hive partitioning options. */ public Builder setHivePartitioningOptions(HivePartitioningOptions hivePartitioningOptions) { return setHivePartitioningOptionsInner(hivePartitioningOptions); - }; + } + ; + + /** + * When creating an external table, the user can provide a reference file with the table schema. + * This is enabled for the following formats: AVRO, PARQUET, ORC. + * + * @param referenceFileSchemaUri or {@code null} for none + */ + public abstract Builder setReferenceFileSchemaUri(String referenceFileSchemaUri); abstract Builder setHivePartitioningOptionsInner( HivePartitioningOptions hivePartitioningOptions); + public Builder setObjectMetadata(String objectMetadata) { + return setObjectMetadataInner(objectMetadata); + } + + abstract Builder setObjectMetadataInner(String objectMetadata); + + /** + * [Optional] Metadata Cache Mode for the table. Set this to enable caching of metadata from + * external data source. + * + * @see + * MetadataCacheMode + */ + public Builder setMetadataCacheMode(String metadataCacheMode) { + return setMetadataCacheModeInner(metadataCacheMode); + } + + abstract Builder setMetadataCacheModeInner(String metadataCacheMode); + + /** + * [Optional] The maximum staleness of data that could be returned when the table is queried. + * Staleness is encoded as a string encoding of the SQL IntervalValue type. + * + * @see + * MaxStaleness + */ + public Builder setMaxStaleness(String maxStaleness) { + return setMaxStalenessInner(maxStaleness); + } + + abstract Builder setMaxStalenessInner(String maxStaleness); + + /** + * Time zone used when parsing timestamp values that do not have specific time zone information + * (e.g. 2024-04-20 12:34:56). The expected format is an IANA timezone string (e.g. + * America/Los_Angeles). + */ + public abstract Builder setTimeZone(String timeZone); + + /** Format used to parse DATE values. Supports C-style and SQL-style values. */ + public abstract Builder setDateFormat(String dateFormat); + + /** Format used to parse DATETIME values. Supports C-style and SQL-style values. 
*/ + public abstract Builder setDatetimeFormat(String datetimeFormat); + + /** Format used to parse TIME values. Supports C-style and SQL-style values. */ + public abstract Builder setTimeFormat(String timeFormat); + + /** Format used to parse TIMESTAMP values. Supports C-style and SQL-style values. */ + public abstract Builder setTimestampFormat(String timestampFormat); + + /** + * Controls the strategy used to match loaded columns to the schema. If not set, a sensible + * default is chosen based on how the schema is provided. If autodetect is used, then columns + * are matched by name. Otherwise, columns are matched by position. This is done to keep the + * behavior backward-compatible. Acceptable values are: POSITION - matches by position. This + * assumes that the columns are ordered the same way as the schema. NAME - matches by name. This + * reads the header row as column names and reorders columns to match the field names in the + * schema. + */ + public abstract Builder setSourceColumnMatch(SourceColumnMatch sourceColumnMatch); + + /** + * A list of strings that represent SQL NULL values in a CSV file. null_marker and null_markers + * cannot be set at the same time; setting both results in a user error. Any string listed in + * null_markers, including the empty string, is interpreted as SQL NULL. This applies to all + * column types. + */ + public abstract Builder setNullMarkers(List nullMarkers); + /** Creates an {@code ExternalTableDefinition} object. */ @Override public abstract ExternalTableDefinition build(); @@ -188,7 +336,8 @@ abstract Builder setHivePartitioningOptionsInner( @Nullable public Boolean ignoreUnknownValues() { return getIgnoreUnknownValues(); - }; + } + ; @Nullable public abstract Boolean getIgnoreUnknownValues(); @@ -213,9 +362,63 @@ public List getSourceUris() { return getSourceUrisImmut(); } + @Nullable + public String getFileSetSpecType() { + return getFileSetSpecTypeInner(); + } + + @Nullable + abstract String getFileSetSpecTypeInner(); + @Nullable public abstract ImmutableList getSourceUrisImmut(); + /** + * Returns the object metadata. + * + * @see + * ObjectMetadata + */ + @Nullable + public String getObjectMetadata() { + return getObjectMetadataInner(); + } + + @Nullable + abstract String getObjectMetadataInner(); + + /** + * Returns the metadata cache mode. + * + * @see + * MetadataCacheMode + */ + @Nullable + public String getMetadataCacheMode() { + return getMetadataCacheModeInner(); + } + + @Nullable + abstract String getMetadataCacheModeInner(); + + /** + * Returns the maximum staleness of data that could be returned when the table is queried. + * Staleness encoded as a string encoding of sql IntervalValue type. + * + * @see + * MaxStaleness + */ + @Nullable + public String getMaxStaleness() { + return getMaxStalenessInner(); + } + + @Nullable + abstract String getMaxStalenessInner(); + /** * Returns the source format, and possibly some parsing options, of the external data. Supported * formats are {@code CSV} and {@code NEWLINE_DELIMITED_JSON}. @@ -229,6 +432,9 @@ public F getFormatOptions() { @Nullable abstract FormatOptions getFormatOptionsInner(); + @Nullable + public abstract ImmutableList getDecimalTargetTypes(); + /** * [Experimental] Returns whether automatic detection of schema and format options should be * performed. 
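A sketch combining several of the new parsing controls above on an external CSV table; the URI and format strings are illustrative (ImmutableList is Guava's com.google.common.collect.ImmutableList):

    ExternalTableDefinition tableDef =
        ExternalTableDefinition.newBuilder("gs://my-bucket/data/*.csv", FormatOptions.csv())
            .setSourceColumnMatch(ExternalTableDefinition.SourceColumnMatch.NAME)
            .setNullMarkers(ImmutableList.of("\\N", "NULL"))
            .setTimeZone("America/Los_Angeles")
            .setDateFormat("YYYY-MM-DD")
            .setTimestampFormat("YYYY-MM-DD HH24:MI:SS")
            .build();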
@@ -236,6 +442,9 @@ public F getFormatOptions() { @Nullable public abstract Boolean getAutodetect(); + @Nullable + public abstract String getReferenceFileSchemaUri(); + /** * [Experimental] Returns the HivePartitioningOptions when the data layout follows Hive * partitioning convention @@ -246,6 +455,37 @@ public HivePartitioningOptions getHivePartitioningOptions() { return getHivePartitioningOptionsInner(); } + /** + * Returns the time zone used when parsing timestamp values that don't have specific time zone + * information. + */ + @Nullable + public abstract String getTimeZone(); + + /** Returns the format used to parse DATE values. */ + @Nullable + public abstract String getDateFormat(); + + /** Returns the format used to parse DATETIME values. */ + @Nullable + public abstract String getDatetimeFormat(); + + /** Returns the format used to parse TIME values. */ + @Nullable + public abstract String getTimeFormat(); + + /** Returns the format used to parse TIMESTAMP values. */ + @Nullable + public abstract String getTimestampFormat(); + + /** Returns the strategy used to match loaded columns to the schema, either POSITION or NAME. */ + @Nullable + public abstract SourceColumnMatch getSourceColumnMatch(); + + /** Returns a list of strings represented as SQL NULL value in a CSV file. */ + @Nullable + public abstract List getNullMarkers(); + @Nullable abstract HivePartitioningOptions getHivePartitioningOptionsInner(); @@ -256,6 +496,9 @@ public HivePartitioningOptions getHivePartitioningOptions() { com.google.api.services.bigquery.model.Table toPb() { Table tablePb = super.toPb(); tablePb.setExternalDataConfiguration(toExternalDataConfigurationPb()); + if (getMaxStaleness() != null) { + tablePb.setMaxStaleness(getMaxStaleness()); + } return tablePb; } @@ -283,6 +526,15 @@ com.google.api.services.bigquery.model.ExternalDataConfiguration toExternalDataC if (getSourceUris() != null) { externalConfigurationPb.setSourceUris(getSourceUris()); } + if (getDecimalTargetTypes() != null) { + externalConfigurationPb.setDecimalTargetTypes(getDecimalTargetTypes()); + } + if (getFormatOptions() != null && FormatOptions.PARQUET.equals(getFormatOptions().getType())) { + externalConfigurationPb.setParquetOptions(((ParquetOptions) getFormatOptions()).toPb()); + } + if (getFormatOptions() != null && FormatOptions.AVRO.equals(getFormatOptions().getType())) { + externalConfigurationPb.setAvroOptions(((AvroOptions) getFormatOptions()).toPb()); + } if (getFormatOptions() != null && FormatOptions.CSV.equals(getFormatOptions().getType())) { externalConfigurationPb.setCsvOptions(((CsvOptions) getFormatOptions()).toPb()); } @@ -297,9 +549,48 @@ com.google.api.services.bigquery.model.ExternalDataConfiguration toExternalDataC if (getAutodetect() != null) { externalConfigurationPb.setAutodetect(getAutodetect()); } + if (getReferenceFileSchemaUri() != null) { + externalConfigurationPb.setReferenceFileSchemaUri(getReferenceFileSchemaUri()); + } + if (getHivePartitioningOptions() != null) { externalConfigurationPb.setHivePartitioningOptions(getHivePartitioningOptions().toPb()); } + if (getFileSetSpecType() != null) { + externalConfigurationPb.setFileSetSpecType(getFileSetSpecType()); + } + + if (getObjectMetadata() != null) { + externalConfigurationPb.setObjectMetadata(getObjectMetadata()); + } + + if (getMetadataCacheMode() != null) { + externalConfigurationPb.setMetadataCacheMode(getMetadataCacheMode()); + } + if (getTimeZone() != null) { + externalConfigurationPb.setTimeZone(getTimeZone()); + } + if (getDateFormat() != 
null) { + externalConfigurationPb.setDateFormat(getDateFormat()); + } + if (getDatetimeFormat() != null) { + externalConfigurationPb.setDatetimeFormat(getDatetimeFormat()); + } + if (getTimeFormat() != null) { + externalConfigurationPb.setTimeFormat(getTimeFormat()); + } + if (getTimestampFormat() != null) { + externalConfigurationPb.setTimestampFormat(getTimestampFormat()); + } + if (getSourceColumnMatch() != null) { + externalConfigurationPb + .getCsvOptions() + .setSourceColumnMatch(getSourceColumnMatch().toString()); + } + if (getNullMarkers() != null) { + externalConfigurationPb.getCsvOptions().setNullMarkers(getNullMarkers()); + } + return externalConfigurationPb; } @@ -364,6 +655,24 @@ public static Builder newBuilder(String sourceUri, FormatOptions format) { return newBuilder().setSourceUris(ImmutableList.of(sourceUri)).setFormatOptions(format); } + /** + * Creates a builder for an ExternalTableDefinition object. + * + * @param sourceUri the fully-qualified URIs that point to your data in Google Cloud. For Google + * Cloud Bigtable URIs: Exactly one URI can be specified and it has be a fully specified and + * valid HTTPS URL for a Google Cloud Bigtable table. Size limits related to load jobs apply + * to external data sources, plus an additional limit of 10 GB maximum size across all URIs. + * @return a builder for an ExternalTableDefinition object given source URIs and format + * @see Quota + * @see + * Source Format + */ + public static Builder newBuilder(String sourceUri) { + checkArgument(!isNullOrEmpty(sourceUri), "Provided sourceUri is null or empty"); + return newBuilder().setSourceUris(ImmutableList.of(sourceUri)); + } + /** * Creates an ExternalTableDefinition object. * @@ -430,6 +739,10 @@ static ExternalTableDefinition fromPb(Table tablePb) { if (externalDataConfiguration.getSourceUris() != null) { builder.setSourceUris(ImmutableList.copyOf(externalDataConfiguration.getSourceUris())); } + if (externalDataConfiguration.getDecimalTargetTypes() != null) { + builder.setDecimalTargetTypes( + ImmutableList.copyOf(externalDataConfiguration.getDecimalTargetTypes())); + } if (externalDataConfiguration.getSourceFormat() != null) { builder.setFormatOptions(FormatOptions.of(externalDataConfiguration.getSourceFormat())); } @@ -438,6 +751,9 @@ static ExternalTableDefinition fromPb(Table tablePb) { builder.setConnectionId(externalDataConfiguration.getConnectionId()); } builder.setIgnoreUnknownValues(externalDataConfiguration.getIgnoreUnknownValues()); + if (externalDataConfiguration.getAvroOptions() != null) { + builder.setFormatOptions(AvroOptions.fromPb(externalDataConfiguration.getAvroOptions())); + } if (externalDataConfiguration.getCsvOptions() != null) { builder.setFormatOptions(CsvOptions.fromPb(externalDataConfiguration.getCsvOptions())); } @@ -449,12 +765,56 @@ static ExternalTableDefinition fromPb(Table tablePb) { builder.setFormatOptions( BigtableOptions.fromPb(externalDataConfiguration.getBigtableOptions())); } + if (externalDataConfiguration.getParquetOptions() != null) { + builder.setFormatOptions( + ParquetOptions.fromPb(externalDataConfiguration.getParquetOptions())); + } builder.setMaxBadRecords(externalDataConfiguration.getMaxBadRecords()); builder.setAutodetect(externalDataConfiguration.getAutodetect()); if (externalDataConfiguration.getHivePartitioningOptions() != null) { builder.setHivePartitioningOptions( HivePartitioningOptions.fromPb(externalDataConfiguration.getHivePartitioningOptions())); } + if (externalDataConfiguration.getReferenceFileSchemaUri() != null) { 
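+ // When set, the reference file supplies the table schema for AVRO, PARQUET, or ORC sources.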
+ builder.setReferenceFileSchemaUri(externalDataConfiguration.getReferenceFileSchemaUri()); + } + if (externalDataConfiguration.getFileSetSpecType() != null) { + builder.setFileSetSpecType(externalDataConfiguration.getFileSetSpecType()); + } + if (externalDataConfiguration.getObjectMetadata() != null) { + builder.setObjectMetadata(externalDataConfiguration.getObjectMetadata()); + } + if (externalDataConfiguration.getMetadataCacheMode() != null) { + builder.setMetadataCacheMode(externalDataConfiguration.getMetadataCacheMode()); + } + if (tablePb.getMaxStaleness() != null) { + builder.setMaxStaleness(tablePb.getMaxStaleness()); + } + if (externalDataConfiguration.getTimeZone() != null) { + builder.setTimeZone(externalDataConfiguration.getTimeZone()); + } + if (externalDataConfiguration.getDateFormat() != null) { + builder.setDateFormat(externalDataConfiguration.getDateFormat()); + } + if (externalDataConfiguration.getDatetimeFormat() != null) { + builder.setDatetimeFormat(externalDataConfiguration.getDatetimeFormat()); + } + if (externalDataConfiguration.getTimeFormat() != null) { + builder.setTimeFormat(externalDataConfiguration.getTimeFormat()); + } + if (externalDataConfiguration.getTimestampFormat() != null) { + builder.setTimestampFormat(externalDataConfiguration.getTimestampFormat()); + } + if (externalDataConfiguration.getCsvOptions() != null) { + if (externalDataConfiguration.getCsvOptions().getSourceColumnMatch() != null) { + builder.setSourceColumnMatch( + SourceColumnMatch.valueOf( + externalDataConfiguration.getCsvOptions().getSourceColumnMatch())); + } + if (externalDataConfiguration.getCsvOptions().getNullMarkers() != null) { + builder.setNullMarkers(externalDataConfiguration.getCsvOptions().getNullMarkers()); + } + } } return builder.build(); } @@ -465,6 +825,9 @@ static ExternalTableDefinition fromExternalDataConfiguration( if (externalDataConfiguration.getSourceUris() != null) { builder.setSourceUris(externalDataConfiguration.getSourceUris()); } + if (externalDataConfiguration.getDecimalTargetTypes() != null) { + builder.setDecimalTargetTypes(externalDataConfiguration.getDecimalTargetTypes()); + } if (externalDataConfiguration.getSchema() != null) { builder.setSchema(Schema.fromPb(externalDataConfiguration.getSchema())); } @@ -480,6 +843,9 @@ static ExternalTableDefinition fromExternalDataConfiguration( if (externalDataConfiguration.getIgnoreUnknownValues() != null) { builder.setIgnoreUnknownValues(externalDataConfiguration.getIgnoreUnknownValues()); } + if (externalDataConfiguration.getAvroOptions() != null) { + builder.setFormatOptions(AvroOptions.fromPb(externalDataConfiguration.getAvroOptions())); + } if (externalDataConfiguration.getCsvOptions() != null) { builder.setFormatOptions(CsvOptions.fromPb(externalDataConfiguration.getCsvOptions())); } @@ -491,16 +857,60 @@ static ExternalTableDefinition fromExternalDataConfiguration( builder.setFormatOptions( BigtableOptions.fromPb(externalDataConfiguration.getBigtableOptions())); } + if (externalDataConfiguration.getParquetOptions() != null) { + builder.setFormatOptions( + ParquetOptions.fromPb(externalDataConfiguration.getParquetOptions())); + } if (externalDataConfiguration.getMaxBadRecords() != null) { builder.setMaxBadRecords(externalDataConfiguration.getMaxBadRecords()); } if (externalDataConfiguration.getAutodetect() != null) { builder.setAutodetect(externalDataConfiguration.getAutodetect()); } + if (externalDataConfiguration.getReferenceFileSchemaUri() != null) { + 
builder.setReferenceFileSchemaUri(externalDataConfiguration.getReferenceFileSchemaUri()); + } if (externalDataConfiguration.getHivePartitioningOptions() != null) { builder.setHivePartitioningOptions( HivePartitioningOptions.fromPb(externalDataConfiguration.getHivePartitioningOptions())); } + if (externalDataConfiguration.getFileSetSpecType() != null) { + builder.setFileSetSpecType(externalDataConfiguration.getFileSetSpecType()); + } + + if (externalDataConfiguration.getObjectMetadata() != null) { + builder.setObjectMetadata(externalDataConfiguration.getObjectMetadata()); + } + + if (externalDataConfiguration.getMetadataCacheMode() != null) { + builder.setMetadataCacheMode(externalDataConfiguration.getMetadataCacheMode()); + } + if (externalDataConfiguration.getTimeZone() != null) { + builder.setTimeZone(externalDataConfiguration.getTimeZone()); + } + if (externalDataConfiguration.getDateFormat() != null) { + builder.setDateFormat(externalDataConfiguration.getDateFormat()); + } + if (externalDataConfiguration.getDatetimeFormat() != null) { + builder.setDatetimeFormat(externalDataConfiguration.getDatetimeFormat()); + } + if (externalDataConfiguration.getTimeFormat() != null) { + builder.setTimeFormat(externalDataConfiguration.getTimeFormat()); + } + if (externalDataConfiguration.getTimestampFormat() != null) { + builder.setTimestampFormat(externalDataConfiguration.getTimestampFormat()); + } + if (externalDataConfiguration.getCsvOptions() != null) { + if (externalDataConfiguration.getCsvOptions().getSourceColumnMatch() != null) { + builder.setSourceColumnMatch( + SourceColumnMatch.valueOf( + externalDataConfiguration.getCsvOptions().getSourceColumnMatch())); + } + if (externalDataConfiguration.getCsvOptions().getNullMarkers() != null) { + builder.setNullMarkers(externalDataConfiguration.getCsvOptions().getNullMarkers()); + } + } + return builder.build(); } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExtractJobConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExtractJobConfiguration.java index 29a256e9eb..d79959ee04 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExtractJobConfiguration.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExtractJobConfiguration.java @@ -47,6 +47,7 @@ public final class ExtractJobConfiguration extends JobConfiguration { private final Boolean useAvroLogicalTypes; private final Map labels; private final Long jobTimeoutMs; + private final String reservation; public static final class Builder extends JobConfiguration.Builder { @@ -61,6 +62,7 @@ public static final class Builder private Boolean useAvroLogicalTypes; private Map labels; private Long jobTimeoutMs; + private String reservation; private Builder() { super(Type.EXTRACT); @@ -78,6 +80,7 @@ private Builder(ExtractJobConfiguration jobInfo) { this.useAvroLogicalTypes = jobInfo.useAvroLogicalTypes; this.labels = jobInfo.labels; this.jobTimeoutMs = jobInfo.jobTimeoutMs; + this.reservation = jobInfo.reservation; } private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { @@ -101,6 +104,9 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur if (configurationPb.getJobTimeoutMs() != null) { this.jobTimeoutMs = configurationPb.getJobTimeoutMs(); } + if (configurationPb.getReservation() != null) { + this.reservation = configurationPb.getReservation(); + } } /** Sets the table to export. 
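The reservation field added below follows the job-configuration pattern used elsewhere in this PR; a usage sketch with a placeholder reservation path:

    ExtractJobConfiguration extractConfig =
        ExtractJobConfiguration.newBuilder(
                TableId.of("my_dataset", "my_table"), "gs://my-bucket/extract/*.csv")
            .setReservation("projects/my-project/locations/us/reservations/my-reservation")
            .build();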
*/ @@ -198,6 +204,19 @@ public Builder setJobTimeoutMs(Long jobTimeoutMs) { return this; } + /** + * [Optional] The reservation that job would use. User can specify a reservation to execute the + * job. If reservation is not set, reservation is determined based on the rules defined by the + * reservation assignments. The expected format is + * `projects/{project}/locations/{location}/reservations/{reservation}`. + * + * @param reservation reservation or {@code null} for none + */ + public Builder setReservation(String reservation) { + this.reservation = reservation; + return this; + } + public ExtractJobConfiguration build() { return new ExtractJobConfiguration(this); } @@ -215,6 +234,7 @@ private ExtractJobConfiguration(Builder builder) { this.useAvroLogicalTypes = builder.useAvroLogicalTypes; this.labels = builder.labels; this.jobTimeoutMs = builder.jobTimeoutMs; + this.reservation = builder.reservation; } /** Returns the table to export. */ @@ -274,6 +294,11 @@ public Long getJobTimeoutMs() { return jobTimeoutMs; } + /** Returns the reservation associated with this job */ + public String getReservation() { + return reservation; + } + @Override public Builder toBuilder() { return new Builder(this); @@ -291,7 +316,8 @@ ToStringHelper toStringHelper() { .add("compression", compression) .add("useAvroLogicalTypes", useAvroLogicalTypes) .add("labels", labels) - .add("jobTimeoutMs", jobTimeoutMs); + .add("jobTimeoutMs", jobTimeoutMs) + .add("reservation", reservation); } @Override @@ -313,7 +339,8 @@ public int hashCode() { compression, useAvroLogicalTypes, labels, - jobTimeoutMs); + jobTimeoutMs, + reservation); } @Override @@ -350,6 +377,9 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { if (jobTimeoutMs != null) { jobConfiguration.setJobTimeoutMs(jobTimeoutMs); } + if (reservation != null) { + jobConfiguration.setReservation(reservation); + } jobConfiguration.setExtract(extractConfigurationPb); return jobConfiguration; } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java index 1213bc4e8b..88e09c5c48 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java @@ -25,6 +25,7 @@ import com.google.api.services.bigquery.model.TableFieldSchema; import com.google.common.base.Function; import com.google.common.base.MoreObjects; +import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import java.io.Serializable; import java.util.List; @@ -59,6 +60,13 @@ public TableFieldSchema apply(Field field) { private final String mode; private final String description; private final PolicyTags policyTags; + private final Long maxLength; + private final Long scale; + private final Long precision; + private final Long timestampPrecision; + private final String defaultValueExpression; + private final String collation; + private final FieldElementType rangeElementType; /** * Mode for a BigQuery Table field. 
{@link Mode#NULLABLE} fields can be set to {@code null}, @@ -79,6 +87,13 @@ public static final class Builder { private String mode; private String description; private PolicyTags policyTags; + private Long maxLength; + private Long scale; + private Long precision; + private Long timestampPrecision; + private String defaultValueExpression; + private String collation; + private FieldElementType rangeElementType; private Builder() {} @@ -89,6 +104,13 @@ private Builder(Field field) { this.mode = field.mode; this.description = field.description; this.policyTags = field.policyTags; + this.maxLength = field.maxLength; + this.scale = field.scale; + this.precision = field.precision; + this.timestampPrecision = field.timestampPrecision; + this.defaultValueExpression = field.defaultValueExpression; + this.collation = field.collation; + this.rangeElementType = field.rangeElementType; } /** @@ -183,7 +205,7 @@ public Builder setType(StandardSQLTypeName type, FieldList subFields) { /** Sets the mode of the field. When not specified {@link Mode#NULLABLE} is used. */ public Builder setMode(Mode mode) { - this.mode = mode != null ? mode.name() : Data.nullOf(String.class); + this.mode = mode != null ? mode.name() : null; return this; } @@ -199,6 +221,103 @@ public Builder setPolicyTags(PolicyTags policyTags) { return this; } + /** + * Sets the maximum length of the field for STRING or BYTES type. + * + *

    It is invalid to set value for types other than STRING or BYTES. + * + *

    For STRING type, this represents the maximum UTF-8 length of strings allowed in the field. + * For BYTES type, this represents the maximum number of bytes in the field. + */ + public Builder setMaxLength(Long maxLength) { + this.maxLength = maxLength; + return this; + } + + /** + * Scale can be used to constrain the maximum number of digits in the fractional part of a + * NUMERIC or BIGNUMERIC type. If the Scale value is set, the Precision value must be set as + * well. It is invalid to set values for Scale for types other than NUMERIC or BIGNUMERIC. See + * the Precision field for additional guidance about valid values. + */ + public Builder setScale(Long scale) { + this.scale = scale; + return this; + } + + /** + * Precision can be used to constrain the maximum number of total digits allowed for NUMERIC or + * BIGNUMERIC types. It is invalid to set values for Precision for types other than // NUMERIC + * or BIGNUMERIC. For NUMERIC type, acceptable values for Precision must be: 1 ≤ (Precision - + * Scale) ≤ 29. Values for Scale must be: 0 ≤ Scale ≤ 9. For BIGNUMERIC type, acceptable values + * for Precision must be: 1 ≤ (Precision - Scale) ≤ 38. Values for Scale must be: 0 ≤ Scale ≤ + * 38. + */ + public Builder setPrecision(Long precision) { + this.precision = precision; + return this; + } + + /** + * Specifies the precision for TIMESTAMP types. + * + *

    The default value is 6. Possible values are 6 (microsecond) or 12 (picosecond). + */ + public Builder setTimestampPrecision(Long timestampPrecision) { + Preconditions.checkArgument( + timestampPrecision == 6L || timestampPrecision == 12L, + "Timestamp Precision must be 6 (microsecond) or 12 (picosecond)"); + this.timestampPrecision = timestampPrecision; + return this; + } + + /** + * DefaultValueExpression is used to specify the default value of a field using a SQL + * expression. It can only be set for top level fields (columns). + * + *

    You can use struct or array expression to specify default value for the entire struct or + * array. The valid SQL expressions are: + * + *

    +     *   Literals for all data types, including STRUCT and ARRAY.
    +     *   The following functions:
    +     *      - CURRENT_TIMESTAMP
    +     *      - CURRENT_TIME
    +     *      - CURRENT_DATE
    +     *      - CURRENT_DATETIME
    +     *      - GENERATE_UUID
    +     *      - RAND
    +     *      - SESSION_USER
    +     *      - ST_GEOGPOINT
    +     *
    +     *   Struct or array composed with the above allowed functions, for example:
    +     *      "[CURRENT_DATE(), DATE '2020-01-01']"
    +     * 
    + */ + public Builder setDefaultValueExpression(String defaultValueExpression) { + this.defaultValueExpression = defaultValueExpression; + return this; + } + + /** + * Optional. Field collation can be set only when the type of field is STRING. The following + * values are supported: + * + *

    * 'und:ci': undetermined locale, case insensitive. * '': empty string. Default to + * case-sensitive behavior. (-- A wrapper is used here because it is possible to set the value + * to the empty string. --) + */ + public Builder setCollation(String collation) { + this.collation = collation; + return this; + } + + /** Optional. Field range element type can be set only when the type of field is RANGE. */ + public Builder setRangeElementType(FieldElementType rangeElementType) { + this.rangeElementType = rangeElementType; + return this; + } + /** Creates a {@code Field} object. */ public Field build() { return new Field(this); @@ -212,6 +331,13 @@ private Field(Builder builder) { this.mode = builder.mode; this.description = builder.description; this.policyTags = builder.policyTags; + this.maxLength = builder.maxLength; + this.scale = builder.scale; + this.precision = builder.precision; + this.timestampPrecision = builder.timestampPrecision; + this.defaultValueExpression = builder.defaultValueExpression; + this.collation = builder.collation; + this.rangeElementType = builder.rangeElementType; } /** Returns the field name. */ @@ -244,6 +370,43 @@ public PolicyTags getPolicyTags() { return policyTags; } + /** Returns the maximum length of the field for STRING or BYTES type. */ + public Long getMaxLength() { + return maxLength; + } + + /** + * Returns the maximum number of digits set in the fractional part of a NUMERIC or BIGNUMERIC + * type. + */ + public Long getScale() { + return scale; + } + + /** Returns the maximum number of total digits allowed for NUMERIC or BIGNUMERIC types. */ + public Long getPrecision() { + return precision; + } + + /** Returns the precision for TIMESTAMP type. */ + public Long getTimestampPrecision() { + return timestampPrecision; + } + + /** Return the default value of the field. */ + public String getDefaultValueExpression() { + return defaultValueExpression; + } + + public String getCollation() { + return collation; + } + + /** Return the range element type the field. */ + public FieldElementType getRangeElementType() { + return rangeElementType; + } + /** * Returns the list of sub-fields if {@link #getType()} is a {@link LegacySQLTypeName#RECORD}. * Returns {@code null} otherwise. 
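A sketch of columns using several of the new field constraints above; names and values are illustrative, and the RANGE column assumes a StandardSQLTypeName.RANGE constant is available:

    Field price =
        Field.newBuilder("price", StandardSQLTypeName.NUMERIC)
            .setPrecision(10L)
            .setScale(2L) // scale requires precision to be set as well
            .setDefaultValueExpression("0") // top-level columns only
            .build();
    Field period =
        Field.newBuilder("period", StandardSQLTypeName.RANGE)
            .setRangeElementType(FieldElementType.newBuilder().setType("DATE").build())
            .build();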
@@ -265,12 +428,31 @@ public String toString() { .add("mode", mode) .add("description", description) .add("policyTags", policyTags) + .add("maxLength", maxLength) + .add("scale", scale) + .add("precision", precision) + .add("timestampPrecision", timestampPrecision) + .add("defaultValueExpression", defaultValueExpression) + .add("collation", collation) + .add("rangeElementType", rangeElementType) .toString(); } @Override public int hashCode() { - return Objects.hash(name, type, mode, description, policyTags); + return Objects.hash( + name, + type, + mode, + description, + policyTags, + maxLength, + scale, + precision, + timestampPrecision, + defaultValueExpression, + collation, + rangeElementType); } @Override @@ -335,10 +517,31 @@ TableFieldSchema toPb() { if (policyTags != null) { fieldSchemaPb.setPolicyTags(policyTags.toPb()); } + if (maxLength != null) { + fieldSchemaPb.setMaxLength(maxLength); + } + if (scale != null) { + fieldSchemaPb.setScale(scale); + } + if (precision != null) { + fieldSchemaPb.setPrecision(precision); + } + if (timestampPrecision != null) { + fieldSchemaPb.setTimestampPrecision(timestampPrecision); + } + if (defaultValueExpression != null) { + fieldSchemaPb.setDefaultValueExpression(defaultValueExpression); + } if (getSubFields() != null) { List fieldsPb = Lists.transform(getSubFields(), TO_PB_FUNCTION); fieldSchemaPb.setFields(fieldsPb); } + if (collation != null) { + fieldSchemaPb.setCollation(collation); + } + if (rangeElementType != null) { + fieldSchemaPb.setRangeElementType(rangeElementType.toPb()); + } return fieldSchemaPb; } @@ -354,11 +557,33 @@ static Field fromPb(TableFieldSchema fieldSchemaPb) { if (fieldSchemaPb.getPolicyTags() != null) { fieldBuilder.setPolicyTags(PolicyTags.fromPb(fieldSchemaPb.getPolicyTags())); } + if (fieldSchemaPb.getMaxLength() != null) { + fieldBuilder.setMaxLength(fieldSchemaPb.getMaxLength()); + } + if (fieldSchemaPb.getScale() != null) { + fieldBuilder.setScale(fieldSchemaPb.getScale()); + } + if (fieldSchemaPb.getPrecision() != null) { + fieldBuilder.setPrecision(fieldSchemaPb.getPrecision()); + } + if (fieldSchemaPb.getTimestampPrecision() != null) { + fieldBuilder.setTimestampPrecision(fieldSchemaPb.getTimestampPrecision()); + } + if (fieldSchemaPb.getDefaultValueExpression() != null) { + fieldBuilder.setDefaultValueExpression(fieldSchemaPb.getDefaultValueExpression()); + } FieldList subFields = fieldSchemaPb.getFields() != null ? FieldList.of(Lists.transform(fieldSchemaPb.getFields(), FROM_PB_FUNCTION)) : null; fieldBuilder.setType(LegacySQLTypeName.valueOf(fieldSchemaPb.getType()), subFields); + if (fieldSchemaPb.getCollation() != null) { + fieldBuilder.setCollation(fieldSchemaPb.getCollation()); + } + if (fieldSchemaPb.getRangeElementType() != null) { + fieldBuilder.setRangeElementType( + FieldElementType.fromPb(fieldSchemaPb.getRangeElementType())); + } return fieldBuilder.build(); } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldElementType.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldElementType.java new file mode 100644 index 0000000000..de601151e1 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldElementType.java @@ -0,0 +1,74 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery; + +import com.google.api.services.bigquery.model.QueryParameterType; +import com.google.api.services.bigquery.model.TableFieldSchema; +import com.google.auto.value.AutoValue; +import java.io.Serializable; +import javax.annotation.Nullable; + +@AutoValue +public abstract class FieldElementType implements Serializable { + + private static final long serialVersionUID = 1L; + + /** + * The subtype of the RANGE, if the field type is RANGE. + * + * @return value or {@code null} for none + */ + @Nullable + public abstract String getType(); + + public abstract FieldElementType.Builder toBuilder(); + + @AutoValue.Builder + public abstract static class Builder { + + public abstract FieldElementType.Builder setType(String type); + + public abstract FieldElementType build(); + } + + public static Builder newBuilder() { + return new AutoValue_FieldElementType.Builder(); + } + + TableFieldSchema.RangeElementType toPb() { + TableFieldSchema.RangeElementType rangeElementTypePb = new TableFieldSchema.RangeElementType(); + rangeElementTypePb.setType(getType()); + return rangeElementTypePb; + } + + static FieldElementType fromPb(TableFieldSchema.RangeElementType rangeElementTypePb) { + // Treat a FieldElementType message without a Type subfield as invalid. + if (rangeElementTypePb.getType() != null) { + return newBuilder().setType(rangeElementTypePb.getType()).build(); + } + return null; + } + + /** Creates an instance of FieldElementType from QueryParameterType with RangeElementType. */ + static FieldElementType fromPb(QueryParameterType queryParameterTypePb) { + // Treat a FieldElementType message without a Type subfield as invalid. + if ((queryParameterTypePb.getRangeElementType() != null) + && (queryParameterTypePb.getRangeElementType().getType() != null)) { + return newBuilder().setType(queryParameterTypePb.getRangeElementType().getType()).build(); + } + return null; + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValue.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValue.java index a2ce187678..d11df4b956 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValue.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValue.java @@ -18,6 +18,7 @@ import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; +import static java.time.temporal.ChronoUnit.MICROS; import com.google.api.client.util.Data; import com.google.api.core.BetaApi; @@ -25,9 +26,18 @@ import com.google.common.io.BaseEncoding; import java.io.Serializable; import java.math.BigDecimal; +import java.math.BigInteger; +import java.math.RoundingMode; +import java.time.Duration; +import java.time.Instant; +import java.time.Period; +import java.time.format.DateTimeParseException; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import org.threeten.extra.PeriodDuration; /** * Google BigQuery Table Field Value class. 
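The `FieldElementType` class above carries the element type of a RANGE field. A minimal sketch of declaring a `RANGE<DATE>` schema field with it, assuming `StandardSQLTypeName` exposes a `RANGE` constant alongside the `LegacySQLTypeName#RANGE` referenced earlier:

```java
// Illustrative only: a RANGE<DATE> field declared via the new FieldElementType class.
Field bookingPeriod =
    Field.newBuilder("booking_period", StandardSQLTypeName.RANGE)
        .setRangeElementType(FieldElementType.newBuilder().setType("DATE").build())
        .build();
// Note: fromPb() deliberately returns null when the proto carries no element type,
// treating such a message as invalid.
```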
Objects of this class represent values of a BigQuery @@ -35,11 +45,13 @@ * query or when listing table data. */ public class FieldValue implements Serializable { + private static final int MICROSECONDS = 1000000; - private static final long serialVersionUID = 469098630191710061L; + private static final long serialVersionUID = 469098630191710062L; private final Attribute attribute; private final Object value; + private final Boolean useInt64Timestamps; /** The field value's attribute, giving information on the field's content type. */ public enum Attribute { @@ -57,12 +69,20 @@ public enum Attribute { REPEATED, /** A {@code FieldValue} for a field of type {@link LegacySQLTypeName#RECORD}. */ - RECORD + RECORD, + + /** A {@code FieldValue} for a field of type {@link LegacySQLTypeName#RANGE}. */ + RANGE } private FieldValue(Attribute attribute, Object value) { + this(attribute, value, false); + } + + private FieldValue(Attribute attribute, Object value, Boolean useInt64Timestamps) { this.attribute = checkNotNull(attribute); this.value = value; + this.useInt64Timestamps = useInt64Timestamps; } /** @@ -94,6 +114,10 @@ public Object getValue() { return value; } + public Boolean getUseInt64Timestamps() { + return useInt64Timestamps; + } + /** * Returns this field's value as a {@link String}. This method should only be used if the * corresponding field has primitive type ({@link LegacySQLTypeName#BYTES}, {@link @@ -110,6 +134,20 @@ public String getStringValue() { return (String) value; } + /** + * Returns this field's value as a {@link String}, or defaultValue if {@link #isNull()} returns + * {@code true}. See {@link #getStringValue()} for more details. + * + * @throws ClassCastException if the field is not a primitive type + */ + @SuppressWarnings("unchecked") + public String getStringValueOrDefault(String defaultValue) { + if (isNull()) { + return defaultValue; + } + return getStringValue(); + } + /** * Returns this field's value as a byte array. This method should only be used if the * corresponding field has primitive type ({@link LegacySQLTypeName#BYTES}. @@ -180,13 +218,34 @@ public boolean getBooleanValue() { */ @SuppressWarnings("unchecked") public long getTimestampValue() { + if (useInt64Timestamps) { + return new BigInteger(getStringValue()).longValue(); + } // timestamps are encoded in the format 1408452095.22 where the integer part is seconds since // epoch (e.g. 1408452095.22 == 2014-08-19 07:41:35.220 -05:00) BigDecimal secondsWithMicro = new BigDecimal(getStringValue()); - BigDecimal scaled = secondsWithMicro.scaleByPowerOfTen(6); + // Rounding the BigDecimal to the nearest whole number before setting the longValue in order to + // address TimeStamp rounding issue described in + // https://github.com/googleapis/java-bigquery/issues/1644 + BigDecimal scaled = secondsWithMicro.scaleByPowerOfTen(6).setScale(0, RoundingMode.HALF_UP); return scaled.longValue(); } + /** + * Returns this field's value as a {@code String}, representing a timestamp as an Instant. This + * method should only be used if the corresponding field has {@link LegacySQLTypeName#TIMESTAMP} + * type. 
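On the `getTimestampValue()` rounding change above, a worked example of the arithmetic. The REST API encodes timestamps as fractional seconds since epoch; `longValue()` alone truncates, which is the rounding issue cited from #1644:

```java
// "1408452095.22" seconds since epoch -> 1408452095220000 microseconds.
BigDecimal secondsWithMicro = new BigDecimal("1408452095.22");
long micros =
    secondsWithMicro.scaleByPowerOfTen(6).setScale(0, RoundingMode.HALF_UP).longValue();
// micros == 1408452095220000L (2014-08-19 07:41:35.220 -05:00)

// Why HALF_UP matters: "1.9999999" scales to 1999999.9, which bare longValue()
// would truncate to 1999999 instead of rounding to 2000000.

// With useInt64Timestamps the server already returns integer microseconds, so the
// new branch simply parses the string: new BigInteger("1408452095220000").longValue().
```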
+ * + * @throws ClassCastException if the field is not a primitive type + * @throws NumberFormatException if the field's value could not be converted to {@link Long} + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public Instant getTimestampInstant() { + checkNotNull(value); + return Instant.EPOCH.plus(getTimestampValue(), MICROS); + } + /** * Returns this field's value as a {@link java.math.BigDecimal}. This method should only be used * if the corresponding field has {@link LegacySQLTypeName#NUMERIC} type. @@ -201,6 +260,23 @@ public BigDecimal getNumericValue() { return new BigDecimal(getStringValue()); } + /** + * Returns this field's value as a {@link Range}. This method should only be used if the + * corresponding field has {@link LegacySQLTypeName#RANGE} type. + * + * @throws ClassCastException if the field is not a primitive type + * @throws IllegalArgumentException if the field's value could not be converted to {@link Range} + * @throws NullPointerException if {@link #isNull()} returns {@code true} + */ + @SuppressWarnings("unchecked") + public Range getRangeValue() { + if (attribute == Attribute.RANGE) { + return (Range) value; + } + // Provide best effort to convert value to Range object. + return Range.of(getStringValue()); + } + /** * Returns this field's value as a list of {@link FieldValue}. This method should only be used if * the corresponding field has {@link Field.Mode#REPEATED} mode (i.e. {@link #getAttribute()} is @@ -215,6 +291,28 @@ public List<FieldValue> getRepeatedValue() { return (List<FieldValue>) value; } + /** + * Returns this field's value as a {@link org.threeten.extra.PeriodDuration}. This method should + * be used if the corresponding field has {@link StandardSQLTypeName#INTERVAL} type, or if it is a + * legal canonical format "[sign]Y-M [sign]D [sign]H:M:S[.F]", e.g. "123-7 -19 0:24:12.000006" or + * ISO 8601. + * + * @throws ClassCastException if the field is not a primitive type + * @throws NullPointerException if {@link #isNull()} returns {@code true} + * @throws IllegalArgumentException if the field cannot be converted to a legal interval + */ + @SuppressWarnings("unchecked") + public PeriodDuration getPeriodDuration() { + checkNotNull(value); + try { + // Try parsing from ISO 8601 + return PeriodDuration.parse(getStringValue()); + } catch (DateTimeParseException dateTimeParseException) { + // Try parsing from canonical interval format + return parseCanonicalInterval(getStringValue()); + } + } + /** * Returns this field's value as a {@link FieldValueList} instance. This method should only be * used if the corresponding field has {@link LegacySQLTypeName#RECORD} type (i.e.
{@link @@ -233,12 +331,13 @@ public String toString() { return MoreObjects.toStringHelper(this) .add("attribute", attribute) .add("value", value) + .add("useInt64Timestamps", useInt64Timestamps) .toString(); } @Override public final int hashCode() { - return Objects.hash(attribute, value); + return Objects.hash(attribute, value, useInt64Timestamps); } @Override @@ -250,7 +349,9 @@ public final boolean equals(Object obj) { return false; } FieldValue other = (FieldValue) obj; - return attribute == other.attribute && Objects.equals(value, other.value); + return attribute == other.attribute + && Objects.equals(value, other.value) + && Objects.equals(useInt64Timestamps, other.useInt64Timestamps); } /** @@ -269,23 +370,38 @@ public final boolean equals(Object obj) { */ @BetaApi public static FieldValue of(Attribute attribute, Object value) { - return new FieldValue(attribute, value); + return of(attribute, value, false); + } + + @BetaApi + public static FieldValue of(Attribute attribute, Object value, Boolean useInt64Timestamps) { + return new FieldValue(attribute, value, useInt64Timestamps); } static FieldValue fromPb(Object cellPb) { - return fromPb(cellPb, null); + return fromPb(cellPb, null, false); } @SuppressWarnings("unchecked") - static FieldValue fromPb(Object cellPb, Field recordSchema) { + static FieldValue fromPb(Object cellPb, Field recordSchema, Boolean useInt64Timestamps) { if (Data.isNull(cellPb)) { - return FieldValue.of(Attribute.PRIMITIVE, null); + return FieldValue.of(Attribute.PRIMITIVE, null, useInt64Timestamps); } if (cellPb instanceof String) { - return FieldValue.of(Attribute.PRIMITIVE, cellPb); + if ((recordSchema != null) + && (recordSchema.getType() == LegacySQLTypeName.RANGE) + && (recordSchema.getRangeElementType() != null)) { + return FieldValue.of( + Attribute.RANGE, + Range.of((String) cellPb, recordSchema.getRangeElementType()), + useInt64Timestamps); + } + return FieldValue.of(Attribute.PRIMITIVE, cellPb, useInt64Timestamps); } if (cellPb instanceof List) { - return FieldValue.of(Attribute.REPEATED, FieldValueList.fromPb((List) cellPb, null)); + return FieldValue.of( + Attribute.REPEATED, + FieldValueList.fromPb((List) cellPb, null, useInt64Timestamps)); } if (cellPb instanceof Map) { Map cellMapPb = (Map) cellPb; @@ -293,14 +409,74 @@ static FieldValue fromPb(Object cellPb, Field recordSchema) { FieldList subFieldsSchema = recordSchema != null ? recordSchema.getSubFields() : null; return FieldValue.of( Attribute.RECORD, - FieldValueList.fromPb((List) cellMapPb.get("f"), subFieldsSchema)); + FieldValueList.fromPb( + (List) cellMapPb.get("f"), subFieldsSchema, useInt64Timestamps)); } // This should never be the case when we are processing a first level table field (i.e. a // row's field, not a record sub-field) if (cellMapPb.containsKey("v")) { - return FieldValue.fromPb(cellMapPb.get("v"), recordSchema); + return FieldValue.fromPb(cellMapPb.get("v"), recordSchema, useInt64Timestamps); } } throw new IllegalArgumentException("Unexpected table cell format"); } + + /** + * Parse interval in canonical format and create instance of {@code PeriodDuration}. + * + *

<p>The parameter {@code interval} should be an interval in the canonical format: "[sign]Y-M + * [sign]D [sign]H:M:S[.F]". More details + * here + * + * @throws IllegalArgumentException if the {@code interval} is not a valid interval + */ + static PeriodDuration parseCanonicalInterval(String interval) throws IllegalArgumentException { + // Pattern is [sign]Y-M [sign]D [sign]H:M:S[.F] + Pattern pattern = + Pattern.compile( + "(?<sign1>[+-])?(?<year>\\d+)-(?<month>\\d+) (?<sign2>[-|+])?(?<day>\\d+) (?<sign3>[-|+])?(?<hours>\\d+):(?<minutes>\\d+):(?<seconds>\\d+)(\\.(?<fraction>\\d+))?"); + Matcher matcher = pattern.matcher(interval); + if (!matcher.find()) { + throw new IllegalArgumentException(); + } + String sign1 = matcher.group("sign1"); + String year = matcher.group("year"); + String month = matcher.group("month"); + String sign2 = matcher.group("sign2"); + String day = matcher.group("day"); + String sign3 = matcher.group("sign3"); + String hours = matcher.group("hours"); + String minutes = matcher.group("minutes"); + String seconds = matcher.group("seconds"); + String fraction = matcher.group("fraction"); + + int yearInt = Integer.parseInt(year); + int monthInt = Integer.parseInt(month); + if (Objects.equals(sign1, "-")) { + yearInt *= -1; + monthInt *= -1; + } + + int dayInt = Integer.parseInt(day); + if (Objects.equals(sign2, "-")) { + dayInt *= -1; + } + if (sign3 == null) { + sign3 = ""; + } + + String durationString = + sign3 + + "PT" + + hours + + "H" + + minutes + + "M" + + seconds + + (fraction == null ? "" : "." + fraction) + + "S"; + + return PeriodDuration.of(Period.of(yearInt, monthInt, dayInt), Duration.parse(durationString)); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValueList.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValueList.java index 5035bb164a..18d2155a58 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValueList.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValueList.java @@ -112,6 +112,10 @@ FieldValueList withSchema(FieldList schema) { } static FieldValueList fromPb(List<Object> rowPb, FieldList schema) { + return fromPb(rowPb, schema, false); + } + + static FieldValueList fromPb(List<Object> rowPb, FieldList schema, Boolean useInt64Timestamps) { List<FieldValue> row = new ArrayList<>(rowPb.size()); if (schema != null) { if (schema.size() != rowPb.size()) { @@ -120,11 +124,11 @@ static FieldValueList fromPb(List<Object> rowPb, FieldList schema) { Iterator<Field> schemaIter = schema.iterator(); Iterator<Object> rowPbIter = rowPb.iterator(); while (rowPbIter.hasNext() && schemaIter.hasNext()) { - row.add(FieldValue.fromPb(rowPbIter.next(), schemaIter.next())); + row.add(FieldValue.fromPb(rowPbIter.next(), schemaIter.next(), useInt64Timestamps)); } } else { for (Object cellPb : rowPb) { - row.add(FieldValue.fromPb(cellPb, null)); + row.add(FieldValue.fromPb(cellPb, null, useInt64Timestamps)); } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ForeignKey.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ForeignKey.java new file mode 100644 index 0000000000..69c2f74346 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ForeignKey.java @@ -0,0 +1,110 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
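A worked example for `parseCanonicalInterval()` above: in the canonical form `"123-7 -19 0:24:12.000006"`, the first sign group covers both years and months, the second covers days, and the third covers the time portion. Since the method is package-private, the sketch goes through the public `getPeriodDuration()` path (values are illustrative):

```java
// ISO 8601 parsing fails on this string, so getPeriodDuration() falls back to the
// canonical-interval parser.
FieldValue fv = FieldValue.of(FieldValue.Attribute.PRIMITIVE, "123-7 -19 0:24:12.000006");
PeriodDuration pd = fv.getPeriodDuration();
// pd.getPeriod()   -> P123Y7M-19D      (Period.of(123, 7, -19))
// pd.getDuration() -> PT24M12.000006S  (Duration.parse("PT0H24M12.000006S"))
```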
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import com.google.common.annotations.VisibleForTesting; +import java.io.Serializable; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nullable; + +@AutoValue +public abstract class ForeignKey implements Serializable { + public static ForeignKey.Builder newBuilder() { + return new AutoValue_ForeignKey.Builder(); + } + + static ForeignKey fromPb( + com.google.api.services.bigquery.model.TableConstraints.ForeignKeys foreignKey) { + ForeignKey.Builder builder = newBuilder(); + + if (foreignKey.getName() != null) { + builder.setName(foreignKey.getName()); + } + + if (foreignKey.getReferencedTable() != null) { + com.google.api.services.bigquery.model.TableConstraints.ForeignKeys.ReferencedTable + referencedTable = foreignKey.getReferencedTable(); + builder.setReferencedTable( + TableId.of( + referencedTable.getProjectId(), + referencedTable.getDatasetId(), + referencedTable.getTableId())); + } + + if (foreignKey.getColumnReferences() != null) { + builder.setColumnReferences( + foreignKey.getColumnReferences().stream() + .map(ColumnReference::fromPb) + .collect(Collectors.toList())); + } + + return builder.build(); + } + + com.google.api.services.bigquery.model.TableConstraints.ForeignKeys toPb() { + + com.google.api.services.bigquery.model.TableConstraints.ForeignKeys foreignKey = + new com.google.api.services.bigquery.model.TableConstraints.ForeignKeys(); + if (getName() != null) { + foreignKey.setName(getName()); + } + if (getReferencedTable() != null) { + TableId referencedTableId = getReferencedTable(); + foreignKey.setReferencedTable( + new com.google.api.services.bigquery.model.TableConstraints.ForeignKeys.ReferencedTable() + .setTableId(referencedTableId.getTable()) + .setDatasetId(referencedTableId.getDataset()) + .setProjectId(referencedTableId.getProject())); + } + if (getColumnReferences() != null) { + foreignKey.setColumnReferences( + getColumnReferences().stream().map(ColumnReference::toPb).collect(Collectors.toList())); + } + return foreignKey; + } + + @Nullable + public abstract String getName(); + + @Nullable + public abstract TableId getReferencedTable(); + + @Nullable + public abstract List<ColumnReference> getColumnReferences(); + + /** Returns a builder for foreign key. */ + @VisibleForTesting + public abstract ForeignKey.Builder toBuilder(); + + @AutoValue.Builder + public abstract static class Builder { + + /** The name of the foreign key. */ + public abstract ForeignKey.Builder setName(String name); + + /** The table referenced by this foreign key. */ + public abstract ForeignKey.Builder setReferencedTable(TableId referencedTable); + + /** The set of column references for this foreign key. */ + public abstract ForeignKey.Builder setColumnReferences(List<ColumnReference> columnReferences); + + /** Creates a {@code ForeignKey} object.
*/ + public abstract ForeignKey build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FormatOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FormatOptions.java index af1878455c..c5acd48f6a 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FormatOptions.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FormatOptions.java @@ -43,6 +43,7 @@ public class FormatOptions implements Serializable { static final String GOOGLE_SHEETS = "GOOGLE_SHEETS"; static final String PARQUET = "PARQUET"; static final String ORC = "ORC"; + static final String ICEBERG = "ICEBERG"; private static final long serialVersionUID = -443376052020423691L; @@ -91,8 +92,8 @@ public static FormatOptions datastoreBackup() { } /** Default options for AVRO format. */ - public static FormatOptions avro() { - return new FormatOptions(AVRO); + public static AvroOptions avro() { + return AvroOptions.newBuilder().build(); } /** Default options for BIGTABLE format. */ @@ -107,7 +108,7 @@ public static FormatOptions googleSheets() { /** Default options for PARQUET format. */ public static FormatOptions parquet() { - return new FormatOptions(PARQUET); + return ParquetOptions.newBuilder().build(); } /** Default options for the ORC format. */ @@ -115,17 +116,28 @@ public static FormatOptions orc() { return new FormatOptions(ORC); } + /** Default options for the Apache Iceberg table format. */ + public static FormatOptions iceberg() { + return new FormatOptions(ICEBERG); + } + /** Default options for the provided format. */ public static FormatOptions of(String format) { checkArgument(!isNullOrEmpty(format), "Provided format is null or empty"); if (format.equals(CSV)) { return csv(); + } else if (format.equals(AVRO)) { + return avro(); } else if (format.equals(DATASTORE_BACKUP)) { return datastoreBackup(); } else if (format.equals(GOOGLE_SHEETS)) { return googleSheets(); } else if (format.equals(BIGTABLE)) { return bigtable(); + } else if (format.equals(PARQUET)) { + return parquet(); + } else if (format.equals(ICEBERG)) { + return iceberg(); } return new FormatOptions(format); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/HivePartitioningOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/HivePartitioningOptions.java index db6e434ce6..7df046ee11 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/HivePartitioningOptions.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/HivePartitioningOptions.java @@ -17,6 +17,7 @@ package com.google.cloud.bigquery; import com.google.common.base.MoreObjects; +import java.util.List; import java.util.Objects; /** HivePartitioningOptions currently supported types include: AVRO, CSV, JSON, ORC and Parquet. 
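Returning to the `FormatOptions` changes a few hunks up: `of()` now dispatches to the typed subclasses, so callers get format-specific defaults rather than a bare `FormatOptions`, and `ICEBERG` joins the named formats. A short sketch, grounded in the dispatch shown in the diff:

```java
// FormatOptions.of() now returns the typed subclasses for known formats.
FormatOptions avro = FormatOptions.of("AVRO");       // AvroOptions.newBuilder().build()
FormatOptions parquet = FormatOptions.of("PARQUET"); // ParquetOptions.newBuilder().build()
FormatOptions iceberg = FormatOptions.of("ICEBERG"); // same as FormatOptions.iceberg()
FormatOptions custom = FormatOptions.of("NEWLINE_DELIMITED_JSON"); // falls through untouched
```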
*/ @@ -25,12 +26,14 @@ public final class HivePartitioningOptions { private final String mode; private final Boolean requirePartitionFilter; private final String sourceUriPrefix; + private final List fields; public static final class Builder { private String mode; private Boolean requirePartitionFilter; private String sourceUriPrefix; + private List fields; private Builder() {} @@ -38,6 +41,7 @@ private Builder(HivePartitioningOptions options) { this.mode = options.mode; this.requirePartitionFilter = options.requirePartitionFilter; this.sourceUriPrefix = options.sourceUriPrefix; + this.fields = options.fields; } /** @@ -78,6 +82,19 @@ public Builder setSourceUriPrefix(String sourceUriPrefix) { return this; } + /** + * [Output-only] For permanent external tables, this field is populated with the hive partition + * keys in the order they were inferred. + * + *

    The types of the partition keys can be deduced by checking the table schema (which will + * include the partition keys). Not every API will populate this field in the output. For + * example, Tables.Get will populate it, but Tables.List will not contain this field. + */ + public Builder setFields(List fields) { + this.fields = fields; + return this; + } + /** Creates a {@link HivePartitioningOptions} object. */ public HivePartitioningOptions build() { return new HivePartitioningOptions(this); @@ -88,6 +105,7 @@ private HivePartitioningOptions(Builder builder) { this.mode = builder.mode; this.requirePartitionFilter = builder.requirePartitionFilter; this.sourceUriPrefix = builder.sourceUriPrefix; + this.fields = builder.fields; } /* Returns the mode of hive partitioning */ @@ -108,6 +126,11 @@ public String getSourceUriPrefix() { return sourceUriPrefix; } + /* Returns the fields of hive partitioning */ + public List getFields() { + return fields; + } + /** Returns a builder for the {@link HivePartitioningOptions} object. */ public Builder toBuilder() { return new Builder(this); @@ -135,13 +158,13 @@ public boolean equals(Object obj) { && Objects.equals(mode, ((HivePartitioningOptions) obj).getMode()) && Objects.equals( requirePartitionFilter, ((HivePartitioningOptions) obj).getRequirePartitionFilter()) - && Objects.equals( - sourceUriPrefix, ((HivePartitioningOptions) obj).getSourceUriPrefix()); + && Objects.equals(sourceUriPrefix, ((HivePartitioningOptions) obj).getSourceUriPrefix()) + && Objects.equals(fields, ((HivePartitioningOptions) obj).getFields()); } @Override public int hashCode() { - return Objects.hash(mode, sourceUriPrefix); + return Objects.hash(mode, sourceUriPrefix, fields); } com.google.api.services.bigquery.model.HivePartitioningOptions toPb() { @@ -150,6 +173,7 @@ com.google.api.services.bigquery.model.HivePartitioningOptions toPb() { options.setMode(mode); options.setRequirePartitionFilter(requirePartitionFilter); options.setSourceUriPrefix(sourceUriPrefix); + options.setFields(fields); return options; } @@ -165,6 +189,9 @@ static HivePartitioningOptions fromPb( if (options.getSourceUriPrefix() != null) { builder.setSourceUriPrefix(options.getSourceUriPrefix()); } + if (options.getFields() != null) { + builder.setFields(options.getFields()); + } return builder.build(); } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/IndexUnusedReason.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/IndexUnusedReason.java new file mode 100644 index 0000000000..bb4f0c3c32 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/IndexUnusedReason.java @@ -0,0 +1,135 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import java.io.Serializable; +import javax.annotation.Nullable; + +/** Represents Reason of why the index was not used in a SQL search. 
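Returning to the `HivePartitioningOptions` changes above: the new `fields` property is output-only, populated by the backend (e.g. `Tables.Get`) with the inferred partition keys, so client code normally only reads it. A minimal sketch, with an illustrative bucket path and assuming the class's existing `newBuilder()` factory:

```java
// Illustrative only: configure hive partitioning; the fields list is server-populated.
HivePartitioningOptions hiveOptions =
    HivePartitioningOptions.newBuilder()
        .setMode("AUTO")
        .setRequirePartitionFilter(true)
        .setSourceUriPrefix("gs://my-bucket/my-table/")
        .build();
List<String> inferredKeys = hiveOptions.getFields(); // null until returned by the server
```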
*/ +@AutoValue +public abstract class IndexUnusedReason implements Serializable { + + @AutoValue.Builder + public abstract static class Builder { + + /** + * Specifies the name of the unused search index, if available. + * + * @param indexName indexName or {@code null} for none + */ + public abstract Builder setIndexName(String indexName); + + /** + * Specifies the high-level reason for the scenario when no search index was used. + * + * @param code code or {@code null} for none + */ + public abstract Builder setCode(String code); + + /** + * Free-form human-readable reason for the scenario when no search index was used. + * + * @param message message or {@code null} for none + */ + public abstract Builder setMessage(String message); + + /** + * Specifies the base table involved in the reason that no search index was used. + * + * @param baseTable baseTable or {@code null} for none + */ + public abstract Builder setBaseTableId(TableId baseTable); + + /** Creates a {@code IndexUnusedReason} object. */ + public abstract IndexUnusedReason build(); + } + + public abstract Builder toBuilder(); + + public static Builder newBuilder() { + return new AutoValue_IndexUnusedReason.Builder(); + } + + /** + * Returns the name of the unused search index, if available. + * + * @return value or {@code null} for none + */ + @Nullable + public abstract String getIndexName(); + + /** + * Returns the high-level reason for the scenario when no search index was used. + * + * @return value or {@code null} for none + */ + @Nullable + public abstract String getCode(); + + /** + * Returns the free-form human-readable reason for the scenario when no search index was used. + * + * @return value or {@code null} for none + */ + @Nullable + public abstract String getMessage(); + + /** + * Returns the base table involved in the reason that no search index was used.
+ * + * @return value or {@code null} for none + */ + @Nullable + public abstract TableId getBaseTableId(); + + com.google.api.services.bigquery.model.IndexUnusedReason toPb() { + com.google.api.services.bigquery.model.IndexUnusedReason indexUnusedReason = + new com.google.api.services.bigquery.model.IndexUnusedReason(); + if (getIndexName() != null) { + indexUnusedReason.setIndexName(getIndexName()); + } + if (getCode() != null) { + indexUnusedReason.setCode(getCode()); + } + if (getMessage() != null) { + indexUnusedReason.setMessage(getMessage()); + } + if (getBaseTableId() != null) { + indexUnusedReason.setBaseTable(getBaseTableId().toPb()); + } + return indexUnusedReason; + } + + static IndexUnusedReason fromPb( + com.google.api.services.bigquery.model.IndexUnusedReason indexUnusedReason) { + Builder builder = newBuilder(); + if (indexUnusedReason.getIndexName() != null) { + builder.setIndexName(indexUnusedReason.getIndexName()); + } + if (indexUnusedReason.getCode() != null) { + builder.setCode(indexUnusedReason.getCode()); + } + if (indexUnusedReason.getMessage() != null) { + builder.setMessage(indexUnusedReason.getMessage()); + } + if (indexUnusedReason.getBaseTable() != null) { + builder.setBaseTableId(TableId.fromPb(indexUnusedReason.getBaseTable())); + } + return builder.build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/InsertAllRequest.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/InsertAllRequest.java index 53952bc6cf..ce93800987 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/InsertAllRequest.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/InsertAllRequest.java @@ -22,6 +22,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.Collections; import java.util.HashMap; @@ -479,4 +480,15 @@ public boolean equals(Object obj) { && Objects.equals(skipInvalidRows, other.skipInvalidRows) && Objects.equals(templateSuffix, other.templateSuffix); } + + private static String getFieldAsString(Object field) { + return field == null ?
"null" : field.toString(); + } + + public Attributes getOtelAttributes() { + return Attributes.builder() + .put("bq.insert_all.table", getFieldAsString(this.getTable().getTable())) + .put("bq.insert_all.template_suffix", getFieldAsString(this.getTemplateSuffix())) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Job.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Job.java index 6ef7eb9061..c64327500f 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Job.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Job.java @@ -21,6 +21,7 @@ import com.google.api.gax.retrying.BasicResultRetryAlgorithm; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.retrying.TimedAttemptSettings; +import com.google.cloud.PageImpl; import com.google.cloud.RetryHelper; import com.google.cloud.RetryOption; import com.google.cloud.bigquery.BigQuery.JobOption; @@ -28,15 +29,18 @@ import com.google.cloud.bigquery.BigQuery.TableDataListOption; import com.google.cloud.bigquery.JobConfiguration.Type; import com.google.common.collect.ImmutableList; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Scope; import java.io.IOException; import java.io.ObjectInputStream; +import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Objects; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; -import org.threeten.bp.Duration; /** * A Google BigQuery Job. @@ -51,20 +55,20 @@ public class Job extends JobInfo { private static final RetrySettings DEFAULT_JOB_WAIT_SETTINGS = RetrySettings.newBuilder() - .setTotalTimeout(Duration.ofHours(12L)) - .setInitialRetryDelay(Duration.ofSeconds(1L)) + .setTotalTimeoutDuration(Duration.ofHours(12L)) + .setInitialRetryDelayDuration(Duration.ofSeconds(1L)) .setRetryDelayMultiplier(2.0) .setJittered(true) - .setMaxRetryDelay(Duration.ofMinutes(1L)) + .setMaxRetryDelayDuration(Duration.ofMinutes(1L)) .build(); static final RetrySettings DEFAULT_QUERY_JOB_WAIT_SETTINGS = RetrySettings.newBuilder() - .setTotalTimeout(Duration.ofHours(12L)) - .setInitialRetryDelay(Duration.ofSeconds(3L)) + .setTotalTimeoutDuration(Duration.ofHours(12L)) + .setInitialRetryDelayDuration(Duration.ofSeconds(3L)) .setRetryDelayMultiplier(1.0) .setJittered(true) - .setMaxRetryDelay(Duration.ofSeconds(3L)) + .setMaxRetryDelayDuration(Duration.ofSeconds(3L)) .build(); static final QueryResultsOption[] DEFAULT_QUERY_WAIT_OPTIONS = { @@ -73,6 +77,10 @@ public class Job extends JobInfo { private final BigQueryOptions options; private transient BigQuery bigquery; + private static final BigQueryRetryConfig DEFAULT_RETRY_CONFIG = + BigQueryRetryConfig.newBuilder() + .retryOnMessage(BigQueryErrorMessages.RATE_LIMIT_EXCEEDED_MSG) + .build(); // retry config with Error Message for RateLimitExceeded Error /** A builder for {@code Job} objects. */ public static final class Builder extends JobInfo.Builder { @@ -167,7 +175,21 @@ public Job build() { */ public boolean exists() { checkNotDryRun("exists"); - return bigquery.getJob(getJobId(), JobOption.fields()) != null; + Span exists = null; + if (options.isOpenTelemetryTracingEnabled() && options.getOpenTelemetryTracer() != null) { + exists = + options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.exists") + .startSpan(); + } + try (Scope existsScope = exists != null ? 
exists.makeCurrent() : null) { + return bigquery.getJob(getJobId(), JobOption.fields()) != null; + } finally { + if (exists != null) { + exists.end(); + } + } } /** @@ -188,15 +210,45 @@ public boolean exists() { */ public boolean isDone() { checkNotDryRun("isDone"); - Job job = bigquery.getJob(getJobId(), JobOption.fields(BigQuery.JobField.STATUS)); - return job == null || JobStatus.State.DONE.equals(job.getStatus().getState()); + if (hasDoneState()) { + return true; + } + Span isDone = null; + if (options.isOpenTelemetryTracingEnabled() && options.getOpenTelemetryTracer() != null) { + isDone = + options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.isDone") + .startSpan(); + } + try (Scope isDoneScope = isDone != null ? isDone.makeCurrent() : null) { + Job job = bigquery.getJob(getJobId(), JobOption.fields(BigQuery.JobField.STATUS)); + return job == null || job.hasDoneState(); + } finally { + if (isDone != null) { + isDone.end(); + } + } + } + + private boolean hasDoneState() { + return getStatus() != null && JobStatus.State.DONE.equals(getStatus().getState()); } + + /** See {@link #waitFor(BigQueryRetryConfig, RetryOption...)} */ + public Job waitFor(RetryOption... waitOptions) throws InterruptedException { + return waitForInternal(DEFAULT_RETRY_CONFIG, waitOptions); + } + /** * Blocks until this job completes its execution, either failing or succeeding. This method * returns current job's latest information. If the job no longer exists, this method returns * {@code null}. By default, the job status is checked using jittered exponential backoff with 1 * second as an initial delay, 2.0 as a backoff factor, 1 minute as maximum delay between polls, - * 12 hours as a total timeout and unlimited number of attempts. + * 12 hours as a total timeout and unlimited number of attempts. For query jobs, the job status + * check can be configured to retry on specific BigQuery error messages using {@link + * BigQueryRetryConfig}. This {@link BigQueryRetryConfig} configuration is not available for + * non-query jobs. * *

    Example usage of {@code waitFor()}. * @@ -227,25 +279,68 @@ public boolean isDone() { * } * } * + *

    Example usage of {@code waitFor()} with BigQuery retry configuration to retry on rate limit + * exceeded error messages for query jobs. + * + *

<pre>{@code
    +   * Job completedJob =
    +   *     job.waitFor(
    +   *             BigQueryRetryConfig.newBuilder()
    +   *                 .retryOnMessage(BigQueryErrorMessages.RATE_LIMIT_EXCEEDED_MSG)
    +   *                 .retryOnMessage(BigQueryErrorMessages.JOB_RATE_LIMIT_EXCEEDED_MSG)
    +   *                 .retryOnRegEx(BigQueryErrorMessages.RetryRegExPatterns.RATE_LIMIT_EXCEEDED_REGEX)
    +   *                 .build());
    +   * if (completedJob == null) {
    +   *   // job no longer exists
    +   * } else if (completedJob.getStatus().getError() != null) {
    +   *   // job failed, handle error
    +   * } else {
    +   *   // job completed successfully
    +   * }
+   * }</pre>
    + * + * @param bigQueryRetryConfig configures retries for query jobs for BigQuery failures * @param waitOptions options to configure checking period and timeout * @throws BigQueryException upon failure, check {@link BigQueryException#getCause()} for details * @throws InterruptedException if the current thread gets interrupted while waiting for the job * to complete */ - public Job waitFor(RetryOption... waitOptions) throws InterruptedException { + public Job waitFor(BigQueryRetryConfig bigQueryRetryConfig, RetryOption... waitOptions) + throws InterruptedException { + return waitForInternal(bigQueryRetryConfig, waitOptions); + } + + private Job waitForInternal(BigQueryRetryConfig bigQueryRetryConfig, RetryOption... waitOptions) + throws InterruptedException { checkNotDryRun("waitFor"); - Object completedJobResponse; - if (getConfiguration().getType() == Type.QUERY) { - completedJobResponse = - waitForQueryResults( - RetryOption.mergeToSettings(DEFAULT_JOB_WAIT_SETTINGS, waitOptions), - DEFAULT_QUERY_WAIT_OPTIONS); - } else { - completedJobResponse = - waitForJob(RetryOption.mergeToSettings(DEFAULT_QUERY_JOB_WAIT_SETTINGS, waitOptions)); + Span waitFor = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + waitFor = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.waitFor") + .startSpan(); } + try (Scope waitForScope = waitFor != null ? waitFor.makeCurrent() : null) { + Object completedJobResponse; + if (getConfiguration().getType() == Type.QUERY) { + completedJobResponse = + waitForQueryResults( + RetryOption.mergeToSettings(DEFAULT_JOB_WAIT_SETTINGS, waitOptions), + bigQueryRetryConfig, + DEFAULT_QUERY_WAIT_OPTIONS); + } else { + completedJobResponse = + waitForJob(RetryOption.mergeToSettings(DEFAULT_QUERY_JOB_WAIT_SETTINGS, waitOptions)); + } - return completedJobResponse == null ? null : reload(); + return completedJobResponse == null ? null : reload(); + } finally { + if (waitFor != null) { + waitFor.end(); + } + } } /** @@ -262,93 +357,169 @@ public Job waitFor(RetryOption... waitOptions) throws InterruptedException { public TableResult getQueryResults(QueryResultsOption... options) throws InterruptedException, JobException { checkNotDryRun("getQueryResults"); - if (getConfiguration().getType() != Type.QUERY) { - throw new UnsupportedOperationException( - "Getting query results is supported only for " + Type.QUERY + " jobs"); - } - - List waitOptions = - new ArrayList<>(Arrays.asList(DEFAULT_QUERY_WAIT_OPTIONS)); - List listOptions = new ArrayList<>(); - for (QueryResultsOption option : options) { - switch (option.getRpcOption()) { - case MAX_RESULTS: - listOptions.add(TableDataListOption.pageSize((Long) option.getValue())); - break; - case PAGE_TOKEN: - listOptions.add(TableDataListOption.pageToken((String) option.getValue())); - break; - case START_INDEX: - listOptions.add(TableDataListOption.startIndex((Long) option.getValue())); - break; - case TIMEOUT: - waitOptions.add(QueryResultsOption.maxWaitTime((Long) option.getValue())); - break; - } + + Span getQueryResults = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getQueryResults = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.getQueryResults") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); } + try (Scope getQueryResultsScope = + getQueryResults != null ? 
getQueryResults.makeCurrent() : null) { - QueryResponse response = - waitForQueryResults( - DEFAULT_JOB_WAIT_SETTINGS, waitOptions.toArray(new QueryResultsOption[0])); + if (getConfiguration().getType() != Type.QUERY) { + throw new UnsupportedOperationException( + "Getting query results is supported only for " + Type.QUERY + " jobs"); + } - // Get the job resource to determine if it has errored. - Job job = this; - if (job.getStatus() == null || !JobStatus.State.DONE.equals(job.getStatus().getState())) { - job = reload(); - } - if (job.getStatus() != null && job.getStatus().getError() != null) { - throw new BigQueryException( - job.getStatus().getExecutionErrors() == null - ? ImmutableList.of(job.getStatus().getError()) - : ImmutableList.copyOf(job.getStatus().getExecutionErrors())); - } + List waitOptions = + new ArrayList<>(Arrays.asList(DEFAULT_QUERY_WAIT_OPTIONS)); + List listOptions = new ArrayList<>(); + for (QueryResultsOption option : options) { + switch (option.getRpcOption()) { + case MAX_RESULTS: + listOptions.add(TableDataListOption.pageSize((Long) option.getValue())); + break; + case PAGE_TOKEN: + listOptions.add(TableDataListOption.pageToken((String) option.getValue())); + break; + case START_INDEX: + listOptions.add(TableDataListOption.startIndex((Long) option.getValue())); + break; + case TIMEOUT: + waitOptions.add(QueryResultsOption.maxWaitTime((Long) option.getValue())); + break; + } + } - // If there are no rows in the result, this may have been a DDL query. - // Listing table data might fail, such as with CREATE VIEW queries. - // Avoid a tabledata.list API request by returning an empty TableResult. - if (response.getTotalRows() == 0) { - return new EmptyTableResult(response.getSchema()); - } + QueryResponse response = + waitForQueryResults( + DEFAULT_JOB_WAIT_SETTINGS, + DEFAULT_RETRY_CONFIG, + waitOptions.toArray(new QueryResultsOption[0])); + + // Get the job resource to determine if it has errored. + Job job = this; + if (job.getStatus() == null || !JobStatus.State.DONE.equals(job.getStatus().getState())) { + job = reload(); + } + if (job.getStatus() != null && job.getStatus().getError() != null) { + throw new BigQueryException( + job.getStatus().getExecutionErrors() == null + ? ImmutableList.of(job.getStatus().getError()) + : ImmutableList.copyOf(job.getStatus().getExecutionErrors())); + } + + // If there are no rows in the result, this may have been a DDL query. + // Listing table data might fail, such as with CREATE VIEW queries. + // Avoid a tabledata.list API request by returning an empty TableResult. + if (response.getTotalRows() == 0) { + TableResult emptyTableResult = + TableResult.newBuilder() + .setSchema(response.getSchema()) + .setJobId(job.getJobId()) + .setTotalRows(0L) + .setPageNoSchema(new PageImpl(null, "", null)) + .build(); + return emptyTableResult; + } - TableId table = - ((QueryJobConfiguration) getConfiguration()).getDestinationTable() == null - ? ((QueryJobConfiguration) job.getConfiguration()).getDestinationTable() - : ((QueryJobConfiguration) getConfiguration()).getDestinationTable(); - return bigquery.listTableData( - table, response.getSchema(), listOptions.toArray(new TableDataListOption[0])); + TableId table = + ((QueryJobConfiguration) getConfiguration()).getDestinationTable() == null + ? 
((QueryJobConfiguration) job.getConfiguration()).getDestinationTable() + : ((QueryJobConfiguration) getConfiguration()).getDestinationTable(); + TableResult tableResult = + bigquery.listTableData( + table, response.getSchema(), listOptions.toArray(new TableDataListOption[0])); + TableResult tableResultWithJobId = tableResult.toBuilder().setJobId(job.getJobId()).build(); + return tableResultWithJobId; + } finally { + if (getQueryResults != null) { + getQueryResults.end(); + } + } } private QueryResponse waitForQueryResults( - RetrySettings waitSettings, final QueryResultsOption... resultsOptions) + RetrySettings retrySettings, + BigQueryRetryConfig bigQueryRetryConfig, + final QueryResultsOption... resultsOptions) throws InterruptedException { if (getConfiguration().getType() != Type.QUERY) { throw new UnsupportedOperationException( "Waiting for query results is supported only for " + Type.QUERY + " jobs"); } - try { - return RetryHelper.poll( + Span waitForQueryResults = null; + if (options.isOpenTelemetryTracingEnabled() && options.getOpenTelemetryTracer() != null) { + waitForQueryResults = + options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.waitForQueryResults") + .setAllAttributes(otelAttributesFromOptions(resultsOptions)) + .startSpan(); + } + try (Scope waitForQueryResultsScope = + waitForQueryResults != null ? waitForQueryResults.makeCurrent() : null) { + return BigQueryRetryHelper.runWithRetries( new Callable() { @Override public QueryResponse call() { return bigquery.getQueryResults(getJobId(), resultsOptions); } }, - waitSettings, + retrySettings, new BasicResultRetryAlgorithm() { @Override - public boolean shouldRetry(Throwable prevThrowable, QueryResponse prevResponse) { + public boolean shouldRetry( + Throwable prevThrowable, + QueryResponse + prevResponse) { // Used by BigQueryRetryAlgorithm.shouldRetryBasedOnResult return prevResponse != null && !prevResponse.getCompleted(); } }, - options.getClock()); - } catch (ExecutionException e) { + options.getClock(), + bigQueryRetryConfig, + options.isOpenTelemetryTracingEnabled(), + options.getOpenTelemetryTracer()); + } catch (BigQueryRetryHelper.BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (waitForQueryResults != null) { + waitForQueryResults.end(); + } } } private Job waitForJob(RetrySettings waitSettings) throws InterruptedException { - try { + Span waitForJob = null; + if (options.isOpenTelemetryTracingEnabled() && options.getOpenTelemetryTracer() != null) { + waitForJob = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.waitForJob") + .setAttribute( + "bq.job.wait_settings.total_timeout", + getFieldAsString(waitSettings.getTotalTimeoutDuration())) + .setAttribute( + "bq.job.wait_settings.initial_retry_delay", + getFieldAsString(waitSettings.getInitialRetryDelayDuration())) + .setAttribute( + "bq.job.wait_settings.max_retry_delay", + getFieldAsString(waitSettings.getMaxRetryDelayDuration())) + .setAttribute( + "bq.job.wait_settings.initial_rpc_timeout", + getFieldAsString(waitSettings.getInitialRpcTimeoutDuration())) + .setAttribute( + "bq.job.wait_settings.max_rpc_timeout", + getFieldAsString(waitSettings.getMaxRpcTimeoutDuration())) + .startSpan(); + } + try (Scope waitForJobScope = waitForJob != null ? 
waitForJob.makeCurrent() : null) { return RetryHelper.poll( new Callable() { @Override @@ -373,6 +544,10 @@ public boolean shouldRetry(Throwable prevThrowable, Job prevResponse) { options.getClock()); } catch (ExecutionException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (waitForJob != null) { + waitForJob.end(); + } } } @@ -403,14 +578,31 @@ public boolean shouldRetry(Throwable prevThrowable, Job prevResponse) { */ public Job reload(JobOption... options) { checkNotDryRun("reload"); - Job job = bigquery.getJob(getJobId(), options); - if (job != null && job.getStatus().getError() != null) { - throw new BigQueryException( - job.getStatus().getExecutionErrors() == null - ? ImmutableList.of(job.getStatus().getError()) - : ImmutableList.copyOf(job.getStatus().getExecutionErrors())); - } - return job; + Span reload = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + reload = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.reload") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + + try (Scope reloadScope = reload != null ? reload.makeCurrent() : null) { + Job job = bigquery.getJob(getJobId(), options); + if (job != null && job.getStatus().getError() != null) { + throw new BigQueryException( + job.getStatus().getExecutionErrors() == null + ? ImmutableList.of(job.getStatus().getError()) + : ImmutableList.copyOf(job.getStatus().getExecutionErrors())); + } + return job; + } finally { + if (reload != null) { + reload.end(); + } + } } /** @@ -432,7 +624,22 @@ public Job reload(JobOption... options) { */ public boolean cancel() { checkNotDryRun("cancel"); - return bigquery.cancel(getJobId()); + Span cancel = null; + if (options.isOpenTelemetryTracingEnabled() && options.getOpenTelemetryTracer() != null) { + cancel = + options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.Job.cancel") + .startSpan(); + } + + try (Scope cancelScope = cancel != null ? cancel.makeCurrent() : null) { + return bigquery.cancel(getJobId()); + } finally { + if (cancel != null) { + cancel.end(); + } + } } private void checkNotDryRun(String op) { @@ -496,4 +703,19 @@ private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundE static Job fromPb(BigQuery bigquery, com.google.api.services.bigquery.model.Job jobPb) { return new Job(bigquery, new JobInfo.BuilderImpl(jobPb)); } + + private static Attributes otelAttributesFromOptions(Option... options) { + Attributes attributes = Attributes.builder().build(); + for (Option option : options) { + attributes = + attributes.toBuilder() + .put(option.getRpcOption().toString(), option.getValue().toString()) + .build(); + } + return attributes; + } + + private static String getFieldAsString(Object field) { + return field == null ? "null" : field.toString(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobCreationReason.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobCreationReason.java new file mode 100644 index 0000000000..296c5cc049 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobCreationReason.java @@ -0,0 +1,80 @@ +/* + * Copyright 2026 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
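On the tracing hooks threaded through `Job` above: spans are only created when the client's options report tracing as enabled and supply a tracer. The patch reads `isOpenTelemetryTracingEnabled()` and `getOpenTelemetryTracer()` from `BigQueryOptions`, so something along these lines is assumed on the options builder; the setter names here are assumptions, not confirmed by this diff:

```java
import io.opentelemetry.api.GlobalOpenTelemetry;
import io.opentelemetry.api.trace.Tracer;

// Sketch: opt in to client-side tracing so Job.waitFor()/reload()/cancel() emit spans.
Tracer tracer = GlobalOpenTelemetry.getTracer("com.example.bigquery");
BigQueryOptions options =
    BigQueryOptions.newBuilder()
        .setEnableOpenTelemetryTracing(true) // assumed flag behind isOpenTelemetryTracingEnabled()
        .setOpenTelemetryTracer(tracer)      // assumed setter behind getOpenTelemetryTracer()
        .build();
Job job = options.getService().getJob(JobId.of("my-job"));
job.waitFor(); // emits a com.google.cloud.bigquery.Job.waitFor span (throws InterruptedException)
```

The try-with-resources `Scope` pattern in the patch keeps the span current only while the wrapped call runs, and the `finally` blocks guarantee `end()` even when the underlying RPC throws.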
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery; + +import javax.annotation.Nullable; + +/** + * Maps to JobCreationReason + * when used with {@link + * com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode#JOB_CREATION_OPTIONAL}. + * + *

    The code indicates the high level reason why a job was created. The default is `UNKNOWN` if + * there is no mapping found between the server response and the client library. + */ +public class JobCreationReason { + + public enum Code { + REQUESTED("REQUESTED"), + LONG_RUNNING("LONG_RUNNING"), + LARGE_RESULTS("LARGE_RESULTS"), + OTHER("OTHER"), + UNKNOWN("UNKNOWN"); + + private final String reason; + + Code(String reason) { + this.reason = reason; + } + + /** + * Maps the server code to BQ code. Returns {@link Code#UNKNOWN} if the mapping does not exist. + */ + static Code fromValue(@Nullable String reason) { + for (JobCreationReason.Code code : Code.values()) { + if (code.reason.equals(reason)) { + return code; + } + } + return UNKNOWN; + } + } + + @Nullable private final Code code; + + JobCreationReason(Code code) { + this.code = code; + } + + static JobCreationReason fromPb( + com.google.api.services.bigquery.model.JobCreationReason jobCreationReason) { + // JobCreationReason may be null if the JobCreationMode is specified to be Optional + // Note: JobCreationMode.Optional may also end up creating a job depending on the + // query complexity and other factors. + if (jobCreationReason == null) { + return null; + } + return new JobCreationReason(Code.fromValue(jobCreationReason.getCode())); + } + + /** + * @return JobCreationReason code or {@link Code#UNKNOWN} if mapping does not exist. + */ + public Code getCode() { + return code; + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobException.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobException.java index 612affd315..0bfa2572a3 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobException.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobException.java @@ -34,6 +34,7 @@ public class JobException extends RuntimeException { public JobId getId() { return id; } + /** * The errors reported by the job. * diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobId.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobId.java index b966cebe1d..4bfc2aa002 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobId.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobId.java @@ -21,6 +21,7 @@ import com.google.api.services.bigquery.model.JobReference; import com.google.auto.value.AutoValue; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.UUID; import javax.annotation.Nullable; @@ -123,4 +124,16 @@ static JobId fromPb(JobReference jobRef) { .setLocation(jobRef.getLocation()) .build(); } + + private static String getFieldAsString(Object field) { + return field == null ? 
"null" : field.toString(); + } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .put("bq.job.id", getFieldAsString(this.getJob())) + .put("bq.job.location", getFieldAsString(this.getLocation())) + .put("bq.job.project", getFieldAsString(this.getProject())) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobInfo.java index 8f571ff551..fdf1b6e606 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobInfo.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobInfo.java @@ -66,6 +66,12 @@ public enum WriteDisposition { /** Configures the job to overwrite the table data if table already exists. */ WRITE_TRUNCATE, + /** + * Configures the job to retain schema and constraints on an existing table, and truncate and + * replace data. + */ + WRITE_TRUNCATE_DATA, + /** Configures the job to append data to the table if it already exists. */ WRITE_APPEND, diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobStatistics.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobStatistics.java index d971a6fb27..c78378ab53 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobStatistics.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobStatistics.java @@ -17,12 +17,17 @@ package com.google.cloud.bigquery; import com.google.api.core.ApiFunction; +import com.google.api.services.bigquery.model.ExportDataStatistics; import com.google.api.services.bigquery.model.JobConfiguration; import com.google.api.services.bigquery.model.JobStatistics2; import com.google.api.services.bigquery.model.JobStatistics3; import com.google.api.services.bigquery.model.JobStatistics4; +import com.google.api.services.bigquery.model.JobStatistics5; +import com.google.api.services.bigquery.model.QueryParameter; +import com.google.auto.value.AutoValue; import com.google.cloud.StringEnumType; import com.google.cloud.StringEnumValue; +import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.MoreObjects; import com.google.common.base.MoreObjects.ToStringHelper; @@ -30,12 +35,13 @@ import java.io.Serializable; import java.util.List; import java.util.Objects; +import javax.annotation.Nullable; import org.checkerframework.checker.nullness.compatqual.NullableDecl; /** A Google BigQuery Job statistics. */ public abstract class JobStatistics implements Serializable { - private static final long serialVersionUID = 1433024714741660399L; + private static final long serialVersionUID = 1433024714741660400L; private final Long creationTime; private final Long endTime; @@ -44,18 +50,43 @@ public abstract class JobStatistics implements Serializable { private final String parentJobId; private final ScriptStatistics scriptStatistics; private final List reservationUsage; + private final TransactionInfo transactionInfo; + private final SessionInfo sessionInfo; + private final Long totalSlotMs; /** A Google BigQuery Copy Job statistics. 
*/ public static class CopyStatistics extends JobStatistics { - private static final long serialVersionUID = 8218325588441660938L; + private static final long serialVersionUID = 8218325588441660939L; + + private final Long copiedLogicalBytes; + + private final Long copiedRows; static final class Builder extends JobStatistics.Builder { + private Long copiedLogicalBytes; + + private Long copiedRows; + private Builder() {} private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) { super(statisticsPb); + if (statisticsPb.getCopy() != null) { + this.copiedLogicalBytes = statisticsPb.getCopy().getCopiedLogicalBytes(); + this.copiedRows = statisticsPb.getCopy().getCopiedRows(); + } + } + + Builder setCopiedLogicalBytes(long copiedLogicalBytes) { + this.copiedLogicalBytes = copiedLogicalBytes; + return self(); + } + + Builder setCopiedRows(long copiedRows) { + this.copiedRows = copiedRows; + return self(); } @Override @@ -66,6 +97,25 @@ CopyStatistics build() { private CopyStatistics(Builder builder) { super(builder); + this.copiedLogicalBytes = builder.copiedLogicalBytes; + this.copiedRows = builder.copiedRows; + } + + /** Returns number of logical bytes copied to the destination table. */ + public Long getCopiedLogicalBytes() { + return copiedLogicalBytes; + } + + /** Returns number of rows copied to the destination table. */ + public Long getCopiedRows() { + return copiedRows; + } + + @Override + ToStringHelper toStringHelper() { + return super.toStringHelper() + .add("copiedLogicalBytes", copiedLogicalBytes) + .add("copiedRows", copiedRows); } @Override @@ -78,7 +128,15 @@ public final boolean equals(Object obj) { @Override public final int hashCode() { - return baseHashCode(); + return Objects.hash(baseHashCode(), copiedLogicalBytes, copiedRows); + } + + @Override + com.google.api.services.bigquery.model.JobStatistics toPb() { + JobStatistics5 copyStatisticsPb = new JobStatistics5(); + copyStatisticsPb.setCopiedLogicalBytes(copiedLogicalBytes); + copyStatisticsPb.setCopiedRows(copiedRows); + return super.toPb().setCopy(copyStatisticsPb); } static Builder newBuilder() { @@ -98,16 +156,21 @@ public static class ExtractStatistics extends JobStatistics { private final List destinationUriFileCounts; + private final Long inputBytes; + static final class Builder extends JobStatistics.Builder { private List destinationUriFileCounts; + private Long inputBytes; + private Builder() {} private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) { super(statisticsPb); if (statisticsPb.getExtract() != null) { this.destinationUriFileCounts = statisticsPb.getExtract().getDestinationUriFileCounts(); + this.inputBytes = statisticsPb.getExtract().getInputBytes(); } } @@ -116,6 +179,11 @@ Builder setDestinationUriFileCounts(List destinationUriFileCounts) { return self(); } + Builder setInputBytes(Long inputBytes) { + this.inputBytes = inputBytes; + return self(); + } + @Override ExtractStatistics build() { return new ExtractStatistics(this); @@ -125,6 +193,7 @@ ExtractStatistics build() { private ExtractStatistics(Builder builder) { super(builder); this.destinationUriFileCounts = builder.destinationUriFileCounts; + this.inputBytes = builder.inputBytes; } /** @@ -136,6 +205,11 @@ public List getDestinationUriFileCounts() { return destinationUriFileCounts; } + /** Returns number of user bytes extracted into the result. 
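+     *
+     * <p>A minimal sketch, assuming {@code extractJob} is a completed extract {@link Job} (the
+     * variable name is a placeholder):
+     *
+     * <pre>{@code
+     * JobStatistics.ExtractStatistics stats = extractJob.getStatistics();
+     * List<Long> fileCounts = stats.getDestinationUriFileCounts();
+     * Long inputBytes = stats.getInputBytes();
+     * }</pre>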
*/ + public Long getInputBytes() { + return inputBytes; + } + @Override ToStringHelper toStringHelper() { return super.toStringHelper().add("destinationUriFileCounts", destinationUriFileCounts); @@ -156,9 +230,10 @@ public final int hashCode() { @Override com.google.api.services.bigquery.model.JobStatistics toPb() { - com.google.api.services.bigquery.model.JobStatistics statisticsPb = super.toPb(); - return statisticsPb.setExtract( - new JobStatistics4().setDestinationUriFileCounts(destinationUriFileCounts)); + JobStatistics4 extractStatisticsPb = new JobStatistics4(); + extractStatisticsPb.setDestinationUriFileCounts(destinationUriFileCounts); + extractStatisticsPb.setInputBytes(inputBytes); + return super.toPb().setExtract(extractStatisticsPb); } static Builder newBuilder() { @@ -317,24 +392,30 @@ static LoadStatistics fromPb(com.google.api.services.bigquery.model.JobStatistic /** A Google BigQuery Query Job statistics. */ public static class QueryStatistics extends JobStatistics { - private static final long serialVersionUID = 7539354109226732353L; + private static final long serialVersionUID = 7539354109226732354L; + private final BiEngineStats biEngineStats; private final Integer billingTier; private final Boolean cacheHit; + private Boolean useReadApi; private final String ddlOperationPerformed; private final TableId ddlTargetTable; private final RoutineId ddlTargetRoutine; private final Long estimatedBytesProcessed; private final Long numDmlAffectedRows; + private final DmlStats dmlStats; + private final ExportDataStats exportDataStats; private final List referencedTables; private final StatementType statementType; private final Long totalBytesBilled; private final Long totalBytesProcessed; private final Long totalPartitionsProcessed; - private final Long totalSlotMs; private final List queryPlan; private final List timeline; private final Schema schema; + private final SearchStats searchStats; + private final MetadataCacheStats metadataCacheStats; + private final List queryParameters; /** * StatementType represents possible types of SQL statements reported as part of the @@ -373,6 +454,44 @@ public StatementType apply(String constant) { public static final StatementType DROP_FUNCTION = type.createAndRegister("DROP_FUNCTION"); public static final StatementType DROP_PROCEDURE = type.createAndRegister("DROP_PROCEDURE"); public static final StatementType MERGE = type.createAndRegister("MERGE"); + public static final StatementType CREATE_MATERIALIZED_VIEW = + type.createAndRegister("CREATE_MATERIALIZED_VIEW"); + public static final StatementType CREATE_TABLE_FUNCTION = + type.createAndRegister("CREATE_TABLE_FUNCTION"); + public static final StatementType CREATE_ROW_ACCESS_POLICY = + type.createAndRegister("CREATE_ROW_ACCESS_POLICY"); + public static final StatementType CREATE_SCHEMA = type.createAndRegister("CREATE_SCHEMA"); + public static final StatementType CREATE_SNAPSHOT_TABLE = + type.createAndRegister("CREATE_SNAPSHOT_TABLE"); + public static final StatementType CREATE_SEARCH_INDEX = + type.createAndRegister("CREATE_SEARCH_INDEX"); + public static final StatementType DROP_EXTERNAL_TABLE = + type.createAndRegister("DROP_EXTERNAL_TABLE"); + + public static final StatementType DROP_MODEL = type.createAndRegister("DROP_MODEL"); + public static final StatementType DROP_MATERIALIZED_VIEW = + type.createAndRegister("DROP_MATERIALIZED_VIEW"); + + public static final StatementType DROP_TABLE_FUNCTION = + type.createAndRegister("DROP_TABLE_FUNCTION"); + public static final StatementType 
DROP_SEARCH_INDEX = + type.createAndRegister("DROP_SEARCH_INDEX"); + public static final StatementType DROP_SCHEMA = type.createAndRegister("DROP_SCHEMA"); + public static final StatementType DROP_SNAPSHOT_TABLE = + type.createAndRegister("DROP_SNAPSHOT_TABLE"); + public static final StatementType DROP_ROW_ACCESS_POLICY = + type.createAndRegister("DROP_ROW_ACCESS_POLICY"); + public static final StatementType ALTER_MATERIALIZED_VIEW = + type.createAndRegister("ALTER_MATERIALIZED_VIEW"); + public static final StatementType ALTER_SCHEMA = type.createAndRegister("ALTER_SCHEMA"); + public static final StatementType SCRIPT = type.createAndRegister("SCRIPT"); + public static final StatementType TRUNCATE_TABLE = type.createAndRegister("TRUNCATE_TABLE"); + public static final StatementType CREATE_EXTERNAL_TABLE = + type.createAndRegister("CREATE_EXTERNAL_TABLE"); + public static final StatementType EXPORT_DATA = type.createAndRegister("EXPORT_DATA"); + public static final StatementType EXPORT_MODEL = type.createAndRegister("EXPORT_MODEL"); + public static final StatementType LOAD_DATA = type.createAndRegister("LOAD_DATA"); + public static final StatementType CALL = type.createAndRegister("CALL"); private StatementType(String constant) { super(constant); @@ -397,8 +516,83 @@ public static StatementType[] values() { } } + /** + * Statistics for the EXPORT DATA statement as part of Query Job. EXTRACT JOB statistics are + * populated in ExtractStatistics. + */ + @AutoValue + public abstract static class ExportDataStats implements Serializable { + private static final long serialVersionUID = 1L; + + /** + * Returns number of destination files generated in case of EXPORT DATA statement only. + * + * @return value or {@code null} for none + */ + @Nullable + public abstract Long getFileCount(); + + /** + * Returns number of destination rows generated in case of EXPORT DATA statement only. + * + * @return value or {@code null} for none + */ + @Nullable + public abstract Long getRowCount(); + + public abstract Builder toBuilder(); + + public static Builder newBuilder() { + return new AutoValue_JobStatistics_QueryStatistics_ExportDataStats.Builder(); + } + + static ExportDataStats fromPb(ExportDataStatistics exportDataStatisticsPb) { + Builder builder = newBuilder(); + if (exportDataStatisticsPb.getFileCount() != null) { + builder.setFileCount(exportDataStatisticsPb.getFileCount()); + } + if (exportDataStatisticsPb.getRowCount() != null) { + builder.setRowCount(exportDataStatisticsPb.getRowCount()); + } + return builder.build(); + } + + ExportDataStatistics toPb() { + ExportDataStatistics exportDataStatisticsPb = new ExportDataStatistics(); + if (getFileCount() != null) { + exportDataStatisticsPb.setFileCount(getFileCount()); + } + if (getRowCount() != null) { + exportDataStatisticsPb.setRowCount(getRowCount()); + } + return exportDataStatisticsPb; + } + + @AutoValue.Builder + public abstract static class Builder { + + /** + * Number of destination files generated in case of EXPORT DATA statement only. + * + * @param fileCount fileCount or {@code null} for none + */ + public abstract Builder setFileCount(Long fileCount); + + /** + * Number of destination rows generated in case of EXPORT DATA statement only. + * + * @param rowCount rowCount or {@code null} for none + */ + public abstract Builder setRowCount(Long rowCount); + + /** Creates a {@code ExportDataStats} object. 
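+       *
+       * <p>For illustration, building an instance with made-up counts:
+       *
+       * <pre>{@code
+       * ExportDataStats stats =
+       *     ExportDataStats.newBuilder().setFileCount(3L).setRowCount(1000L).build();
+       * }</pre>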
*/ + public abstract ExportDataStats build(); + } + } + static final class Builder extends JobStatistics.Builder { + private BiEngineStats biEngineStats; private Integer billingTier; private Boolean cacheHit; private String ddlOperationPerformed; @@ -406,21 +600,30 @@ static final class Builder extends JobStatistics.Builder referencedTables; private StatementType statementType; private Long totalBytesBilled; private Long totalBytesProcessed; private Long totalPartitionsProcessed; - private Long totalSlotMs; private List queryPlan; private List timeline; private Schema schema; + private List queryParameters; + private SearchStats searchStats; + + private MetadataCacheStats metadataCacheStats; private Builder() {} private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsPb) { super(statisticsPb); if (statisticsPb.getQuery() != null) { + if (statisticsPb.getQuery().getBiEngineStatistics() != null) { + this.biEngineStats = + BiEngineStats.fromPb(statisticsPb.getQuery().getBiEngineStatistics()); + } this.billingTier = statisticsPb.getQuery().getBillingTier(); this.cacheHit = statisticsPb.getQuery().getCacheHit(); this.ddlOperationPerformed = statisticsPb.getQuery().getDdlOperationPerformed(); @@ -435,7 +638,6 @@ private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsP this.totalBytesBilled = statisticsPb.getQuery().getTotalBytesBilled(); this.totalBytesProcessed = statisticsPb.getQuery().getTotalBytesProcessed(); this.totalPartitionsProcessed = statisticsPb.getQuery().getTotalPartitionsProcessed(); - this.totalSlotMs = statisticsPb.getQuery().getTotalSlotMs(); if (statisticsPb.getQuery().getStatementType() != null) { this.statementType = StatementType.valueOf(statisticsPb.getQuery().getStatementType()); } @@ -458,9 +660,28 @@ private Builder(com.google.api.services.bigquery.model.JobStatistics statisticsP if (statisticsPb.getQuery().getSchema() != null) { this.schema = Schema.fromPb(statisticsPb.getQuery().getSchema()); } + if (statisticsPb.getQuery().getSearchStatistics() != null) { + this.searchStats = SearchStats.fromPb(statisticsPb.getQuery().getSearchStatistics()); + } + if (statisticsPb.getQuery().getMetadataCacheStatistics() != null) { + this.metadataCacheStats = + MetadataCacheStats.fromPb(statisticsPb.getQuery().getMetadataCacheStatistics()); + } + if (statisticsPb.getQuery().getDmlStats() != null) { + this.dmlStats = DmlStats.fromPb(statisticsPb.getQuery().getDmlStats()); + } + if (statisticsPb.getQuery().getExportDataStatistics() != null) { + this.exportDataStats = + ExportDataStats.fromPb(statisticsPb.getQuery().getExportDataStatistics()); + } } } + Builder setBiEngineStats(BiEngineStats biEngineStats) { + this.biEngineStats = biEngineStats; + return self(); + } + Builder setBillingTier(Integer billingTier) { this.billingTier = billingTier; return self(); @@ -496,6 +717,16 @@ Builder setNumDmlAffectedRows(Long numDmlAffectedRows) { return self(); } + Builder setDmlStats(DmlStats dmlStats) { + this.dmlStats = dmlStats; + return self(); + } + + Builder setExportDataStats(ExportDataStats exportDataStats) { + this.exportDataStats = exportDataStats; + return self(); + } + Builder setReferenceTables(List referencedTables) { this.referencedTables = referencedTables; return self(); @@ -526,11 +757,6 @@ Builder setTotalPartitionsProcessed(Long totalPartitionsProcessed) { return self(); } - Builder setTotalSlotMs(Long totalSlotMs) { - this.totalSlotMs = totalSlotMs; - return self(); - } - Builder setQueryPlan(List queryPlan) { this.queryPlan = 
queryPlan; return self(); @@ -546,6 +772,21 @@ Builder setSchema(Schema schema) { return self(); } + Builder setSearchStats(SearchStats searchStats) { + this.searchStats = searchStats; + return self(); + } + + Builder setMetadataCacheStats(MetadataCacheStats metadataCacheStats) { + this.metadataCacheStats = metadataCacheStats; + return self(); + } + + Builder setQueryParameters(List queryParameters) { + this.queryParameters = queryParameters; + return self(); + } + @Override QueryStatistics build() { return new QueryStatistics(this); @@ -554,22 +795,33 @@ QueryStatistics build() { private QueryStatistics(Builder builder) { super(builder); + this.biEngineStats = builder.biEngineStats; this.billingTier = builder.billingTier; this.cacheHit = builder.cacheHit; + this.useReadApi = false; this.ddlOperationPerformed = builder.ddlOperationPerformed; this.ddlTargetTable = builder.ddlTargetTable; this.ddlTargetRoutine = builder.ddlTargetRoutine; this.estimatedBytesProcessed = builder.estimatedBytesProcessed; this.numDmlAffectedRows = builder.numDmlAffectedRows; + this.dmlStats = builder.dmlStats; + this.exportDataStats = builder.exportDataStats; this.referencedTables = builder.referencedTables; this.statementType = builder.statementType; this.totalBytesBilled = builder.totalBytesBilled; this.totalBytesProcessed = builder.totalBytesProcessed; this.totalPartitionsProcessed = builder.totalPartitionsProcessed; - this.totalSlotMs = builder.totalSlotMs; this.queryPlan = builder.queryPlan; this.timeline = builder.timeline; this.schema = builder.schema; + this.searchStats = builder.searchStats; + this.metadataCacheStats = builder.metadataCacheStats; + this.queryParameters = builder.queryParameters; + } + + /** Returns query statistics specific to the use of BI Engine. */ + public BiEngineStats getBiEngineStats() { + return biEngineStats; } /** Returns the billing tier for the job. */ @@ -586,6 +838,18 @@ public Boolean getCacheHit() { return cacheHit; } + /** Returns whether the query result is read from the high throughput ReadAPI. */ + @VisibleForTesting + public Boolean getUseReadApi() { + return useReadApi; + } + + /** Sets internal state to reflect the use of the high throughput ReadAPI. */ + @VisibleForTesting + public void setUseReadApi(Boolean useReadApi) { + this.useReadApi = useReadApi; + } + /** [BETA] For DDL queries, returns the operation applied to the DDL target table. */ public String getDdlOperationPerformed() { return ddlOperationPerformed; @@ -614,6 +878,16 @@ public Long getNumDmlAffectedRows() { return numDmlAffectedRows; } + /** Detailed statistics for DML statements. */ + public DmlStats getDmlStats() { + return dmlStats; + } + + /** Detailed statistics for EXPORT DATA statement. */ + public ExportDataStats getExportDataStats() { + return exportDataStats; + } + /** * Referenced tables for the job. Queries that reference more than 50 tables will not have a * complete list. @@ -645,11 +919,6 @@ public Long getTotalPartitionsProcessed() { return totalPartitionsProcessed; } - /** Returns the slot-milliseconds consumed by the query. */ - public Long getTotalSlotMs() { - return totalSlotMs; - } - /** * Returns the query plan as a list of stages or {@code null} if a query plan is not available. * Each stage involves a number of steps that read from data sources, perform a series of @@ -680,16 +949,42 @@ public Schema getSchema() { return schema; } + /** + * Statistics for a search query. Populated as part of JobStatistics2. 
Provides information
+   * about how indexes are used in search queries. If an index is not used, you can retrieve
+   * debugging information about the reason why.
+   */
+  public SearchStats getSearchStats() {
+    return searchStats;
+  }
+
+  /** Statistics for metadata caching in BigLake tables. */
+  public MetadataCacheStats getMetadataCacheStats() {
+    return metadataCacheStats;
+  }
+
+  /**
+   * Standard SQL only: Returns a list of undeclared query parameters detected during a dry run
+   * validation.
+   */
+  public List<QueryParameter> getQueryParameters() {
+    return queryParameters;
+  }
+
   @Override
   ToStringHelper toStringHelper() {
     return super.toStringHelper()
+        .add("biEngineStats", biEngineStats)
         .add("billingTier", billingTier)
         .add("cacheHit", cacheHit)
         .add("totalBytesBilled", totalBytesBilled)
         .add("totalBytesProcessed", totalBytesProcessed)
         .add("queryPlan", queryPlan)
         .add("timeline", timeline)
-        .add("schema", schema);
+        .add("schema", schema)
+        .add("searchStats", searchStats)
+        .add("metadataCacheStats", metadataCacheStats)
+        .add("queryParameters", queryParameters);
   }

   @Override
@@ -704,17 +999,24 @@ public final boolean equals(Object obj) {
   public final int hashCode() {
     return Objects.hash(
         baseHashCode(),
+        biEngineStats,
         billingTier,
         cacheHit,
         totalBytesBilled,
         totalBytesProcessed,
         queryPlan,
-        schema);
+        schema,
+        searchStats,
+        metadataCacheStats,
+        queryParameters);
   }

   @Override
   com.google.api.services.bigquery.model.JobStatistics toPb() {
     JobStatistics2 queryStatisticsPb = new JobStatistics2();
+    if (biEngineStats != null) {
+      queryStatisticsPb.setBiEngineStatistics(biEngineStats.toPb());
+    }
     queryStatisticsPb.setBillingTier(billingTier);
     queryStatisticsPb.setCacheHit(cacheHit);
     queryStatisticsPb.setDdlOperationPerformed(ddlOperationPerformed);
@@ -722,14 +1024,18 @@ com.google.api.services.bigquery.model.JobStatistics toPb() {
     queryStatisticsPb.setTotalBytesBilled(totalBytesBilled);
     queryStatisticsPb.setTotalBytesProcessed(totalBytesProcessed);
     queryStatisticsPb.setTotalPartitionsProcessed(totalPartitionsProcessed);
-    queryStatisticsPb.setTotalSlotMs(totalSlotMs);
     if (ddlTargetTable != null) {
       queryStatisticsPb.setDdlTargetTable(ddlTargetTable.toPb());
     }
     if (ddlTargetRoutine != null) {
       queryStatisticsPb.setDdlTargetRoutine(ddlTargetRoutine.toPb());
     }
-
+    if (dmlStats != null) {
+      queryStatisticsPb.setDmlStats(dmlStats.toPb());
+    }
+    if (exportDataStats != null) {
+      queryStatisticsPb.setExportDataStatistics(exportDataStats.toPb());
+    }
     if (referencedTables != null) {
       queryStatisticsPb.setReferencedTables(
           Lists.transform(referencedTables, TableId.TO_PB_FUNCTION));
@@ -746,6 +1052,15 @@ com.google.api.services.bigquery.model.JobStatistics toPb() {
     if (schema != null) {
       queryStatisticsPb.setSchema(schema.toPb());
     }
+    if (searchStats != null) {
+      queryStatisticsPb.setSearchStatistics(searchStats.toPb());
+    }
+    if (metadataCacheStats != null) {
+      queryStatisticsPb.setMetadataCacheStatistics(metadataCacheStats.toPb());
+    }
+    if (queryParameters != null) {
+      queryStatisticsPb.setUndeclaredQueryParameters(queryParameters);
+    }
     return super.toPb().setQuery(queryStatisticsPb);
   }

@@ -1085,7 +1400,8 @@ public static class Builder {
     private String name;
     private Long slotMs;

-    private Builder() {};
+    private Builder() {}

     Builder setName(String name) {
       this.name = name;
@@ -1160,6 +1476,150 @@ static ReservationUsage fromPb(
     }
   }

+  // TransactionInfo contains information about a multi-statement transaction that may be
+  // associated with a job.
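+  //
+  // A hedged usage sketch ("job" stands in for any completed Job instance):
+  //
+  //   JobStatistics statistics = job.getStatistics();
+  //   JobStatistics.TransactionInfo transactionInfo = statistics.getTransactionInfo();
+  //   if (transactionInfo != null) {
+  //     System.out.println("ran in transaction " + transactionInfo.getTransactionId());
+  //   }
+  //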
+  public static class TransactionInfo {
+
+    // TransactionID is the system-generated identifier for the transaction.
+    private final String transactionId;
+
+    public static class Builder {
+
+      private String transactionId;
+
+      private Builder() {}
+
+      Builder setTransactionId(String transactionId) {
+        this.transactionId = transactionId;
+        return this;
+      }
+
+      TransactionInfo build() {
+        return new TransactionInfo(this);
+      }
+    }
+
+    private TransactionInfo(Builder builder) {
+      this.transactionId = builder.transactionId;
+    }
+
+    public String getTransactionId() {
+      return transactionId;
+    }
+
+    static Builder newBuilder() {
+      return new Builder();
+    }
+
+    ToStringHelper toStringHelper() {
+      return MoreObjects.toStringHelper(this).add("transactionId", transactionId);
+    }
+
+    @Override
+    public String toString() {
+      return toStringHelper().toString();
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      return obj == this
+          || obj != null
+              && obj.getClass().equals(TransactionInfo.class)
+              && Objects.equals(toPb(), ((TransactionInfo) obj).toPb());
+    }
+
+    @Override
+    public int hashCode() {
+      return Objects.hash(transactionId);
+    }
+
+    com.google.api.services.bigquery.model.TransactionInfo toPb() {
+      com.google.api.services.bigquery.model.TransactionInfo transactionInfo =
+          new com.google.api.services.bigquery.model.TransactionInfo();
+      transactionInfo.setTransactionId(transactionId);
+      return transactionInfo;
+    }
+
+    static TransactionInfo fromPb(
+        com.google.api.services.bigquery.model.TransactionInfo transactionInfo) {
+      Builder builder = newBuilder();
+      builder.setTransactionId(transactionInfo.getTransactionId());
+      return builder.build();
+    }
+  }
+
+  // SessionInfo contains information about the session if this job is part of one.
+  public static class SessionInfo {
+
+    // Id of the session
+    private final String sessionId;
+
+    public static class Builder {
+
+      private String sessionId;
+
+      private Builder() {}
+
+      Builder setSessionId(String sessionId) {
+        this.sessionId = sessionId;
+        return this;
+      }
+
+      SessionInfo build() {
+        return new SessionInfo(this);
+      }
+    }
+
+    private SessionInfo(Builder builder) {
+      this.sessionId = builder.sessionId;
+    }
+
+    public String getSessionId() {
+      return sessionId;
+    }
+
+    static Builder newBuilder() {
+      return new Builder();
+    }
+
+    ToStringHelper toStringHelper() {
+      return MoreObjects.toStringHelper(this).add("sessionId", sessionId);
+    }
+
+    @Override
+    public String toString() {
+      return toStringHelper().toString();
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      return obj == this
+          || obj != null
+              && obj.getClass().equals(SessionInfo.class)
+              && Objects.equals(toPb(), ((SessionInfo) obj).toPb());
+    }
+
+    @Override
+    public int hashCode() {
+      return Objects.hash(sessionId);
+    }
+
+    com.google.api.services.bigquery.model.SessionInfo toPb() {
+      com.google.api.services.bigquery.model.SessionInfo sessionInfo =
+          new com.google.api.services.bigquery.model.SessionInfo();
+      sessionInfo.setSessionId(sessionId);
+      return sessionInfo;
+    }
+
+    static SessionInfo fromPb(com.google.api.services.bigquery.model.SessionInfo sessionInfo) {
+      SessionInfo.Builder builder = newBuilder();
+      builder.setSessionId(sessionInfo.getSessionId());
+      return builder.build();
+    }
+  }
+
   abstract static class Builder<T extends JobStatistics, B extends Builder<T, B>> {

     private Long creationTime;
@@ -1169,6 +1629,9 @@ abstract static class Builder<T extends JobStatistics, B extends Builder<T, B>>
     private String parentJobId;
     private ScriptStatistics scriptStatistics;
     private List<ReservationUsage> reservationUsage;
+    private TransactionInfo transactionInfo;
+    private
SessionInfo sessionInfo; + private Long totalSlotMs; protected Builder() {} @@ -1178,6 +1641,9 @@ protected Builder(com.google.api.services.bigquery.model.JobStatistics statistic this.startTime = statisticsPb.getStartTime(); this.numChildJobs = statisticsPb.getNumChildJobs(); this.parentJobId = statisticsPb.getParentJobId(); + if (statisticsPb.getTotalSlotMs() != null) { + this.totalSlotMs = statisticsPb.getTotalSlotMs(); + } if (statisticsPb.getScriptStatistics() != null) { this.scriptStatistics = ScriptStatistics.fromPb(statisticsPb.getScriptStatistics()); } @@ -1185,6 +1651,12 @@ protected Builder(com.google.api.services.bigquery.model.JobStatistics statistic this.reservationUsage = Lists.transform(statisticsPb.getReservationUsage(), ReservationUsage.FROM_PB_FUNCTION); } + if (statisticsPb.getTransactionInfo() != null) { + this.transactionInfo = TransactionInfo.fromPb(statisticsPb.getTransactionInfo()); + } + if (statisticsPb.getSessionInfo() != null) { + this.sessionInfo = SessionInfo.fromPb(statisticsPb.getSessionInfo()); + } } @SuppressWarnings("unchecked") @@ -1207,6 +1679,11 @@ B setStartTime(Long startTime) { return self(); } + B setTotalSlotMs(Long totalSlotMs) { + this.totalSlotMs = totalSlotMs; + return self(); + } + abstract T build(); } @@ -1218,6 +1695,9 @@ protected JobStatistics(Builder builder) { this.parentJobId = builder.parentJobId; this.scriptStatistics = builder.scriptStatistics; this.reservationUsage = builder.reservationUsage; + this.transactionInfo = builder.transactionInfo; + this.sessionInfo = builder.sessionInfo; + this.totalSlotMs = builder.totalSlotMs; } /** Returns the creation time of the job in milliseconds since epoch. */ @@ -1261,6 +1741,21 @@ public List getReservationUsage() { return reservationUsage; } + /** Info indicates the transaction ID associated with the job, if any. */ + public TransactionInfo getTransactionInfo() { + return transactionInfo; + } + + /** Info of the session if this job is part of one. */ + public SessionInfo getSessionInfo() { + return sessionInfo; + } + + /** Returns the slot-milliseconds for the job. 
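+   *
+   * <p>Because {@code totalSlotMs} now lives on the {@code JobStatistics} base class (it was
+   * previously only on {@code QueryStatistics}), it can be read uniformly for query, load,
+   * copy, and extract jobs; a sketch, with {@code job} standing in for any completed job:
+   *
+   * <pre>{@code
+   * Long slotMs = job.getStatistics().getTotalSlotMs();
+   * if (slotMs != null) {
+   *   System.out.println("slot-milliseconds: " + slotMs);
+   * }
+   * }</pre>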
*/ + public Long getTotalSlotMs() { + return totalSlotMs; + } + ToStringHelper toStringHelper() { return MoreObjects.toStringHelper(this) .add("creationTime", creationTime) @@ -1269,7 +1764,10 @@ ToStringHelper toStringHelper() { .add("numChildJobs", numChildJobs) .add("parentJobId", parentJobId) .add("scriptStatistics", scriptStatistics) - .add("reservationUsage", reservationUsage); + .add("reservationUsage", reservationUsage) + .add("transactionInfo", transactionInfo) + .add("sessionInfo", sessionInfo) + .add("totalSlotMs", totalSlotMs); } @Override @@ -1285,7 +1783,10 @@ final int baseHashCode() { numChildJobs, parentJobId, scriptStatistics, - reservationUsage); + reservationUsage, + transactionInfo, + sessionInfo, + totalSlotMs); } final boolean baseEquals(JobStatistics jobStatistics) { @@ -1300,6 +1801,7 @@ com.google.api.services.bigquery.model.JobStatistics toPb() { statistics.setStartTime(startTime); statistics.setNumChildJobs(numChildJobs); statistics.setParentJobId(parentJobId); + statistics.setTotalSlotMs(totalSlotMs); if (scriptStatistics != null) { statistics.setScriptStatistics(scriptStatistics.toPb()); } @@ -1307,6 +1809,12 @@ com.google.api.services.bigquery.model.JobStatistics toPb() { statistics.setReservationUsage( Lists.transform(reservationUsage, ReservationUsage.TO_PB_FUNCTION)); } + if (transactionInfo != null) { + statistics.setTransactionInfo(transactionInfo.toPb()); + } + if (sessionInfo != null) { + statistics.setSessionInfo(sessionInfo.toPb()); + } return statistics; } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LegacySQLTypeName.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LegacySQLTypeName.java index 56d66cb1a1..334e3290b5 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LegacySQLTypeName.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LegacySQLTypeName.java @@ -46,54 +46,78 @@ public LegacySQLTypeName apply(String constant) { /** Variable-length binary data. */ public static final LegacySQLTypeName BYTES = type.createAndRegister("BYTES").setStandardType(StandardSQLTypeName.BYTES); + /** Variable-length character (Unicode) data. */ public static final LegacySQLTypeName STRING = type.createAndRegister("STRING").setStandardType(StandardSQLTypeName.STRING); + /** A 64-bit signed integer value. */ public static final LegacySQLTypeName INTEGER = type.createAndRegister("INTEGER").setStandardType(StandardSQLTypeName.INT64); + /** A 64-bit IEEE binary floating-point value. */ public static final LegacySQLTypeName FLOAT = type.createAndRegister("FLOAT").setStandardType(StandardSQLTypeName.FLOAT64); + /** * A decimal value with 38 digits of precision and 9 digits of scale. Note, support for this type * is limited in legacy SQL. */ public static final LegacySQLTypeName NUMERIC = type.createAndRegister("NUMERIC").setStandardType(StandardSQLTypeName.NUMERIC); + /** * A decimal value with 76+ digits of precision (the 77th digit is partial) and 38 digits of scale */ public static final LegacySQLTypeName BIGNUMERIC = type.createAndRegister("BIGNUMERIC").setStandardType(StandardSQLTypeName.BIGNUMERIC); + /** A Boolean value (true or false). */ public static final LegacySQLTypeName BOOLEAN = type.createAndRegister("BOOLEAN").setStandardType(StandardSQLTypeName.BOOL); + /** Represents an absolute point in time, with microsecond precision. 
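+   *
+   * <p>Each legacy constant is registered with its standard SQL counterpart (including the
+   * JSON, INTERVAL, and RANGE types added below), so the mapping can be read back directly; a
+   * brief sketch:
+   *
+   * <pre>{@code
+   * StandardSQLTypeName standard = LegacySQLTypeName.TIMESTAMP.getStandardType(); // TIMESTAMP
+   * StandardSQLTypeName json = LegacySQLTypeName.JSON.getStandardType(); // JSON
+   * }</pre>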
*/ public static final LegacySQLTypeName TIMESTAMP = type.createAndRegister("TIMESTAMP").setStandardType(StandardSQLTypeName.TIMESTAMP); + /** Represents a logical calendar date. Note, support for this type is limited in legacy SQL. */ public static final LegacySQLTypeName DATE = type.createAndRegister("DATE").setStandardType(StandardSQLTypeName.DATE); + /** Represents a set of geographic points, represented as a Well Known Text (WKT) string. */ public static final LegacySQLTypeName GEOGRAPHY = type.createAndRegister("GEOGRAPHY").setStandardType(StandardSQLTypeName.GEOGRAPHY); + /** * Represents a time, independent of a specific date, to microsecond precision. Note, support for * this type is limited in legacy SQL. */ public static final LegacySQLTypeName TIME = type.createAndRegister("TIME").setStandardType(StandardSQLTypeName.TIME); + /** * Represents a year, month, day, hour, minute, second, and subsecond (microsecond precision). * Note, support for this type is limited in legacy SQL. */ public static final LegacySQLTypeName DATETIME = type.createAndRegister("DATETIME").setStandardType(StandardSQLTypeName.DATETIME); + /** A record type with a nested schema. */ public static final LegacySQLTypeName RECORD = type.createAndRegister("RECORD").setStandardType(StandardSQLTypeName.STRUCT); + /** Represents JSON data */ + public static final LegacySQLTypeName JSON = + type.createAndRegister("JSON").setStandardType(StandardSQLTypeName.JSON); + + /** Represents duration or amount of time. */ + public static final LegacySQLTypeName INTERVAL = + type.createAndRegister("INTERVAL").setStandardType(StandardSQLTypeName.INTERVAL); + + /** Represents a contiguous range of values. */ + public static final LegacySQLTypeName RANGE = + type.createAndRegister("RANGE").setStandardType(StandardSQLTypeName.RANGE); + private static Map standardToLegacyMap = new HashMap<>(); static { diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadConfiguration.java index 15e82fc914..58cf986706 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadConfiguration.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadConfiguration.java @@ -16,6 +16,7 @@ package com.google.cloud.bigquery; +import com.google.api.core.InternalApi; import com.google.cloud.bigquery.JobInfo.CreateDisposition; import com.google.cloud.bigquery.JobInfo.SchemaUpdateOption; import com.google.cloud.bigquery.JobInfo.WriteDisposition; @@ -99,17 +100,17 @@ interface Builder { Builder setIgnoreUnknownValues(Boolean ignoreUnknownValues); /** - * [Experimental] Sets options allowing the schema of the destination table to be updated as a - * side effect of the load job. Schema update options are supported in two cases: when - * writeDisposition is WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination - * table is a partition of a table, specified by partition decorators. For normal tables, - * WRITE_TRUNCATE will always overwrite the schema. + * Sets options allowing the schema of the destination table to be updated as a side effect of + * the load job. Schema update options are supported in two cases: when writeDisposition is + * WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination table is a + * partition of a table, specified by partition decorators. For normal tables, WRITE_TRUNCATE + * will always overwrite the schema. 
*/ + @InternalApi Builder setSchemaUpdateOptions(List schemaUpdateOptions); - /** - * [Experimental] Sets automatic inference of the options and schema for CSV and JSON sources. - */ + /** Sets automatic inference of the options and schema for CSV and JSON sources. */ + @InternalApi Builder setAutodetect(Boolean autodetect); /** Sets the time partitioning specification for the destination table. */ @@ -125,6 +126,17 @@ interface Builder { */ Builder setUseAvroLogicalTypes(Boolean useAvroLogicalTypes); + /** + * Defines the list of possible SQL data types to which the source decimal values are converted. + * This list and the precision and the scale parameters of the decimal field determine the + * target type. In the order of NUMERIC, BIGNUMERIC, and STRING, a type is picked if it is in + * the specified list and if it supports the precision and the scale. STRING supports all + * precision and scale values. + * + * @param decimalTargetTypes decimalTargetType or {@code null} for none + */ + Builder setDecimalTargetTypes(List decimalTargetTypes); + LoadConfiguration build(); } @@ -191,18 +203,19 @@ interface Builder { DatastoreBackupOptions getDatastoreBackupOptions(); /** - * [Experimental] Returns options allowing the schema of the destination table to be updated as a - * side effect of the load job. Schema update options are supported in two cases: when - * writeDisposition is WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination - * table is a partition of a table, specified by partition decorators. For normal tables, - * WRITE_TRUNCATE will always overwrite the schema. + * Returns options allowing the schema of the destination table to be updated as a side effect of + * the load job. Schema update options are supported in two cases: when writeDisposition is + * WRITE_APPEND; when writeDisposition is WRITE_TRUNCATE and the destination table is a partition + * of a table, specified by partition decorators. For normal tables, WRITE_TRUNCATE will always + * overwrite the schema. */ + @InternalApi List getSchemaUpdateOptions(); /** - * [Experimental] Returns whether automatic inference of the options and schema for CSV and JSON - * sources is set. + * Returns whether automatic inference of the options and schema for CSV and JSON sources is set. */ + @InternalApi Boolean getAutodetect(); /** Returns the time partitioning specification defined for the destination table. */ @@ -214,6 +227,15 @@ interface Builder { /** Returns True/False. Indicates whether the logical type is interpreted. */ Boolean getUseAvroLogicalTypes(); + /** + * Returns the list of possible SQL data types to which the source decimal values are converted. + * This list and the precision and the scale parameters of the decimal field determine the target + * type. In the order of NUMERIC, BIGNUMERIC, and STRING, a type is picked if it is in the + * specified list and if it supports the precision and the scale. STRING supports all precision + * and scale values. + */ + List getDecimalTargetTypes(); + /** Returns a builder for the load configuration object. 
*/ Builder toBuilder(); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadJobConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadJobConfiguration.java index 0eae67bd66..381942cd0b 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadJobConfiguration.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadJobConfiguration.java @@ -18,10 +18,14 @@ import static com.google.common.base.Preconditions.checkNotNull; +import com.google.api.core.ApiFunction; import com.google.api.services.bigquery.model.JobConfigurationLoad; +import com.google.cloud.StringEnumType; +import com.google.cloud.StringEnumValue; import com.google.common.base.MoreObjects.ToStringHelper; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; import com.google.common.primitives.Ints; import java.util.List; import java.util.Map; @@ -37,7 +41,10 @@ public final class LoadJobConfiguration extends JobConfiguration implements Load private static final long serialVersionUID = -2673554846792429829L; private final List sourceUris; + private final String fileSetSpecType; + private final String columnNameCharacterMap; private final TableId destinationTable; + private final List decimalTargetTypes; private final EncryptionConfiguration destinationEncryptionConfiguration; private final JobInfo.CreateDisposition createDisposition; private final JobInfo.WriteDisposition writeDisposition; @@ -55,12 +62,69 @@ public final class LoadJobConfiguration extends JobConfiguration implements Load private final Long jobTimeoutMs; private final RangePartitioning rangePartitioning; private final HivePartitioningOptions hivePartitioningOptions; + private final String referenceFileSchemaUri; + private final List connectionProperties; + private final Boolean createSession; + private final String reservation; + private final String timeZone; + private final String dateFormat; + private final String datetimeFormat; + private final String timeFormat; + private final String timestampFormat; + private final SourceColumnMatch sourceColumnMatch; + private final List nullMarkers; + + public static final class SourceColumnMatch extends StringEnumValue { + private static final long serialVersionUID = 818920627219751207L; + private static final ApiFunction CONSTRUCTOR = + new ApiFunction() { + @Override + public SourceColumnMatch apply(String constant) { + return new SourceColumnMatch(constant); + } + }; + + private static final StringEnumType type = + new StringEnumType(SourceColumnMatch.class, CONSTRUCTOR); + + public static final SourceColumnMatch SOURCE_COLUMN_MATCH_UNSPECIFIED = + type.createAndRegister("SOURCE_COLUMN_MATCH_UNSPECIFIED"); + public static final SourceColumnMatch POSITION = type.createAndRegister("POSITION"); + + public static final SourceColumnMatch NAME = type.createAndRegister("NAME"); + + private SourceColumnMatch(String constant) { + super(constant); + } + + /** + * Get the SourceColumnMatch for the given String constant, and throw an exception if the + * constant is not recognized. + */ + public static SourceColumnMatch valueOfStrict(String constant) { + return type.valueOfStrict(constant); + } + + /** Get the SourceColumnMatch for the given String constant, and allow unrecognized values. */ + public static SourceColumnMatch valueOf(String constant) { + return type.valueOf(constant); + } + + /** Return the known values for SourceColumnMatch. 
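+     *
+     * <p>Like the library's other {@code StringEnumValue} types, unrecognized constants are
+     * tolerated by {@link #valueOf} while {@link #valueOfStrict} throws; for example:
+     *
+     * <pre>{@code
+     * SourceColumnMatch byName = SourceColumnMatch.NAME;
+     * SourceColumnMatch parsed = SourceColumnMatch.valueOf("POSITION");
+     * }</pre>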
*/ + public static SourceColumnMatch[] values() { + return type.values(); + } + } public static final class Builder extends JobConfiguration.Builder implements LoadConfiguration.Builder { private List sourceUris; + private String fileSetSpecType; + private String columnNameCharacterMap; + private TableId destinationTable; + private List decimalTargetTypes; private EncryptionConfiguration destinationEncryptionConfiguration; private JobInfo.CreateDisposition createDisposition; private JobInfo.WriteDisposition writeDisposition; @@ -79,6 +143,17 @@ public static final class Builder extends JobConfiguration.Builder connectionProperties; + private Boolean createSession; + private String reservation; + private String timeZone; + private String dateFormat; + private String datetimeFormat; + private String timeFormat; + private String timestampFormat; + private SourceColumnMatch sourceColumnMatch; + private List nullMarkers; private Builder() { super(Type.LOAD); @@ -87,6 +162,7 @@ private Builder() { private Builder(LoadJobConfiguration loadConfiguration) { this(); this.destinationTable = loadConfiguration.destinationTable; + this.decimalTargetTypes = loadConfiguration.decimalTargetTypes; this.createDisposition = loadConfiguration.createDisposition; this.writeDisposition = loadConfiguration.writeDisposition; this.formatOptions = loadConfiguration.formatOptions; @@ -95,6 +171,8 @@ private Builder(LoadJobConfiguration loadConfiguration) { this.schema = loadConfiguration.schema; this.ignoreUnknownValues = loadConfiguration.ignoreUnknownValues; this.sourceUris = loadConfiguration.sourceUris; + this.fileSetSpecType = loadConfiguration.fileSetSpecType; + this.columnNameCharacterMap = loadConfiguration.columnNameCharacterMap; this.schemaUpdateOptions = loadConfiguration.schemaUpdateOptions; this.autodetect = loadConfiguration.autodetect; this.destinationEncryptionConfiguration = @@ -106,12 +184,26 @@ private Builder(LoadJobConfiguration loadConfiguration) { this.jobTimeoutMs = loadConfiguration.jobTimeoutMs; this.rangePartitioning = loadConfiguration.rangePartitioning; this.hivePartitioningOptions = loadConfiguration.hivePartitioningOptions; + this.referenceFileSchemaUri = loadConfiguration.referenceFileSchemaUri; + this.connectionProperties = loadConfiguration.connectionProperties; + this.createSession = loadConfiguration.createSession; + this.reservation = loadConfiguration.reservation; + this.timeZone = loadConfiguration.timeZone; + this.dateFormat = loadConfiguration.dateFormat; + this.datetimeFormat = loadConfiguration.datetimeFormat; + this.timeFormat = loadConfiguration.timeFormat; + this.timestampFormat = loadConfiguration.timestampFormat; + this.sourceColumnMatch = loadConfiguration.sourceColumnMatch; + this.nullMarkers = loadConfiguration.nullMarkers; } private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { this(); JobConfigurationLoad loadConfigurationPb = configurationPb.getLoad(); this.destinationTable = TableId.fromPb(loadConfigurationPb.getDestinationTable()); + if (loadConfigurationPb.getDecimalTargetTypes() != null) { + this.decimalTargetTypes = ImmutableList.copyOf(loadConfigurationPb.getDecimalTargetTypes()); + } if (loadConfigurationPb.getCreateDisposition() != null) { this.createDisposition = JobInfo.CreateDisposition.valueOf(loadConfigurationPb.getCreateDisposition()); @@ -127,6 +219,7 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur this.nullMarker = loadConfigurationPb.getNullMarker(); } if 
(loadConfigurationPb.getAllowJaggedRows() != null + || loadConfigurationPb.getPreserveAsciiControlCharacters() != null || loadConfigurationPb.getAllowQuotedNewlines() != null || loadConfigurationPb.getEncoding() != null || loadConfigurationPb.getFieldDelimiter() != null @@ -137,6 +230,10 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur .setEncoding(loadConfigurationPb.getEncoding()) .setFieldDelimiter(loadConfigurationPb.getFieldDelimiter()) .setQuote(loadConfigurationPb.getQuote()); + if (loadConfigurationPb.getPreserveAsciiControlCharacters() != null) { + builder.setPreserveAsciiControlCharacters( + loadConfigurationPb.getPreserveAsciiControlCharacters()); + } if (loadConfigurationPb.getAllowJaggedRows() != null) { builder.setAllowJaggedRows(loadConfigurationPb.getAllowJaggedRows()); } @@ -157,6 +254,12 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur if (loadConfigurationPb.getSourceUris() != null) { this.sourceUris = ImmutableList.copyOf(configurationPb.getLoad().getSourceUris()); } + if (loadConfigurationPb.getFileSetSpecType() != null) { + this.fileSetSpecType = loadConfigurationPb.getFileSetSpecType(); + } + if (loadConfigurationPb.getColumnNameCharacterMap() != null) { + this.columnNameCharacterMap = loadConfigurationPb.getColumnNameCharacterMap(); + } if (loadConfigurationPb.getSchemaUpdateOptions() != null) { ImmutableList.Builder schemaUpdateOptionsBuilder = new ImmutableList.Builder<>(); @@ -193,6 +296,41 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur this.hivePartitioningOptions = HivePartitioningOptions.fromPb(loadConfigurationPb.getHivePartitioningOptions()); } + if (loadConfigurationPb.getReferenceFileSchemaUri() != null) { + this.referenceFileSchemaUri = loadConfigurationPb.getReferenceFileSchemaUri(); + } + if (loadConfigurationPb.getConnectionProperties() != null) { + + this.connectionProperties = + Lists.transform( + loadConfigurationPb.getConnectionProperties(), ConnectionProperty.FROM_PB_FUNCTION); + } + createSession = loadConfigurationPb.getCreateSession(); + if (configurationPb.getReservation() != null) { + this.reservation = configurationPb.getReservation(); + } + if (loadConfigurationPb.getTimeZone() != null) { + this.timeZone = loadConfigurationPb.getTimeZone(); + } + if (loadConfigurationPb.getDateFormat() != null) { + this.dateFormat = loadConfigurationPb.getDateFormat(); + } + if (loadConfigurationPb.getDatetimeFormat() != null) { + this.datetimeFormat = loadConfigurationPb.getDatetimeFormat(); + } + if (loadConfigurationPb.getTimeFormat() != null) { + this.timeFormat = loadConfigurationPb.getTimeFormat(); + } + if (loadConfigurationPb.getTimestampFormat() != null) { + this.timestampFormat = loadConfigurationPb.getTimestampFormat(); + } + if (loadConfigurationPb.getSourceColumnMatch() != null) { + this.sourceColumnMatch = + SourceColumnMatch.valueOf(loadConfigurationPb.getSourceColumnMatch()); + } + if (loadConfigurationPb.getNullMarkers() != null) { + this.nullMarkers = loadConfigurationPb.getNullMarkers(); + } } @Override @@ -278,6 +416,45 @@ public Builder setSourceUris(List sourceUris) { return this; } + /** + * Defines how to interpret files denoted by URIs. By default the files are assumed to be data + * files (this can be specified explicitly via FILE_SET_SPEC_TYPE_FILE_SYSTEM_MATCH). A second + * option is "FILE_SET_SPEC_TYPE_NEW_LINE_DELIMITED_MANIFEST" which interprets each file as a + * manifest file, where each line is a reference to a file. 
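+     *
+     * <p>For example, loading through a manifest (bucket and table names are hypothetical):
+     *
+     * <pre>{@code
+     * LoadJobConfiguration configuration =
+     *     LoadJobConfiguration.newBuilder(
+     *             TableId.of("my_dataset", "my_table"), "gs://my-bucket/manifest.txt")
+     *         .setFormatOptions(FormatOptions.csv())
+     *         .setFileSetSpecType("FILE_SET_SPEC_TYPE_NEW_LINE_DELIMITED_MANIFEST")
+     *         .build();
+     * }</pre>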
+     */
+    public Builder setFileSetSpecType(String fileSetSpecType) {
+      this.fileSetSpecType = fileSetSpecType;
+      return this;
+    }
+
+    /**
+     * [Optional] Character map supported for column names in CSV/Parquet loads. Defaults to
+     * STRICT and can be overridden by Project Config Service. Using this option with unsupported
+     * load formats will result in an error.
+     *
+     * @see
+     *     ColumnNameCharacterMap
+     */
+    public Builder setColumnNameCharacterMap(String columnNameCharacterMap) {
+      this.columnNameCharacterMap = columnNameCharacterMap;
+      return this;
+    }
+
+    /**
+     * Defines the list of possible SQL data types to which the source decimal values are
+     * converted. This list and the precision and the scale parameters of the decimal field
+     * determine the target type. In the order of NUMERIC, BIGNUMERIC, and STRING, a type is
+     * picked if it is in the specified list and if it supports the precision and the scale.
+     * STRING supports all precision and scale values.
+     *
+     * @param decimalTargetTypes decimalTargetType or {@code null} for none
+     */
+    public Builder setDecimalTargetTypes(List<String> decimalTargetTypes) {
+      this.decimalTargetTypes = decimalTargetTypes;
+      return this;
+    }
+
     public Builder setAutodetect(Boolean autodetect) {
       this.autodetect = autodetect;
       return this;
@@ -331,6 +508,96 @@ public Builder setHivePartitioningOptions(HivePartitioningOptions hivePartitioni
       return this;
     }

+    /**
+     * When creating an external table, the user can provide a reference file with the table
+     * schema. This is enabled for the following formats: AVRO, PARQUET, ORC.
+     *
+     * @param referenceFileSchemaUri or {@code null} for none
+     */
+    public Builder setReferenceFileSchemaUri(String referenceFileSchemaUri) {
+      this.referenceFileSchemaUri = referenceFileSchemaUri;
+      return this;
+    }
+
+    public Builder setConnectionProperties(List<ConnectionProperty> connectionProperties) {
+      this.connectionProperties = ImmutableList.copyOf(connectionProperties);
+      return this;
+    }
+
+    public Builder setCreateSession(Boolean createSession) {
+      this.createSession = createSession;
+      return this;
+    }
+
+    /**
+     * [Optional] The reservation that the job would use. The user can specify a reservation to
+     * execute the job. If a reservation is not set, it is determined based on the rules defined
+     * by the reservation assignments. The expected format is
+     * `projects/{project}/locations/{location}/reservations/{reservation}`.
+     *
+     * @param reservation reservation or {@code null} for none
+     */
+    public Builder setReservation(String reservation) {
+      this.reservation = reservation;
+      return this;
+    }
+
+    /**
+     * [Experimental] Default time zone that will apply when parsing timestamp values that have no
+     * specific time zone.
+     */
+    public Builder setTimeZone(String timeZone) {
+      this.timeZone = timeZone;
+      return this;
+    }
+
+    /** Date format used for parsing DATE values. */
+    public Builder setDateFormat(String dateFormat) {
+      this.dateFormat = dateFormat;
+      return this;
+    }
+
+    /** Datetime format used for parsing DATETIME values. */
+    public Builder setDatetimeFormat(String datetimeFormat) {
+      this.datetimeFormat = datetimeFormat;
+      return this;
+    }
+
+    /** Time format used for parsing TIME values. */
+    public Builder setTimeFormat(String timeFormat) {
+      this.timeFormat = timeFormat;
+      return this;
+    }
+
+    /** Timestamp format used for parsing TIMESTAMP values. */
+    public Builder setTimestampFormat(String timestampFormat) {
+      this.timestampFormat = timestampFormat;
+      return this;
+    }
+
+    /**
+     * Controls the strategy used to match loaded columns to the schema.
+     * If not set, a sensible default is chosen based on how the schema is provided. If
+     * autodetect is used, then columns are matched by name. Otherwise, columns are matched by
+     * position. This is done to keep the behavior backward-compatible.
+     */
+    public Builder setSourceColumnMatch(SourceColumnMatch sourceColumnMatch) {
+      this.sourceColumnMatch = sourceColumnMatch;
+      return this;
+    }
+
+    /**
+     * A list of strings that represent SQL NULL values in a CSV file. Only one of null_marker
+     * and null_markers can be set; setting both at the same time results in a user error. Any
+     * string listed in null_markers, including the empty string, is interpreted as SQL NULL.
+     * This applies to all column types.
+     */
+    public Builder setNullMarkers(List<String> nullMarkers) {
+      this.nullMarkers = nullMarkers;
+      return this;
+    }
+
     @Override
     public LoadJobConfiguration build() {
       return new LoadJobConfiguration(this);
@@ -340,7 +607,10 @@ public LoadJobConfiguration build() {
   private LoadJobConfiguration(Builder builder) {
     super(builder);
     this.sourceUris = builder.sourceUris;
+    this.fileSetSpecType = builder.fileSetSpecType;
+    this.columnNameCharacterMap = builder.columnNameCharacterMap;
     this.destinationTable = builder.destinationTable;
+    this.decimalTargetTypes = builder.decimalTargetTypes;
     this.createDisposition = builder.createDisposition;
     this.writeDisposition = builder.writeDisposition;
     this.formatOptions = builder.formatOptions;
@@ -358,6 +628,17 @@ private LoadJobConfiguration(Builder builder) {
     this.jobTimeoutMs = builder.jobTimeoutMs;
     this.rangePartitioning = builder.rangePartitioning;
     this.hivePartitioningOptions = builder.hivePartitioningOptions;
+    this.referenceFileSchemaUri = builder.referenceFileSchemaUri;
+    this.connectionProperties = builder.connectionProperties;
+    this.createSession = builder.createSession;
+    this.reservation = builder.reservation;
+    this.timeZone = builder.timeZone;
+    this.dateFormat = builder.dateFormat;
+    this.datetimeFormat = builder.datetimeFormat;
+    this.timeFormat = builder.timeFormat;
+    this.timestampFormat = builder.timestampFormat;
+    this.sourceColumnMatch = builder.sourceColumnMatch;
+    this.nullMarkers = builder.nullMarkers;
   }

   @Override
@@ -390,6 +671,10 @@ public CsvOptions getCsvOptions() {
     return formatOptions instanceof CsvOptions ? (CsvOptions) formatOptions : null;
   }

+  public ParquetOptions getParquetOptions() {
+    return formatOptions instanceof ParquetOptions ? (ParquetOptions) formatOptions : null;
+  }
+
   @Override
   public DatastoreBackupOptions getDatastoreBackupOptions() {
     return formatOptions instanceof DatastoreBackupOptions
@@ -426,6 +711,25 @@ public List<String> getSourceUris() {
     return sourceUris;
   }

+  public String getFileSetSpecType() {
+    return fileSetSpecType;
+  }
+
+  /**
+   * Returns the column name character map used in CSV/Parquet loads.
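+   *
+   * <p>For illustration, the new CSV parsing options on the builder combine like this (table,
+   * URI, and marker strings are placeholders):
+   *
+   * <pre>{@code
+   * LoadJobConfiguration configuration =
+   *     LoadJobConfiguration.newBuilder(
+   *             TableId.of("my_dataset", "my_table"), "gs://my-bucket/data.csv")
+   *         .setFormatOptions(FormatOptions.csv())
+   *         .setAutodetect(true)
+   *         .setSourceColumnMatch(SourceColumnMatch.NAME) // match columns by name
+   *         .setNullMarkers(ImmutableList.of("NULL", "\\N")) // each is read as SQL NULL
+   *         .build();
+   * }</pre>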
+ * + * @see + * ColumnNameCharacterMap + */ + public String getColumnNameCharacterMap() { + return columnNameCharacterMap; + } + + public List getDecimalTargetTypes() { + return decimalTargetTypes; + } + public Boolean getAutodetect() { return autodetect; } @@ -469,6 +773,61 @@ public HivePartitioningOptions getHivePartitioningOptions() { return hivePartitioningOptions; } + public String getReferenceFileSchemaUri() { + return referenceFileSchemaUri; + } + + public List getConnectionProperties() { + return connectionProperties; + } + + public Boolean getCreateSession() { + return createSession; + } + + /** Returns the reservation associated with this job */ + public String getReservation() { + return reservation; + } + + /** + * Returns the time zone used when parsing timestamp values that don't have specific time zone + * information. + */ + public String getTimeZone() { + return timeZone; + } + + /** Returns the format used to parse DATE values. */ + public String getDateFormat() { + return dateFormat; + } + + /** Returns the format used to parse DATETIME values. */ + public String getDatetimeFormat() { + return datetimeFormat; + } + + /** Returns the format used to parse TIME values. */ + public String getTimeFormat() { + return timeFormat; + } + + /** Returns the format used to parse TIMESTAMP values. */ + public String getTimestampFormat() { + return timestampFormat; + } + + /** Returns the strategy used to match loaded columns to the schema, either POSITION or NAME. */ + public SourceColumnMatch getSourceColumnMatch() { + return sourceColumnMatch; + } + + /** Returns a list of strings represented as SQL NULL value in a CSV file. */ + public List getNullMarkers() { + return nullMarkers; + } + @Override public Builder toBuilder() { return new Builder(this); @@ -478,6 +837,7 @@ public Builder toBuilder() { ToStringHelper toStringHelper() { return super.toStringHelper() .add("destinationTable", destinationTable) + .add("decimalTargetTypes", decimalTargetTypes) .add("destinationEncryptionConfiguration", destinationEncryptionConfiguration) .add("createDisposition", createDisposition) .add("writeDisposition", writeDisposition) @@ -487,6 +847,8 @@ ToStringHelper toStringHelper() { .add("schema", schema) .add("ignoreUnknownValue", ignoreUnknownValues) .add("sourceUris", sourceUris) + .add("fileSetSpecType", fileSetSpecType) + .add("columnNameCharacterMap", columnNameCharacterMap) .add("schemaUpdateOptions", schemaUpdateOptions) .add("autodetect", autodetect) .add("timePartitioning", timePartitioning) @@ -495,7 +857,18 @@ ToStringHelper toStringHelper() { .add("labels", labels) .add("jobTimeoutMs", jobTimeoutMs) .add("rangePartitioning", rangePartitioning) - .add("hivePartitioningOptions", hivePartitioningOptions); + .add("hivePartitioningOptions", hivePartitioningOptions) + .add("referenceFileSchemaUri", referenceFileSchemaUri) + .add("connectionProperties", connectionProperties) + .add("createSession", createSession) + .add("reservation", reservation) + .add("timeZone", timeZone) + .add("dateFormat", dateFormat) + .add("datetimeFormat", datetimeFormat) + .add("timeFormat", timeFormat) + .add("timestampFormat", timestampFormat) + .add("sourceColumnMatch", sourceColumnMatch) + .add("nullMarkers", nullMarkers); } @Override @@ -539,12 +912,17 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { .setAllowJaggedRows(csvOptions.allowJaggedRows()) .setAllowQuotedNewlines(csvOptions.allowQuotedNewLines()) .setEncoding(csvOptions.getEncoding()) + 
.setPreserveAsciiControlCharacters(csvOptions.getPreserveAsciiControlCharacters()) .setQuote(csvOptions.getQuote()); if (csvOptions.getSkipLeadingRows() != null) { // todo(mziccard) remove checked cast or comment when #1044 is closed loadConfigurationPb.setSkipLeadingRows(Ints.checkedCast(csvOptions.getSkipLeadingRows())); } } + if (getParquetOptions() != null) { + ParquetOptions parquetOptions = getParquetOptions(); + loadConfigurationPb.setParquetOptions(parquetOptions.toPb()); + } if (schema != null) { loadConfigurationPb.setSchema(schema.toPb()); } @@ -560,6 +938,15 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { if (sourceUris != null) { loadConfigurationPb.setSourceUris(ImmutableList.copyOf(sourceUris)); } + if (fileSetSpecType != null) { + loadConfigurationPb.setFileSetSpecType(fileSetSpecType); + } + if (columnNameCharacterMap != null) { + loadConfigurationPb.setColumnNameCharacterMap(columnNameCharacterMap); + } + if (decimalTargetTypes != null) { + loadConfigurationPb.setDecimalTargetTypes(ImmutableList.copyOf(decimalTargetTypes)); + } if (schemaUpdateOptions != null) { ImmutableList.Builder schemaUpdateOptionsBuilder = new ImmutableList.Builder<>(); for (JobInfo.SchemaUpdateOption schemaUpdateOption : schemaUpdateOptions) { @@ -591,6 +978,41 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { if (hivePartitioningOptions != null) { loadConfigurationPb.setHivePartitioningOptions(hivePartitioningOptions.toPb()); } + if (referenceFileSchemaUri != null) { + loadConfigurationPb.setReferenceFileSchemaUri(referenceFileSchemaUri); + } + if (connectionProperties != null) { + loadConfigurationPb.setConnectionProperties( + Lists.transform(connectionProperties, ConnectionProperty.TO_PB_FUNCTION)); + } + if (createSession != null) { + loadConfigurationPb.setCreateSession(createSession); + } + if (reservation != null) { + jobConfiguration.setReservation(reservation); + } + if (timeZone != null) { + loadConfigurationPb.setTimeZone(timeZone); + } + if (dateFormat != null) { + loadConfigurationPb.setDateFormat(dateFormat); + } + if (datetimeFormat != null) { + loadConfigurationPb.setDatetimeFormat(datetimeFormat); + } + if (timeFormat != null) { + loadConfigurationPb.setTimeFormat(timeFormat); + } + if (timestampFormat != null) { + loadConfigurationPb.setTimestampFormat(timestampFormat); + } + if (sourceColumnMatch != null) { + loadConfigurationPb.setSourceColumnMatch(sourceColumnMatch.toString()); + } + if (nullMarkers != null) { + loadConfigurationPb.setNullMarkers(nullMarkers); + } + jobConfiguration.setLoad(loadConfigurationPb); return jobConfiguration; } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/MaterializedViewDefinition.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/MaterializedViewDefinition.java index bf3a913a9c..69712bb3dd 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/MaterializedViewDefinition.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/MaterializedViewDefinition.java @@ -57,6 +57,25 @@ public abstract static class Builder @Override public abstract Builder setType(Type type); + /** + * Sets the time partitioning configuration for the materialized view. If not set, the + * materialized view is not time-partitioned. + */ + public abstract Builder setTimePartitioning(TimePartitioning timePartitioning); + + /** + * Sets the range partitioning configuration for the materialized view. 
Only one of + * timePartitioning and rangePartitioning should be specified. + */ + public abstract Builder setRangePartitioning(RangePartitioning rangePartitioning); + + /** + * Set the clustering configuration for the materialized view. If not set, the materialized view + * is not clustered. BigQuery supports clustering for both partitioned and non-partitioned + * materialized views. + */ + public abstract Builder setClustering(Clustering clustering); + /** Creates a {@code MaterializedViewDefinition} object. */ @Override public abstract MaterializedViewDefinition build(); @@ -86,6 +105,27 @@ public abstract static class Builder @Nullable public abstract Long getRefreshIntervalMs(); + /** + * Returns the time partitioning configuration for this table. If {@code null}, the table is not + * time-partitioned. + */ + @Nullable + public abstract TimePartitioning getTimePartitioning(); + + /** + * Returns the range partitioning configuration for this table. If {@code null}, the table is not + * range-partitioned. + */ + @Nullable + public abstract RangePartitioning getRangePartitioning(); + + /** + * Returns the clustering configuration for this table. If {@code null}, the table is not + * clustered. + */ + @Nullable + public abstract Clustering getClustering(); + /** Returns a builder for the {@code MaterializedViewDefinition} object. */ public abstract Builder toBuilder(); @@ -107,6 +147,15 @@ Table toPb() { materializedViewDefinition.setRefreshIntervalMs(getRefreshIntervalMs()); } tablePb.setMaterializedView(materializedViewDefinition); + if (getTimePartitioning() != null) { + tablePb.setTimePartitioning(getTimePartitioning().toPb()); + } + if (getRangePartitioning() != null) { + tablePb.setRangePartitioning(getRangePartitioning().toPb()); + } + if (getClustering() != null) { + tablePb.setClustering(getClustering().toPb()); + } return tablePb; } @@ -149,6 +198,15 @@ static MaterializedViewDefinition fromPb(Table tablePb) { if (materializedViewDefinition.getRefreshIntervalMs() != null) { builder.setRefreshIntervalMs(materializedViewDefinition.getRefreshIntervalMs()); } + if (tablePb.getTimePartitioning() != null) { + builder.setTimePartitioning(TimePartitioning.fromPb(tablePb.getTimePartitioning())); + } + if (tablePb.getRangePartitioning() != null) { + builder.setRangePartitioning(RangePartitioning.fromPb(tablePb.getRangePartitioning())); + } + if (tablePb.getClustering() != null) { + builder.setClustering(Clustering.fromPb(tablePb.getClustering())); + } } return builder.build(); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/MetadataCacheStats.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/MetadataCacheStats.java new file mode 100644 index 0000000000..482571d5f0 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/MetadataCacheStats.java @@ -0,0 +1,76 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
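Putting the new MaterializedViewDefinition hooks together, a minimal sketch; the query, dataset, and column names are placeholders, and only one of time and range partitioning should be set, per the Javadoc above:

import com.google.cloud.bigquery.Clustering;
import com.google.cloud.bigquery.MaterializedViewDefinition;
import com.google.cloud.bigquery.TimePartitioning;
import com.google.common.collect.ImmutableList;

public class MaterializedViewSketch {
  public static void main(String[] args) {
    MaterializedViewDefinition definition =
        MaterializedViewDefinition.newBuilder(
                "SELECT product, SUM(amount) AS total FROM my_dataset.sales GROUP BY product")
            // Day-based time partitioning; range partitioning would be the alternative.
            .setTimePartitioning(TimePartitioning.of(TimePartitioning.Type.DAY))
            // Clustering works for both partitioned and non-partitioned materialized views.
            .setClustering(
                Clustering.newBuilder().setFields(ImmutableList.of("product")).build())
            .build();
    System.out.println(definition);
  }
}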
+ */ + +package com.google.cloud.bigquery; + +import com.google.api.services.bigquery.model.MetadataCacheStatistics; +import com.google.auto.value.AutoValue; +import java.io.Serializable; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nullable; + +/** + * Represents statistics for metadata caching in BigLake tables. + * + * @see BigLake Tables + */ +@AutoValue +public abstract class MetadataCacheStats implements Serializable { + + private static final long serialVersionUID = 1L; + + @AutoValue.Builder + public abstract static class Builder { + /** + * Sets the per-table metadata cache usage for the job, including the free-form, + * human-readable reason when metadata caching was unused. + */ + public abstract MetadataCacheStats.Builder setTableMetadataCacheUsage( + List<TableMetadataCacheUsage> tableMetadataCacheUsage); + + /** Creates a {@code MetadataCacheStats} object. */ + public abstract MetadataCacheStats build(); + } + + public abstract Builder toBuilder(); + + public static Builder newBuilder() { + return new AutoValue_MetadataCacheStats.Builder(); + } + + @Nullable + public abstract List<TableMetadataCacheUsage> getTableMetadataCacheUsage(); + + MetadataCacheStatistics toPb() { + MetadataCacheStatistics metadataCacheStatistics = new MetadataCacheStatistics(); + if (getTableMetadataCacheUsage() != null) { + metadataCacheStatistics.setTableMetadataCacheUsage( + getTableMetadataCacheUsage().stream() + .map(TableMetadataCacheUsage::toPb) + .collect(Collectors.toList())); + } + return metadataCacheStatistics; + } + + static MetadataCacheStats fromPb(MetadataCacheStatistics metadataCacheStatistics) { + Builder builder = newBuilder(); + if (metadataCacheStatistics.getTableMetadataCacheUsage() != null) { + builder.setTableMetadataCacheUsage( + metadataCacheStatistics.getTableMetadataCacheUsage().stream() + .map(TableMetadataCacheUsage::fromPb) + .collect(Collectors.toList())); + } + return builder.build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelId.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelId.java index 9356c69355..d363ed8359 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelId.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelId.java @@ -21,6 +21,7 @@ import com.google.api.services.bigquery.model.ModelReference; import com.google.common.base.Function; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.Objects; @@ -61,8 +62,6 @@ public String getModel() { } private ModelId(String project, String dataset, String model) { - checkArgument(!isNullOrEmpty(dataset), "Provided dataset is null or empty"); - checkArgument(!isNullOrEmpty(model), "Provided model is null or empty"); this.project = project; this.dataset = dataset; this.model = model; @@ -105,4 +104,12 @@ ModelReference toPb() { static ModelId fromPb(ModelReference modelRef) { return new ModelId(modelRef.getProjectId(), modelRef.getDatasetId(), modelRef.getModelId()); } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .put("bq.model.project", this.getProject()) + .put("bq.model.dataset", this.getDataset()) + .put("bq.model.id", this.getModel()) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelInfo.java index 83603cbd23..3039483b52 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelInfo.java +++
b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ModelInfo.java @@ -26,6 +26,7 @@ import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.Collections; import java.util.List; @@ -67,7 +68,7 @@ public Model apply(ModelInfo ModelInfo) { private final Long creationTime; private final Long lastModifiedTime; private final Long expirationTime; - private final Labels labels; + private final Annotations labels; private final String location; private final ImmutableList trainingRunList; private final ImmutableList featureColumnList; @@ -132,7 +133,7 @@ static class BuilderImpl extends Builder { private Long creationTime; private Long lastModifiedTime; private Long expirationTime; - private Labels labels = Labels.ZERO; + private Annotations labels = Annotations.ZERO; private String location; private List trainingRunList = Collections.emptyList(); private List labelColumnList = Collections.emptyList(); @@ -169,7 +170,7 @@ static class BuilderImpl extends Builder { this.creationTime = modelPb.getCreationTime(); this.lastModifiedTime = modelPb.getLastModifiedTime(); this.expirationTime = modelPb.getExpirationTime(); - this.labels = Labels.fromPb(modelPb.getLabels()); + this.labels = Annotations.fromPb(modelPb.getLabels()); this.location = modelPb.getLocation(); if (modelPb.getTrainingRuns() != null) { this.trainingRunList = modelPb.getTrainingRuns(); @@ -238,7 +239,7 @@ public Builder setModelId(ModelId modelId) { @Override public Builder setLabels(Map labels) { - this.labels = Labels.fromUser(labels); + this.labels = Annotations.fromUser(labels); return this; } @@ -453,4 +454,19 @@ Model toPb() { static ModelInfo fromPb(Model modelPb) { return new BuilderImpl(modelPb).build(); } + + private static String getFieldAsString(Object field) { + return field == null ? "null" : field.toString(); + } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .putAll(this.getModelId().getOtelAttributes()) + .put("bq.model.type", getFieldAsString(this.getModelType())) + .put("bq.model.creation_time", getFieldAsString(this.getCreationTime())) + .put("bq.model.last_modified_time", getFieldAsString(this.getLastModifiedTime())) + .put("bq.model.expiration_time", getFieldAsString(this.getExpirationTime())) + .put("bq.model.location", getFieldAsString(this.getLocation())) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Parameter.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Parameter.java new file mode 100644 index 0000000000..9959feab91 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Parameter.java @@ -0,0 +1,70 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
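The getOtelAttributes() helpers above are protected and consumed internally when tracing is enabled; this sketch only mirrors the attribute keys ModelId assembles, with placeholder values:

import io.opentelemetry.api.common.Attributes;

public class ModelAttributesSketch {
  public static void main(String[] args) {
    // Same keys ModelId.getOtelAttributes() uses; the values here are placeholders.
    Attributes attributes =
        Attributes.builder()
            .put("bq.model.project", "my-project")
            .put("bq.model.dataset", "my_dataset")
            .put("bq.model.id", "my_model")
            .build();
    System.out.println(attributes);
  }
}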
+ */ + +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import javax.annotation.Nullable; + +/** Wrapper class for query parameters. */ +@AutoValue +public abstract class Parameter { + Parameter() { + // Package private so users can't subclass it but AutoValue can. + } + + /** + * Returns the name of the query parameter. If unset, this is a positional parameter. Otherwise, + * should be unique within a query. + * + * @return value or {@code null} for none + */ + @Nullable + public abstract String getName(); + + /** Returns the value for a query parameter along with its type. */ + public abstract QueryParameterValue getValue(); + + /** Returns a builder pre-populated using the current values of this {@code Parameter}. */ + public abstract Builder toBuilder(); + + /** Returns a builder for a {@code Parameter} object. */ + public static Builder newBuilder() { + return new AutoValue_Parameter.Builder(); + } + + @AutoValue.Builder + public abstract static class Builder { + + /** + * [Optional] Sets the name of the query parameter. If unset, this is a positional parameter. + * Otherwise, should be unique within a query. + * + * @param name name or {@code null} for none + */ + public abstract Builder setName(String name); + + /** + * Sets the value for a query parameter along with its type. + * + * @param parameter parameter or {@code null} for none + */ + public abstract Builder setValue(QueryParameterValue parameter); + + /** Creates a {@code Parameter} object. */ + public abstract Parameter build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ParquetOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ParquetOptions.java new file mode 100644 index 0000000000..cc85eeda68 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ParquetOptions.java @@ -0,0 +1,157 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.common.base.MoreObjects; +import java.util.Objects; + +public class ParquetOptions extends FormatOptions { + + private static final long serialVersionUID = 1993L; + + private final Boolean enableListInference; + private final Boolean enumAsString; + private final String mapTargetType; + + public Boolean getEnableListInference() { + return enableListInference; + } + + public Boolean getEnumAsString() { + return enumAsString; + } + + /** Returns how the Parquet map is represented. */ + public String getMapTargetType() { + return mapTargetType; + } + + /** A builder for {@code ParquetOptions} objects.
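A minimal sketch of the Parameter wrapper defined above; the name and value are placeholders, and omitting setName produces a positional parameter:

import com.google.cloud.bigquery.Parameter;
import com.google.cloud.bigquery.QueryParameterValue;

public class ParameterSketch {
  public static void main(String[] args) {
    // A named parameter; skip setName(...) to make it positional.
    Parameter corpus =
        Parameter.newBuilder()
            .setName("corpus")
            .setValue(QueryParameterValue.string("romeoandjuliet"))
            .build();
    System.out.println(corpus.getName() + " = " + corpus.getValue().getValue());
  }
}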
*/ + public static final class Builder { + private Boolean enableListInference; + private Boolean enumAsString; + private String mapTargetType; + + private Builder() {} + + private Builder(ParquetOptions parquetOptions) { + this.enableListInference = parquetOptions.enableListInference; + this.enumAsString = parquetOptions.enumAsString; + this.mapTargetType = parquetOptions.mapTargetType; + } + + public Builder setEnableListInference(Boolean enableListInference) { + this.enableListInference = enableListInference; + return this; + } + + public Builder setEnumAsString(Boolean enumAsString) { + this.enumAsString = enumAsString; + return this; + } + + /** + * [Optional] Indicates how to represent a Parquet map if present. + * + * @see + * MapTargetType + */ + public Builder setMapTargetType(String mapTargetType) { + this.mapTargetType = mapTargetType; + return this; + } + + public ParquetOptions build() { + return new ParquetOptions(this); + } + } + + /** Returns a builder for the {@link ParquetOptions} object. */ + public Builder toBuilder() { + return new Builder(this); + } + + ParquetOptions(Builder builder) { + super(FormatOptions.PARQUET); + enableListInference = builder.enableListInference; + enumAsString = builder.enumAsString; + mapTargetType = builder.mapTargetType; + } + + @Override + public String toString() { + return MoreObjects.toStringHelper(this) + .add("enableListInference", enableListInference) + .add("enumAsString", enumAsString) + .add("mapTargetType", mapTargetType) + .toString(); + } + + @Override + public final int hashCode() { + return Objects.hash(enableListInference, enumAsString, mapTargetType); + } + + @Override + public final boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (obj == null || !obj.getClass().equals(ParquetOptions.class)) { + return false; + } + ParquetOptions other = (ParquetOptions) obj; + // These fields are boxed Booleans, so == would compare identity rather than value. + return Objects.equals(enableListInference, other.enableListInference) + && Objects.equals(enumAsString, other.enumAsString) + && Objects.equals(mapTargetType, other.mapTargetType); + } + + /** Returns a builder for a {@link ParquetOptions} object.
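A usage sketch for ParquetOptions; "ARRAY_OF_STRUCT" is assumed to be one of the REST API's MapTargetType values referenced by the setter's Javadoc, and the table and bucket names are placeholders:

import com.google.cloud.bigquery.LoadJobConfiguration;
import com.google.cloud.bigquery.ParquetOptions;
import com.google.cloud.bigquery.TableId;

public class ParquetOptionsSketch {
  public static void main(String[] args) {
    ParquetOptions parquetOptions =
        ParquetOptions.newBuilder()
            .setEnableListInference(true)
            .setEnumAsString(true)
            .setMapTargetType("ARRAY_OF_STRUCT") // assumed REST enum value
            .build();
    // ParquetOptions extends FormatOptions, so it plugs into a load job directly.
    LoadJobConfiguration load =
        LoadJobConfiguration.newBuilder(
                TableId.of("my_dataset", "my_table"),
                "gs://my-bucket/data/*.parquet",
                parquetOptions)
            .build();
    System.out.println(load.getParquetOptions());
  }
}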
*/ + public static ParquetOptions.Builder newBuilder() { + return new ParquetOptions.Builder(); + } + + static ParquetOptions fromPb( + com.google.api.services.bigquery.model.ParquetOptions parquetOptions) { + Builder builder = newBuilder(); + if (parquetOptions.getEnableListInference() != null) { + builder.setEnableListInference(parquetOptions.getEnableListInference()); + } + if (parquetOptions.getEnumAsString() != null) { + builder.setEnumAsString(parquetOptions.getEnumAsString()); + } + if (parquetOptions.getMapTargetType() != null) { + builder.setMapTargetType(parquetOptions.getMapTargetType()); + } + return builder.build(); + } + + com.google.api.services.bigquery.model.ParquetOptions toPb() { + com.google.api.services.bigquery.model.ParquetOptions parquetOptions = + new com.google.api.services.bigquery.model.ParquetOptions(); + if (enableListInference != null) { + parquetOptions.setEnableListInference(enableListInference); + } + if (enumAsString != null) { + parquetOptions.setEnumAsString(enumAsString); + } + if (mapTargetType != null) { + parquetOptions.setMapTargetType(mapTargetType); + } + return parquetOptions; + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/PrimaryKey.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/PrimaryKey.java new file mode 100644 index 0000000000..a8474cf0fb --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/PrimaryKey.java @@ -0,0 +1,68 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import com.google.common.annotations.VisibleForTesting; +import java.io.Serializable; +import java.util.List; +import javax.annotation.Nullable; + +@AutoValue +public abstract class PrimaryKey implements Serializable { + public static PrimaryKey.Builder newBuilder() { + return new AutoValue_PrimaryKey.Builder(); + } + + static PrimaryKey fromPb( + com.google.api.services.bigquery.model.TableConstraints.PrimaryKey primaryKey) { + PrimaryKey.Builder builder = newBuilder(); + + if (primaryKey.getColumns() != null) { + builder.setColumns(primaryKey.getColumns()); + } + + return builder.build(); + } + + com.google.api.services.bigquery.model.TableConstraints.PrimaryKey toPb() { + + com.google.api.services.bigquery.model.TableConstraints.PrimaryKey primaryKey = + new com.google.api.services.bigquery.model.TableConstraints.PrimaryKey(); + if (getColumns() != null) { + primaryKey.setColumns(getColumns()); + } + return primaryKey; + } + + @Nullable + public abstract List getColumns(); + + /** Returns a builder for primary key. */ + @VisibleForTesting + public abstract PrimaryKey.Builder toBuilder(); + + @AutoValue.Builder + public abstract static class Builder { + + /** The column names that are primary keys. * */ + public abstract PrimaryKey.Builder setColumns(List columns); + + /** Creates a {@code PrimaryKey} object. 
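A minimal PrimaryKey sketch; the column names are placeholders, and attaching the key to a table happens through the table-constraints plumbing elsewhere in this patch:

import com.google.cloud.bigquery.PrimaryKey;
import com.google.common.collect.ImmutableList;

public class PrimaryKeySketch {
  public static void main(String[] args) {
    // An unenforced primary key over two columns.
    PrimaryKey primaryKey =
        PrimaryKey.newBuilder()
            .setColumns(ImmutableList.of("customer_id", "order_id"))
            .build();
    System.out.println(primaryKey.getColumns());
  }
}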
*/ + public abstract PrimaryKey build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryJobConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryJobConfiguration.java index 927ae846cd..a62fbb5008 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryJobConfiguration.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryJobConfiguration.java @@ -47,6 +47,7 @@ public final class QueryJobConfiguration extends JobConfiguration { private final String query; private final ImmutableList positionalParameters; private final ImmutableMap namedParameters; + private final String parameterMode; private final TableId destinationTable; private final Map tableDefinitions; private final List userDefinedFunctions; @@ -55,6 +56,7 @@ public final class QueryJobConfiguration extends JobConfiguration { private final DatasetId defaultDataset; private final Priority priority; private final Boolean allowLargeResults; + private final Boolean createSession; private final Boolean useQueryCache; private final Boolean flattenResults; private final Boolean dryRun; @@ -71,6 +73,8 @@ public final class QueryJobConfiguration extends JobConfiguration { private final List connectionProperties; // maxResults is only used for fast query path private final Long maxResults; + private final JobCreationMode jobCreationMode; + private final String reservation; /** * Priority levels for a query. If not specified the priority is assumed to be {@link @@ -92,12 +96,28 @@ public enum Priority { BATCH } + /** Job Creation Mode provides different options on job creation. */ + public enum JobCreationMode { + /** Unspecified JobCreationMode, defaults to JOB_CREATION_REQUIRED. */ + JOB_CREATION_MODE_UNSPECIFIED, + /** Default. Job creation is always required. */ + JOB_CREATION_REQUIRED, + /** + * Job creation is optional. Returning immediate results is prioritized. BigQuery will + * automatically determine if a Job needs to be created. The conditions under which BigQuery can + * decide to not create a Job are subject to change. If Job creation is required, + * JOB_CREATION_REQUIRED mode should be used, which is the default. 
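A sketch of opting into the optional mode via the setter added further down in this file; the query is a placeholder:

import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode;

public class JobCreationModeSketch {
  public static void main(String[] args) {
    // Let BigQuery skip job creation when it can return results immediately.
    QueryJobConfiguration configuration =
        QueryJobConfiguration.newBuilder("SELECT 17 AS answer")
            .setJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL)
            .build();
    System.out.println(configuration.getJobCreationMode());
  }
}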
+ */ + JOB_CREATION_OPTIONAL, + } + public static final class Builder extends JobConfiguration.Builder<QueryJobConfiguration, Builder> { private String query; private List<QueryParameterValue> positionalParameters = Lists.newArrayList(); private Map<String, QueryParameterValue> namedParameters = Maps.newHashMap(); + private String parameterMode; private TableId destinationTable; private Map<String, ExternalTableDefinition> tableDefinitions; private List<UserDefinedFunction> userDefinedFunctions; @@ -106,6 +126,7 @@ public static final class Builder private DatasetId defaultDataset; private Priority priority; private Boolean allowLargeResults; + private Boolean createSession; private Boolean useQueryCache; private Boolean flattenResults; private Boolean dryRun; @@ -121,6 +142,8 @@ public static final class Builder private RangePartitioning rangePartitioning; private List<ConnectionProperty> connectionProperties; private Long maxResults; + private JobCreationMode jobCreationMode; + private String reservation; private Builder() { super(Type.QUERY); @@ -131,6 +154,7 @@ private Builder(QueryJobConfiguration jobConfiguration) { this.query = jobConfiguration.query; this.namedParameters = jobConfiguration.namedParameters; this.positionalParameters = jobConfiguration.positionalParameters; + this.parameterMode = jobConfiguration.parameterMode; this.destinationTable = jobConfiguration.destinationTable; this.tableDefinitions = jobConfiguration.tableDefinitions; this.userDefinedFunctions = jobConfiguration.userDefinedFunctions; @@ -139,6 +163,7 @@ private Builder(QueryJobConfiguration jobConfiguration) { this.defaultDataset = jobConfiguration.defaultDataset; this.priority = jobConfiguration.priority; this.allowLargeResults = jobConfiguration.allowLargeResults; + this.createSession = jobConfiguration.createSession; this.useQueryCache = jobConfiguration.useQueryCache; this.flattenResults = jobConfiguration.flattenResults; this.dryRun = jobConfiguration.dryRun; @@ -154,20 +179,29 @@ private Builder(QueryJobConfiguration jobConfiguration) { this.rangePartitioning = jobConfiguration.rangePartitioning; this.connectionProperties = jobConfiguration.connectionProperties; this.maxResults = jobConfiguration.maxResults; + this.jobCreationMode = jobConfiguration.jobCreationMode; + this.reservation = jobConfiguration.reservation; } private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { this(); JobConfigurationQuery queryConfigurationPb = configurationPb.getQuery(); this.query = queryConfigurationPb.getQuery(); + // Allows undeclared query parameters to be read back from job statistics. + if (queryConfigurationPb.getQueryParameters() == null + && queryConfigurationPb.getParameterMode() != null) { + parameterMode = queryConfigurationPb.getParameterMode(); + } if (queryConfigurationPb.getQueryParameters() != null && !queryConfigurationPb.getQueryParameters().isEmpty()) { if (queryConfigurationPb.getQueryParameters().get(0).getName() == null) { + parameterMode = "POSITIONAL"; setPositionalParameters( Lists.transform( queryConfigurationPb.getQueryParameters(), POSITIONAL_PARAMETER_FROM_PB_FUNCTION)); } else { + parameterMode = "NAMED"; Map<String, QueryParameterValue> values = Maps.newHashMap(); for (QueryParameter queryParameterPb : queryConfigurationPb.getQueryParameters()) { checkNotNull(queryParameterPb.getName()); @@ -180,6 +214,7 @@ private Builder(com.google.api.services.bigquery.model.JobConfigur } } allowLargeResults = queryConfigurationPb.getAllowLargeResults(); + createSession = queryConfigurationPb.getCreateSession(); useQueryCache = queryConfigurationPb.getUseQueryCache(); flattenResults = queryConfigurationPb.getFlattenResults(); useLegacySql = queryConfigurationPb.getUseLegacySql(); @@ -253,6 +288,9 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur queryConfigurationPb.getConnectionProperties(), ConnectionProperty.FROM_PB_FUNCTION); } + if (configurationPb.getReservation() != null) { + this.reservation = configurationPb.getReservation(); + } } /** Sets the BigQuery SQL query to execute. */ @@ -277,6 +315,16 @@ public Builder addPositionalParameter(QueryParameterValue value) { return this; } + /** + * Standard SQL only. Set to POSITIONAL to use positional (?) query parameters or to NAMED to + * use named (@myparam) query parameters in this query. + */ + public Builder setParameterMode(String parameterMode) { + checkNotNull(parameterMode); + this.parameterMode = parameterMode; + return this; + } + /** * Sets the query parameters to a list of positional query parameters to use in the query. * @@ -445,6 +493,16 @@ public Builder setPriority(Priority priority) { return this; } + /** + * Sets whether to create a new session. If {@code true}, a random session id will be generated + * by BigQuery. If {@code false}, the query runs with an existing session_id passed in via + * ConnectionProperty; otherwise the query runs in non-session mode. + */ + public Builder setCreateSession(Boolean createSession) { + this.createSession = createSession; + return this; + } + /** * Sets whether the job is enabled to create arbitrarily large results. If {@code true} the * query is allowed to create large results at a slight cost in performance. If {@code true} @@ -621,6 +679,28 @@ public Builder setMaxResults(Long maxResults) { return this; } + /** + * Provides different options on job creation. If not specified, the job creation mode is + * assumed to be {@link JobCreationMode#JOB_CREATION_REQUIRED}. + */ + public Builder setJobCreationMode(JobCreationMode jobCreationMode) { + this.jobCreationMode = jobCreationMode; + return this; + } + + /** + * [Optional] The reservation that the job would use. The user can specify a reservation to + * execute the job. If a reservation is not set, it is determined based on the rules defined by + * the reservation assignments. The expected format is + * `projects/{project}/locations/{location}/reservations/{reservation}`. + * + * @param reservation reservation or {@code null} for none + */ + public Builder setReservation(String reservation) { + this.reservation = reservation; + return this; + } + public QueryJobConfiguration build() { return new QueryJobConfiguration(this); } @@ -639,7 +719,9 @@ private QueryJobConfiguration(Builder builder) { } positionalParameters = ImmutableList.copyOf(builder.positionalParameters); namedParameters = ImmutableMap.copyOf(builder.namedParameters); + this.parameterMode = builder.parameterMode; this.allowLargeResults = builder.allowLargeResults; + this.createSession = builder.createSession; this.createDisposition = builder.createDisposition; this.defaultDataset = builder.defaultDataset; this.destinationTable = builder.destinationTable; @@ -663,6 +745,8 @@ private QueryJobConfiguration(Builder builder) { this.rangePartitioning = builder.rangePartitioning; this.connectionProperties = builder.connectionProperties; this.maxResults = builder.maxResults; + this.jobCreationMode = builder.jobCreationMode; + this.reservation = builder.reservation; } /** @@ -677,6 +761,15 @@ public Boolean allowLargeResults() { return allowLargeResults; } + + /** + * Returns whether to create a new session.
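A session sketch combining setCreateSession with connection properties; "session_id" is assumed to be BigQuery's connection-property key for session reuse, and the session id value is a placeholder normally read from the first job's statistics:

import com.google.cloud.bigquery.ConnectionProperty;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.common.collect.ImmutableList;

public class SessionSketch {
  public static void main(String[] args) {
    // First job: ask BigQuery to open a session.
    QueryJobConfiguration openSession =
        QueryJobConfiguration.newBuilder("SELECT 1").setCreateSession(true).build();

    // Later jobs: reuse the session id reported back for the first job.
    String sessionId = "placeholder-session-id";
    QueryJobConfiguration inSession =
        QueryJobConfiguration.newBuilder("CREATE TEMP TABLE tmp AS SELECT 1")
            .setConnectionProperties(
                ImmutableList.of(ConnectionProperty.of("session_id", sessionId)))
            .build();
    System.out.println(openSession.createSession() + " " + inSession.getConnectionProperties());
  }
}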
+ * + * @see Create Sessions + */ + public Boolean createSession() { + return createSession; + } + /** * Returns whether the job is allowed to create new tables. * @@ -865,6 +958,16 @@ public Long getMaxResults() { return maxResults; } + /** Returns the job creation mode. */ + public JobCreationMode getJobCreationMode() { + return jobCreationMode; + } + + /** Returns the reservation associated with this job */ + public String getReservation() { + return reservation; + } + @Override public Builder toBuilder() { return new Builder(this); @@ -876,10 +979,12 @@ ToStringHelper toStringHelper() { .add("query", query) .add("positionalParameters", positionalParameters) .add("namedParameters", namedParameters) + .add("parameterMode", parameterMode) .add("destinationTable", destinationTable) .add("destinationEncryptionConfiguration", destinationEncryptionConfiguration) .add("defaultDataset", defaultDataset) .add("allowLargeResults", allowLargeResults) + .add("createSession", createSession) .add("flattenResults", flattenResults) .add("priority", priority) .add("tableDefinitions", tableDefinitions) @@ -897,7 +1002,9 @@ ToStringHelper toStringHelper() { .add("jobTimeoutMs", jobTimeoutMs) .add("labels", labels) .add("rangePartitioning", rangePartitioning) - .add("connectionProperties", connectionProperties); + .add("connectionProperties", connectionProperties) + .add("jobCreationMode", jobCreationMode) + .add("reservation", reservation); } @Override @@ -911,6 +1018,7 @@ public int hashCode() { return Objects.hash( baseHashCode(), allowLargeResults, + createSession, createDisposition, destinationTable, defaultDataset, @@ -919,6 +1027,7 @@ public int hashCode() { query, positionalParameters, namedParameters, + parameterMode, tableDefinitions, useQueryCache, userDefinedFunctions, @@ -933,7 +1042,8 @@ public int hashCode() { jobTimeoutMs, labels, rangePartitioning, - connectionProperties); + connectionProperties, + reservation); } @Override @@ -963,10 +1073,16 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { Lists.transform(namedParameters.entrySet().asList(), NAMED_PARAMETER_TO_PB_FUNCTION); queryConfigurationPb.setQueryParameters(queryParametersPb); } + if (parameterMode != null) { + queryConfigurationPb.setParameterMode(parameterMode); + } configurationPb.setDryRun(dryRun()); if (allowLargeResults != null) { queryConfigurationPb.setAllowLargeResults(allowLargeResults); } + if (createSession != null) { + queryConfigurationPb.setCreateSession(createSession); + } if (createDisposition != null) { queryConfigurationPb.setCreateDisposition(createDisposition.toString()); } @@ -1036,6 +1152,9 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { queryConfigurationPb.setConnectionProperties( Lists.transform(connectionProperties, ConnectionProperty.TO_PB_FUNCTION)); } + if (reservation != null) { + configurationPb.setReservation(reservation); + } configurationPb.setQuery(queryConfigurationPb); return configurationPb; } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java index 76e521d561..89e7ae85bf 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java @@ -16,32 +16,39 @@ package com.google.cloud.bigquery; -import static org.threeten.bp.temporal.ChronoField.HOUR_OF_DAY; -import static 
org.threeten.bp.temporal.ChronoField.MINUTE_OF_HOUR; -import static org.threeten.bp.temporal.ChronoField.NANO_OF_SECOND; -import static org.threeten.bp.temporal.ChronoField.SECOND_OF_MINUTE; +import static java.time.temporal.ChronoField.HOUR_OF_DAY; +import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; +import static java.time.temporal.ChronoField.NANO_OF_SECOND; +import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; +import com.google.api.core.ObsoleteApi; import com.google.api.services.bigquery.model.QueryParameterType; +import com.google.api.services.bigquery.model.RangeValue; import com.google.auto.value.AutoValue; import com.google.cloud.Timestamp; +import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; import com.google.common.io.BaseEncoding; +import com.google.gson.JsonObject; import java.io.Serializable; import java.math.BigDecimal; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.format.DateTimeParseException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; import javax.annotation.Nullable; -import org.threeten.bp.Instant; -import org.threeten.bp.ZoneOffset; -import org.threeten.bp.format.DateTimeFormatter; -import org.threeten.bp.format.DateTimeFormatterBuilder; -import org.threeten.bp.format.DateTimeParseException; +import org.threeten.extra.PeriodDuration; /** * A value for a QueryParameter along with its type. @@ -50,8 +57,6 @@ * for StandardSQLTypeName.INT64). Alternatively, an instance can be constructed by calling {@link * #of(Object, Class)} with the value and a Class object, which will use these mappings: *
- * <p>
- *
 * <ul>
 *   <li>Boolean: StandardSQLTypeName.BOOL
 *   <li>String: StandardSQLTypeName.STRING
@@ -61,6 +66,8 @@
 *   <li>Float: StandardSQLTypeName.FLOAT64
 *   <li>BigDecimal: StandardSQLTypeName.NUMERIC
 *   <li>BigNumeric: StandardSQLTypeName.BIGNUMERIC
+ *   <li>JSON: StandardSQLTypeName.JSON
+ *   <li>INTERVAL: StandardSQLTypeName.INTERVAL
 * </ul>
 *
 * <p>
No other types are supported through that entry point. The other types can be created by @@ -72,7 +79,7 @@ @AutoValue public abstract class QueryParameterValue implements Serializable { - private static final DateTimeFormatter timestampFormatter = + static final DateTimeFormatter TIMESTAMP_FORMATTER = new DateTimeFormatterBuilder() .parseLenient() .append(DateTimeFormatter.ISO_LOCAL_DATE) @@ -90,15 +97,21 @@ public abstract class QueryParameterValue implements Serializable { .optionalEnd() .toFormatter() .withZone(ZoneOffset.UTC); - private static final DateTimeFormatter timestampValidator = + private static final DateTimeFormatter TIMESTAMP_VALIDATOR = new DateTimeFormatterBuilder() .parseLenient() - .append(timestampFormatter) + .append(TIMESTAMP_FORMATTER) .optionalStart() .appendOffsetId() .optionalEnd() .toFormatter() .withZone(ZoneOffset.UTC); + // Regex to identify >9 digits in the fraction part (e.g. `.123456789123`). + // Matches the dot, followed by 10+ digits (the fractional part), followed by non-digits + // (like `+00`) or end of string. + private static final Pattern ISO8601_TIMESTAMP_HIGH_PRECISION_PATTERN = + Pattern.compile("\\.(\\d{10,})(?:\\D|$)"); + private static final DateTimeFormatter dateFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd"); private static final DateTimeFormatter timeFormatter = DateTimeFormatter.ofPattern("HH:mm:ss.SSSSSS"); @@ -139,6 +152,13 @@ public Builder setStructValues(Map structValues) { abstract Builder setStructValuesInner(Map<String, QueryParameterValue> structValues); + /** Sets the range value. The type must be set to RANGE. */ + public Builder setRangeValues(Range range) { + return setRangeValuesInner(range); + } + + abstract Builder setRangeValuesInner(Range range); + /** Sets the parameter data type. */ public abstract Builder setType(StandardSQLTypeName type); @@ -182,6 +202,15 @@ public Map getStructValues() { @Nullable abstract Map<String, QueryParameterValue> getStructValuesInner(); + /** Returns the range value of this parameter. */ + @Nullable + public Range getRangeValues() { + return getRangeValuesInner(); + } + + @Nullable + abstract Range getRangeValuesInner(); + /** Returns the data type of this parameter. */ public abstract StandardSQLTypeName getType(); @@ -254,19 +283,54 @@ public static QueryParameterValue string(String value) { return of(value, StandardSQLTypeName.STRING); } + /** Creates a {@code QueryParameterValue} object with a type of GEOGRAPHY. */ + public static QueryParameterValue geography(String value) { + return of(value, StandardSQLTypeName.GEOGRAPHY); + } + + /** + * Creates a {@code QueryParameterValue} object with a type of JSON. Currently, this is only + * supported in INSERT statements, not as a query filter. + */ + public static QueryParameterValue json(String value) { + return of(value, StandardSQLTypeName.JSON); + } + + /** + * Creates a {@code QueryParameterValue} object with a type of JSON. Currently, this is only + * supported in INSERT statements, not as a query filter. + */ + public static QueryParameterValue json(JsonObject value) { + return of(value, StandardSQLTypeName.JSON); + } + /** Creates a {@code QueryParameterValue} object with a type of BYTES. */ public static QueryParameterValue bytes(byte[] value) { return of(value, StandardSQLTypeName.BYTES); } - /** Creates a {@code QueryParameterValue} object with a type of TIMESTAMP. */ + /** + * Creates a {@code QueryParameterValue} object with a type of TIMESTAMP. + * + * <p>This method only supports microsecond precision for timestamps. To use higher precision, + * prefer {@link #timestamp(String)} with an ISO8601 string. + * + * @param value microseconds since epoch, e.g. 1733945416000000 corresponds to + *     2024-12-11 19:30:16Z + */ public static QueryParameterValue timestamp(Long value) { return of(value, StandardSQLTypeName.TIMESTAMP); } /** - * Creates a {@code QueryParameterValue} object with a type of TIMESTAMP. Must be in the format - * "yyyy-MM-dd HH:mm:ss.SSSSSSZZ", e.g. "2014-08-19 12:41:35.220000+00:00". + * Creates a {@code QueryParameterValue} object with a type of TIMESTAMP. + * + * <p>This method supports up to picosecond precision (12 fractional digits). Input should + * conform to the ISO8601 format. + * + * <p>Should be in the format "yyyy-MM-dd HH:mm:ss.SSSSSS[SSSSSS]Z", with up to six optional + * extra fractional digits, e.g. "2014-08-19 12:41:35.123456Z" for microsecond precision and + * "2014-08-19 12:41:35.123456789123Z" for picosecond precision. */ public static QueryParameterValue timestamp(String value) { return of(value, StandardSQLTypeName.TIMESTAMP); } @@ -290,12 +354,38 @@ public static QueryParameterValue time(String value) { /** * Creates a {@code QueryParameterValue} object with a type of DATETIME. Must be in the format - * "yyyy-MM-dd HH:mm:ss.SSSSSS", e.g. ""2014-08-19 12:41:35.220000". + * "yyyy-MM-dd HH:mm:ss.SSSSSS", e.g. "2014-08-19 12:41:35.220000". */ public static QueryParameterValue dateTime(String value) { return of(value, StandardSQLTypeName.DATETIME); } + /** + * Creates a {@code QueryParameterValue} object with a type of INTERVAL. Must be in the canonical + * format "[sign]Y-M [sign]D [sign]H:M:S[.F]", e.g. "123-7 -19 0:24:12.000006", or in the ISO 8601 + * duration format, e.g. "P123Y7M-19DT0H24M12.000006S". + */ + public static QueryParameterValue interval(String value) { + return of(value, StandardSQLTypeName.INTERVAL); + } + + /** + * Creates a {@code QueryParameterValue} object with a type of INTERVAL. This method is obsolete. + * Use {@link #interval(String)} instead. + */ + @ObsoleteApi("Use interval(String) instead") + public static QueryParameterValue interval(PeriodDuration value) { + return of(value, StandardSQLTypeName.INTERVAL); + } + + /** Creates a {@code QueryParameterValue} object with a type of RANGE. */ + public static QueryParameterValue range(Range value) { + return QueryParameterValue.newBuilder() + .setRangeValues(value) + .setType(StandardSQLTypeName.RANGE) + .build(); + } + /** * Creates a {@code QueryParameterValue} object with a type of ARRAY, and an array element type * based on the given class.
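A sketch of the new parameter factories above; all values are placeholders:

import com.google.cloud.bigquery.QueryParameterValue;

public class ParameterTypesSketch {
  public static void main(String[] args) {
    // JSON and INTERVAL parameters, per the factory methods above.
    QueryParameterValue json = QueryParameterValue.json("{\"name\": \"ada\", \"age\": 36}");
    QueryParameterValue interval = QueryParameterValue.interval("123-7 -19 0:24:12.000006");

    // Long timestamps carry microsecond precision only...
    QueryParameterValue micros = QueryParameterValue.timestamp(1733945416000000L);
    // ...while String timestamps may carry up to picosecond precision.
    QueryParameterValue picos = QueryParameterValue.timestamp("2024-12-11 19:30:16.123456789123Z");

    System.out.println(json.getType() + " " + interval.getType());
    System.out.println(micros.getValue() + " " + picos.getValue());
  }
}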
@@ -310,7 +400,11 @@ public static <T> QueryParameterValue array(T[] array, Class<T> clazz) { public static <T> QueryParameterValue array(T[] array, StandardSQLTypeName type) { List<QueryParameterValue> listValues = new ArrayList<>(); for (T obj : array) { - listValues.add(QueryParameterValue.of(obj, type)); + if (type == StandardSQLTypeName.STRUCT) { + // Elements of a STRUCT array are already QueryParameterValue objects. + listValues.add((QueryParameterValue) obj); + } else { + listValues.add(QueryParameterValue.of(obj, type)); + } } return QueryParameterValue.newBuilder() .setArrayValues(listValues) @@ -335,6 +429,8 @@ private static StandardSQLTypeName classToType(Class type) { return StandardSQLTypeName.BOOL; } else if (String.class.isAssignableFrom(type)) { + // Note: a String always maps to STRING here; GEOGRAPHY, JSON and INTERVAL parameters + // must be created through their explicit factory methods. return StandardSQLTypeName.STRING; } else if (Integer.class.isAssignableFrom(type)) { return StandardSQLTypeName.INT64; } else if (Long.class.isAssignableFrom(type)) { @@ -347,6 +443,10 @@ private static StandardSQLTypeName classToType(Class type) { return StandardSQLTypeName.NUMERIC; } else if (Date.class.isAssignableFrom(type)) { return StandardSQLTypeName.DATE; + } else if (JsonObject.class.isAssignableFrom(type)) { + return StandardSQLTypeName.JSON; } throw new IllegalArgumentException("Unsupported object type for QueryParameter: " + type); } @@ -384,18 +484,30 @@ private static String valueToStringOrNull(T value, StandardSQLTypeName type) break; case STRING: return value.toString(); + case GEOGRAPHY: + return value.toString(); + case JSON: + if (value instanceof String || value instanceof JsonObject) return value.toString(); + break; + case INTERVAL: + if (value instanceof String || value instanceof PeriodDuration) return value.toString(); + break; case STRUCT: throw new IllegalArgumentException("Cannot convert STRUCT to String value"); case ARRAY: throw new IllegalArgumentException("Cannot convert ARRAY to String value"); + case RANGE: + throw new IllegalArgumentException("Cannot convert RANGE to String value"); case TIMESTAMP: if (value instanceof Long) { + // A timestamp passed as a Long only supports microsecond precision. Timestamp timestamp = Timestamp.ofTimeMicroseconds((Long) value); - return timestampFormatter.format( + return TIMESTAMP_FORMATTER.format( Instant.ofEpochSecond(timestamp.getSeconds(), timestamp.getNanos())); } else if (value instanceof String) { - // verify that the String is in the right format - checkFormat(value, timestampValidator); + // A timestamp passed as a String can carry up to picosecond precision; however, + // DateTimeFormatter only supports nanosecond precision, so anything beyond + // nanoseconds requires a custom validator. + validateTimestamp((String) value); return (String) value; } break; @@ -430,9 +542,42 @@ private static String valueToStringOrNull(T value, StandardSQLTypeName type) "Type " + type + " incompatible with " + value.getClass().getCanonicalName()); } + /** + * Internal helper method to check that the timestamp follows the expected ISO8601 String input + * format. Allows the fractional portion of the timestamp to support up to 12 digits of + * precision (up to picosecond).
+ * + * @throws IllegalArgumentException if timestamp is invalid or exceeds picosecond precision + */ + @VisibleForTesting + static void validateTimestamp(String timestamp) { + // Check if the string has greater than nanosecond precision (>9 digits in fractional second) + Matcher matcher = ISO8601_TIMESTAMP_HIGH_PRECISION_PATTERN.matcher(timestamp); + if (matcher.find()) { + // Group 1 is the fractional second part of the ISO8601 string + String fraction = matcher.group(1); + // Pos 10-12 of the fractional second are guaranteed to be digits. The regex only + // matches the fraction section as long as they are digits. + if (fraction.length() > 12) { + throw new IllegalArgumentException( + "Fractional second portion of ISO8601 only supports up to picosecond (12 digits) in BigQuery"); + } + + // Replace the entire fractional second portion with just the nanosecond portion. + // The new timestamp will be validated against the JDK's DateTimeFormatter + String truncatedFraction = fraction.substring(0, 9); + timestamp = + new StringBuilder(timestamp) + .replace(matcher.start(1), matcher.end(1), truncatedFraction) + .toString(); + } + + // It is valid as long as DateTimeFormatter doesn't throw an exception + checkFormat(timestamp, TIMESTAMP_VALIDATOR); + } + private static void checkFormat(Object value, DateTimeFormatter formatter) { try { formatter.parse((String) value); } catch (DateTimeParseException e) { throw new IllegalArgumentException(e.getMessage(), e); @@ -463,6 +608,22 @@ com.google.api.services.bigquery.model.QueryParameterValue toValuePb() { } valuePb.setStructValues(structValues); } + if (getType() == StandardSQLTypeName.RANGE) { + RangeValue rangeValue = new RangeValue(); + if (!getRangeValues().getStart().isNull()) { + com.google.api.services.bigquery.model.QueryParameterValue startValue = + new com.google.api.services.bigquery.model.QueryParameterValue(); + startValue.setValue(getRangeValues().getStart().getStringValue()); + rangeValue.setStart(startValue); + } + if (!getRangeValues().getEnd().isNull()) { + com.google.api.services.bigquery.model.QueryParameterValue endValue = + new com.google.api.services.bigquery.model.QueryParameterValue(); + endValue.setValue(getRangeValues().getEnd().getStringValue()); + rangeValue.setEnd(endValue); + } + valuePb.setRangeValue(rangeValue); + } return valuePb; } @@ -470,9 +631,15 @@ QueryParameterType toTypePb() { QueryParameterType typePb = new QueryParameterType(); typePb.setType(getType().toString()); if (getArrayType() != null) { - QueryParameterType arrayTypePb = new QueryParameterType(); - arrayTypePb.setType(getArrayType().toString()); - typePb.setArrayType(arrayTypePb); + List<QueryParameterValue> values = getArrayValues(); + if (getArrayType() == StandardSQLTypeName.STRUCT && values != null && values.size() != 0) { + QueryParameterType structType = values.get(0).toTypePb(); + typePb.setArrayType(structType); + } else { + QueryParameterType arrayTypePb = new QueryParameterType(); + arrayTypePb.setType(getArrayType().toString()); + typePb.setArrayType(arrayTypePb); + } } if (getStructTypes() != null) { List structTypes = new ArrayList<>(); @@ -484,6 +651,13 @@ QueryParameterType toTypePb() { } typePb.setStructTypes(structTypes); } + if (getType() == StandardSQLTypeName.RANGE + && getRangeValues() != null + && getRangeValues().getType() != null) { + QueryParameterType rangeTypePb = new QueryParameterType(); + rangeTypePb.setType(getRangeValues().getType().getType()); + typePb.setRangeElementType(rangeTypePb); + } return typePb; } @@ -532,6 +706,21 @@ static
QueryParameterValue fromPb( } valueBuilder.setStructValues(structValues); } + } else if (type == StandardSQLTypeName.RANGE) { + Range.Builder range = Range.newBuilder(); + if (valuePb.getRangeValue() != null) { + com.google.api.services.bigquery.model.RangeValue rangeValuePb = valuePb.getRangeValue(); + if (rangeValuePb.getStart() != null && rangeValuePb.getStart().getValue() != null) { + range.setStart(valuePb.getRangeValue().getStart().getValue()); + } + if (rangeValuePb.getEnd() != null && rangeValuePb.getEnd().getValue() != null) { + range.setEnd(valuePb.getRangeValue().getEnd().getValue()); + } + } + if (typePb.getRangeElementType() != null && typePb.getRangeElementType().getType() != null) { + range.setType(FieldElementType.fromPb(typePb)); + } + valueBuilder.setRangeValues(range.build()); } else { valueBuilder.setValue(valuePb == null ? "" : valuePb.getValue()); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java index 7492809d61..c7033817c3 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryRequestInfo.java @@ -16,8 +16,10 @@ package com.google.cloud.bigquery; +import com.google.api.services.bigquery.model.DataFormatOptions; import com.google.api.services.bigquery.model.QueryParameter; import com.google.api.services.bigquery.model.QueryRequest; +import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode; import com.google.common.base.MoreObjects; import com.google.common.base.Objects; import com.google.common.collect.Lists; @@ -37,10 +39,15 @@ final class QueryRequestInfo { private final String query; private final List queryParameters; private final String requestId; + private final Boolean createSession; private final Boolean useQueryCache; private final Boolean useLegacySql; + private final JobCreationMode jobCreationMode; + private final DataFormatOptions formatOptions; + private final String reservation; - QueryRequestInfo(QueryJobConfiguration config) { + QueryRequestInfo( + QueryJobConfiguration config, com.google.cloud.bigquery.DataFormatOptions dataFormatOptions) { this.config = config; this.connectionProperties = config.getConnectionProperties(); this.defaultDataset = config.getDefaultDataset(); @@ -51,11 +58,23 @@ final class QueryRequestInfo { this.query = config.getQuery(); this.queryParameters = config.toPb().getQuery().getQueryParameters(); this.requestId = UUID.randomUUID().toString(); + this.createSession = config.createSession(); this.useLegacySql = config.useLegacySql(); this.useQueryCache = config.useQueryCache(); + this.jobCreationMode = config.getJobCreationMode(); + this.formatOptions = dataFormatOptions.toPb(); + this.reservation = config.getReservation(); } - boolean isFastQuerySupported() { + boolean isFastQuerySupported(JobId jobId) { + // Fast query path is not possible if job is specified in the JobID object + // Respect Job field value in JobId specified by user. + // Specifying it will force the query to take the slower path. 
+ if (jobId != null) { + if (jobId.getJob() != null) { + return false; + } + } return config.getClustering() == null && config.getCreateDisposition() == null && config.getDestinationEncryptionConfiguration() == null @@ -68,7 +87,8 @@ boolean isFastQuerySupported() { && config.getTableDefinitions() == null && config.getTimePartitioning() == null && config.getUserDefinedFunctions() == null - && config.getWriteDisposition() == null; + && config.getWriteDisposition() == null + && config.getJobCreationMode() != JobCreationMode.JOB_CREATION_REQUIRED; } QueryRequest toPb() { @@ -97,12 +117,24 @@ QueryRequest toPb() { if (queryParameters != null) { request.setQueryParameters(queryParameters); } + if (createSession != null) { + request.setCreateSession(createSession); + } if (useLegacySql != null) { request.setUseLegacySql(useLegacySql); } if (useQueryCache != null) { request.setUseQueryCache(useQueryCache); } + if (jobCreationMode != null) { + request.setJobCreationMode(jobCreationMode.toString()); + } + if (formatOptions != null) { + request.setFormatOptions(formatOptions); + } + if (reservation != null) { + request.setReservation(reservation); + } return request; } @@ -118,8 +150,12 @@ public String toString() { .add("query", query) .add("requestId", requestId) .add("queryParameters", queryParameters) + .add("createSession", createSession) .add("useQueryCache", useQueryCache) .add("useLegacySql", useLegacySql) + .add("jobCreationMode", jobCreationMode) + .add("formatOptions", formatOptions.getUseInt64Timestamp()) + .add("reservation", reservation) .toString(); } @@ -135,8 +171,12 @@ public int hashCode() { query, queryParameters, requestId, + createSession, useQueryCache, - useLegacySql); + useLegacySql, + jobCreationMode, + formatOptions, + reservation); } @Override diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Range.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Range.java new file mode 100644 index 0000000000..8d244fbeb0 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Range.java @@ -0,0 +1,127 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery; + +import static com.google.common.base.Preconditions.checkNotNull; + +import com.google.auto.value.AutoValue; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.common.collect.ImmutableMap; +import java.io.Serializable; +import javax.annotation.Nullable; + +@AutoValue +public abstract class Range implements Serializable { + private static final long serialVersionUID = 1L; + + /** Returns the start value of the range. A null value represents an unbounded start. */ + public FieldValue getStart() { + // The supported Range types [DATE, DATETIME, TIMESTAMP] are all Attribute.PRIMITIVE. + return FieldValue.of(Attribute.PRIMITIVE, getStartInner()); + } + + @Nullable + abstract String getStartInner(); + + /** Returns the end value of the range.
A null value represents an unbounded end. */ + public FieldValue getEnd() { + // The supported Range types [DATE, TIME, TIMESTAMP] are all Attribute.PRIMITIVE. + return FieldValue.of(Attribute.PRIMITIVE, getEndInner()); + } + + @Nullable + abstract String getEndInner(); + + /** Returns the start and end values of this range. */ + public ImmutableMap getValues() { + ImmutableMap.Builder result = ImmutableMap.builder(); + if (!getStart().isNull()) { + result.put("start", getStart().getStringValue()); + } + if (!getEnd().isNull()) { + result.put("end", getEnd().getStringValue()); + } + return result.build(); + } + + /** Returns the type of the range. */ + @Nullable + public abstract FieldElementType getType(); + + public abstract Range.Builder toBuilder(); + + @AutoValue.Builder + public abstract static class Builder { + + public Range.Builder setStart(String start) { + return setStartInner(start); + } + + abstract Range.Builder setStartInner(String start); + + public Range.Builder setEnd(String end) { + return setEndInner(end); + } + + abstract Range.Builder setEndInner(String end); + + public abstract Range.Builder setType(FieldElementType type); + + public abstract Range build(); + } + + /** Creates a range builder. Supported StandardSQLTypeName are [DATE, DATETIME, TIMESTAMP] */ + public static Builder newBuilder() { + return new AutoValue_Range.Builder(); + } + + public static Range of(String value) throws IllegalArgumentException { + return of(value, null); + } + + /** + * Creates an instance of {@code Range} from a string representation. + * + *
<p>
    The expected string format is: "[start, end)", where start and end are string format of + * [DATE, TIME, TIMESTAMP]. + */ + public static Range of(String value, FieldElementType type) throws IllegalArgumentException { + checkNotNull(value); + Range.Builder builder = newBuilder(); + if (type != null) { + builder.setType(type); + } + String[] startEnd = value.split(", ", 2); // Expect an extra space after ','. + if (startEnd.length != 2) { + throw new IllegalArgumentException( + String.format("Expected Range value string to be [start, end) and got %s", value)); + } + + String start = startEnd[0].substring(1); // Ignore the [ + String end = startEnd[1].substring(0, startEnd[1].length() - 1); // Ignore the ) + if (start.equalsIgnoreCase("UNBOUNDED") || (start.equalsIgnoreCase("NULL"))) { + builder.setStart(null); + } else { + builder.setStart(start); + } + if (end.equalsIgnoreCase("UNBOUNDED") || (end.equalsIgnoreCase("NULL"))) { + builder.setEnd(null); + } else { + builder.setEnd(end); + } + return builder.build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ReadClientConnectionConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ReadClientConnectionConfiguration.java new file mode 100644 index 0000000000..03cc2140e0 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ReadClientConnectionConfiguration.java @@ -0,0 +1,70 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import java.io.Serializable; +import javax.annotation.Nullable; + +/** Represents BigQueryStorage Read client connection information. */ +@AutoValue +public abstract class ReadClientConnectionConfiguration implements Serializable { + + @AutoValue.Builder + public abstract static class Builder { + + /** + * Sets the total row count to page row count ratio used to determine whether to us the + * BigQueryStorage Read client to fetch result sets after the first page. + */ + public abstract Builder setTotalToPageRowCountRatio(Long ratio); + + /** + * Sets the minimum number of table rows in the query results used to determine whether to us + * the BigQueryStorage Read client to fetch result sets after the first page. + */ + public abstract Builder setMinResultSize(Long numRows); + + /** + * Sets the maximum number of table rows allowed in buffer before streaming them to the + * BigQueryResult. + */ + public abstract Builder setBufferSize(Long bufferSize); + + /** Creates a {@code ReadClientConnectionConfiguration} object. */ + public abstract ReadClientConnectionConfiguration build(); + } + + /** Returns the totalToPageRowCountRatio in this configuration. */ + @Nullable + public abstract Long getTotalToPageRowCountRatio(); + + /** Returns the minResultSize in this configuration. */ + @Nullable + public abstract Long getMinResultSize(); + + /** Returns the bufferSize in this configuration. 
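A minimal sketch of the Range.of parsing contract defined above: the literal uses "[start, end)" with a single space after the comma, and UNBOUNDED or NULL on either side yields a null bound. FieldElementType.newBuilder().setType("DATE") is assumed to be the usual way to build the element type:

import com.google.cloud.bigquery.FieldElementType;
import com.google.cloud.bigquery.Range;

public class RangeSketch {
  public static void main(String[] args) {
    Range year2020 =
        Range.of(
            "[2020-01-01, 2021-01-01)",
            FieldElementType.newBuilder().setType("DATE").build());
    System.out.println(year2020.getValues()); // {start=2020-01-01, end=2021-01-01}

    // UNBOUNDED (or NULL, case-insensitive) produces a null bound.
    Range openEnded = Range.of("[2020-01-01, UNBOUNDED)");
    System.out.println(openEnded.getEnd().isNull()); // true
  }
}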
*/ + @Nullable + public abstract Long getBufferSize(); + + public abstract Builder toBuilder(); + + /** Returns a builder for a {@code ReadClientConnectionConfiguration} object. */ + public static Builder newBuilder() { + return new AutoValue_ReadClientConnectionConfiguration.Builder(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RemoteFunctionOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RemoteFunctionOptions.java new file mode 100644 index 0000000000..0f31e9c662 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RemoteFunctionOptions.java @@ -0,0 +1,139 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import java.io.Serializable; +import java.util.Map; +import javax.annotation.Nullable; + +/** Represents Remote Function Options. Options for a remote user-defined function. */ +@AutoValue +public abstract class RemoteFunctionOptions implements Serializable { + + private static final long serialVersionUID = -7334249450657429792L; + + @AutoValue.Builder + public abstract static class Builder { + + /** + * Sets Endpoint argument Endpoint of the user-provided remote service, e.g. + * ```https://us-east1-my_gcf_project.cloudfunctions.net/remote_add``` + */ + public abstract Builder setEndpoint(String endpoint); + + /** + * Fully qualified name of the user-provided connection object which holds the authentication + * information to send requests to the remote service. Format: + * ```\"projects/{projectId}/locations/{locationId}/connections/{connectionId}\"``` + */ + public abstract Builder setConnection(String connection); + + /** + * User-defined context as a set of key/value pairs, which will be sent as function invocation + * context together with batched arguments in the requests to the remote service. The total + * number of bytes of keys and values must be less than 8KB. + */ + public abstract Builder setUserDefinedContext(Map userDefinedContext); + + /** + * Max number of rows in each batch sent to the remote service. If absent or if 0, BigQuery + * dynamically decides the number of rows in a batch. + */ + public abstract Builder setMaxBatchingRows(Long maxBatchingRows); + + /** Creates a {@code RemoteFunctionOptions} object. */ + public abstract RemoteFunctionOptions build(); + } + + /** + * Returns the endpoint of the user-provided service. + * + * @return String + */ + @Nullable + public abstract String getEndpoint(); + + /** + * Returns the fully qualified name of the user-provided connection object. + * + * @return String + */ + @Nullable + public abstract String getConnection(); + + /** + * Returns the user-defined context as a set of key/value pairs. + * + * @return Map<String, String> + */ + @Nullable + public abstract Map getUserDefinedContext(); + + /** + * Returns max number of rows in each batch sent to the remote service. 
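For the RemoteFunctionOptions value class above, a sketch of how a remote UDF's options might be assembled with the builder; the endpoint and connection strings are placeholders in the documented formats:

import com.google.cloud.bigquery.RemoteFunctionOptions;
import com.google.common.collect.ImmutableMap;

public class RemoteFunctionSketch {
  public static void main(String[] args) {
    RemoteFunctionOptions options =
        RemoteFunctionOptions.newBuilder()
            .setEndpoint("https://us-east1-my_gcf_project.cloudfunctions.net/remote_add")
            .setConnection("projects/my-project/locations/us/connections/my-connection")
            // Sent with every batched invocation; keys plus values must stay under 8KB.
            .setUserDefinedContext(ImmutableMap.of("mode", "test"))
            // 0 or unset lets BigQuery pick the batch size dynamically.
            .setMaxBatchingRows(50L)
            .build();
    System.out.println(options.getEndpoint());
  }
}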
+ * + * @return Long + */ + @Nullable + public abstract Long getMaxBatchingRows(); + + /** + * Returns a builder pre-populated using the current values of this {@code RemoteFunctionOptions}. + */ + public abstract RemoteFunctionOptions.Builder toBuilder(); + + /** Returns a builder for a {@code RemoteFunctionOptions} object. */ + public static RemoteFunctionOptions.Builder newBuilder() { + return new AutoValue_RemoteFunctionOptions.Builder(); + } + + public com.google.api.services.bigquery.model.RemoteFunctionOptions toPb() { + com.google.api.services.bigquery.model.RemoteFunctionOptions remoteFunctionOptions = + new com.google.api.services.bigquery.model.RemoteFunctionOptions(); + if (getEndpoint() != null) { + remoteFunctionOptions.setEndpoint(getEndpoint()); + } + if (getConnection() != null) { + remoteFunctionOptions.setConnection(getConnection()); + } + if (getUserDefinedContext() != null) { + remoteFunctionOptions.setUserDefinedContext(getUserDefinedContext()); + } + if (getMaxBatchingRows() != null) { + remoteFunctionOptions.setMaxBatchingRows(getMaxBatchingRows()); + } + return remoteFunctionOptions; + } + + static RemoteFunctionOptions fromPb( + com.google.api.services.bigquery.model.RemoteFunctionOptions remoteFunctionOptionsPb) { + RemoteFunctionOptions.Builder builder = newBuilder(); + if (remoteFunctionOptionsPb.getEndpoint() != null) { + builder.setEndpoint(remoteFunctionOptionsPb.getEndpoint()); + } + if (remoteFunctionOptionsPb.getConnection() != null) { + builder.setConnection(remoteFunctionOptionsPb.getConnection()); + } + if (remoteFunctionOptionsPb.getUserDefinedContext() != null) { + builder.setUserDefinedContext(remoteFunctionOptionsPb.getUserDefinedContext()); + } + if (remoteFunctionOptionsPb.getMaxBatchingRows() != null) { + builder.setMaxBatchingRows(remoteFunctionOptionsPb.getMaxBatchingRows()); + } + return builder.build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Routine.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Routine.java index 2fbf1d67d8..5d99ccfbdc 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Routine.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Routine.java @@ -111,6 +111,12 @@ public Builder setReturnType(StandardSQLDataType returnType) { return this; } + @Override + public Builder setReturnTableType(StandardSQLTableType returnTableType) { + infoBuilder.setReturnTableType(returnTableType); + return this; + } + @Override public Builder setImportedLibraries(List libraries) { infoBuilder.setImportedLibraries(libraries); @@ -123,6 +129,18 @@ public Builder setBody(String body) { return this; } + @Override + public Builder setRemoteFunctionOptions(RemoteFunctionOptions remoteFunctionOptions) { + infoBuilder.setRemoteFunctionOptions(remoteFunctionOptions); + return this; + } + + @Override + public Builder setDataGovernanceType(String dataGovernanceType) { + infoBuilder.setDataGovernanceType(dataGovernanceType); + return this; + } + @Override public Routine build() { return new Routine(bigquery, infoBuilder); diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineArgument.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineArgument.java index d36d1f2295..4bc17e6ae2 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineArgument.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineArgument.java @@ -90,7 +90,7 @@ public abstract static 
class Builder { /** Returns a builder pre-populated using the current values of this {@code RoutineArgument}. */ public abstract Builder toBuilder(); - /** Returns a builder for a {@Code RoutineArgument} object. */ + /** Returns a builder for a {@code RoutineArgument} object. */ public static Builder newBuilder() { return new AutoValue_RoutineArgument.Builder(); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineId.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineId.java index c5c3dadb3c..957b885da4 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineId.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineId.java @@ -21,6 +21,7 @@ import com.google.api.services.bigquery.model.RoutineReference; import com.google.common.base.Function; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.Objects; @@ -62,8 +63,6 @@ public String getRoutine() { } private RoutineId(String project, String dataset, String routine) { - checkArgument(!isNullOrEmpty(dataset), "Provided dataset is null or empty"); - checkArgument(!isNullOrEmpty(routine), "Provided routine is null or empty"); this.project = project; this.dataset = dataset; this.routine = routine; @@ -108,4 +107,12 @@ static RoutineId fromPb(RoutineReference routineRef) { return new RoutineId( routineRef.getProjectId(), routineRef.getDatasetId(), routineRef.getRoutineId()); } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .put("bq.routine.project", this.getProject()) + .put("bq.routine.dataset", this.getDataset()) + .put("bq.routine.id", this.getRoutine()) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineInfo.java index 1f9c252d2c..c13b90b413 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineInfo.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/RoutineInfo.java @@ -24,6 +24,7 @@ import com.google.common.base.MoreObjects; import com.google.common.base.Strings; import com.google.common.collect.Lists; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.Collections; import java.util.List; @@ -67,8 +68,12 @@ public Routine apply(RoutineInfo routineInfo) { private final String language; private final List argumentList; private final StandardSQLDataType returnType; + private final StandardSQLTableType returnTableType; private final List importedLibrariesList; private final String body; + private final RemoteFunctionOptions remoteFunctionOptions; + + private final String dataGovernanceType; public abstract static class Builder { @@ -113,6 +118,9 @@ public abstract static class Builder { */ public abstract Builder setReturnType(StandardSQLDataType returnType); + /** Optional. Set only if Routine is a "TABLE_VALUED_FUNCTION". */ + public abstract Builder setReturnTableType(StandardSQLTableType returnTableType); + /** * Optional. If language = "JAVASCRIPT", this field stores the path of the imported JAVASCRIPT * libraries as a list of gs:// URLs. @@ -144,6 +152,21 @@ public abstract static class Builder { */ public abstract Builder setBody(String body); + /** + * Optional. Remote function specific options. 
+ * + * @param remoteFunctionOptions + * @return + */ + public abstract Builder setRemoteFunctionOptions(RemoteFunctionOptions remoteFunctionOptions); + + /** + * Sets the data governance type for the Builder (e.g. DATA_MASKING). + * + *
<p>
    See https://cloud.google.com/bigquery/docs/reference/rest/v2/routines + */ + public abstract Builder setDataGovernanceType(String dataGovernanceType); + /** Creates a {@code RoutineInfo} object. */ public abstract RoutineInfo build(); } @@ -159,8 +182,12 @@ static class BuilderImpl extends Builder { private String language; private List argumentList; private StandardSQLDataType returnType; + private StandardSQLTableType returnTableType; private List importedLibrariesList; private String body; + private RemoteFunctionOptions remoteFunctionOptions; + + private String dataGovernanceType; BuilderImpl() {} @@ -175,8 +202,11 @@ static class BuilderImpl extends Builder { this.language = routineInfo.language; this.argumentList = routineInfo.argumentList; this.returnType = routineInfo.returnType; + this.returnTableType = routineInfo.returnTableType; this.importedLibrariesList = routineInfo.importedLibrariesList; this.body = routineInfo.body; + this.remoteFunctionOptions = routineInfo.remoteFunctionOptions; + this.dataGovernanceType = routineInfo.dataGovernanceType; } BuilderImpl(Routine routinePb) { @@ -195,12 +225,20 @@ static class BuilderImpl extends Builder { if (routinePb.getReturnType() != null) { this.returnType = StandardSQLDataType.fromPb(routinePb.getReturnType()); } + if (routinePb.getReturnTableType() != null) { + this.returnTableType = StandardSQLTableType.fromPb(routinePb.getReturnTableType()); + } if (routinePb.getImportedLibraries() == null) { this.importedLibrariesList = Collections.emptyList(); } else { this.importedLibrariesList = routinePb.getImportedLibraries(); } this.body = routinePb.getDefinitionBody(); + if (routinePb.getRemoteFunctionOptions() != null) { + this.remoteFunctionOptions = + RemoteFunctionOptions.fromPb(routinePb.getRemoteFunctionOptions()); + } + this.dataGovernanceType = routinePb.getDataGovernanceType(); } @Override @@ -263,6 +301,12 @@ public Builder setReturnType(StandardSQLDataType returnType) { return this; } + @Override + public Builder setReturnTableType(StandardSQLTableType returnTableType) { + this.returnTableType = returnTableType; + return this; + } + @Override public Builder setImportedLibraries(List importedLibrariesList) { this.importedLibrariesList = importedLibrariesList; @@ -275,6 +319,18 @@ public Builder setBody(String body) { return this; } + @Override + public Builder setRemoteFunctionOptions(RemoteFunctionOptions remoteFunctionOptions) { + this.remoteFunctionOptions = remoteFunctionOptions; + return this; + } + + @Override + public Builder setDataGovernanceType(String dataGovernanceType) { + this.dataGovernanceType = dataGovernanceType; + return this; + } + @Override public RoutineInfo build() { return new RoutineInfo(this); @@ -292,8 +348,11 @@ public RoutineInfo build() { this.language = builder.language; this.argumentList = builder.argumentList; this.returnType = builder.returnType; + this.returnTableType = builder.returnTableType; this.importedLibrariesList = builder.importedLibrariesList; this.body = builder.body; + this.remoteFunctionOptions = builder.remoteFunctionOptions; + this.dataGovernanceType = builder.dataGovernanceType; } /** Returns the RoutineId identified for the routine resource. * */ @@ -350,6 +409,11 @@ public StandardSQLDataType getReturnType() { return returnType; } + /** If specified, returns the table type returned from the routine. */ + public StandardSQLTableType getReturnTableType() { + return returnTableType; + } + /** * Returns the list of imported libraries for the routine. 
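Putting the new RoutineInfo fields together, a sketch of defining a data-masking routine; RoutineInfo.newBuilder(RoutineId), setRoutineType, setLanguage, and setArguments are assumed from the existing builder surface, and the ids and SQL body are placeholders:

import com.google.cloud.bigquery.RoutineArgument;
import com.google.cloud.bigquery.RoutineId;
import com.google.cloud.bigquery.RoutineInfo;
import com.google.cloud.bigquery.StandardSQLDataType;
import com.google.common.collect.ImmutableList;

public class DataMaskingSketch {
  public static void main(String[] args) {
    RoutineId routineId = RoutineId.of("my-project", "my_dataset", "mask_ssn");
    RoutineInfo routineInfo =
        RoutineInfo.newBuilder(routineId)
            .setRoutineType("SCALAR_FUNCTION")
            .setLanguage("SQL")
            // New in this change: marks the routine as a data-masking function.
            .setDataGovernanceType("DATA_MASKING")
            .setArguments(
                ImmutableList.of(
                    RoutineArgument.newBuilder()
                        .setName("ssn")
                        .setDataType(StandardSQLDataType.newBuilder("STRING").build())
                        .build()))
            .setReturnType(StandardSQLDataType.newBuilder("STRING").build())
            .setBody("REGEXP_REPLACE(ssn, '[0-9]', 'X')")
            .build();
    System.out.println(routineInfo.getDataGovernanceType()); // DATA_MASKING
  }
}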
Only relevant for routines implemented * using the JAVASCRIPT language. @@ -363,6 +427,17 @@ public String getBody() { return body; } + /** Returns the Remote function specific options. */ + public RemoteFunctionOptions getRemoteFunctionOptions() { + return remoteFunctionOptions; + } + ; + + /** Returns the data governance type of the routine, e.g. DATA_MASKING. */ + public String getDataGovernanceType() { + return dataGovernanceType; + } + /** Returns a builder pre-populated using the current values of this routine. */ public Builder toBuilder() { return new BuilderImpl(this); @@ -381,8 +456,11 @@ public String toString() { .add("language", language) .add("arguments", argumentList) .add("returnType", returnType) + .add("returnTableType", returnTableType) .add("importedLibrariesList", importedLibrariesList) .add("body", body) + .add("remoteFunctionOptions", remoteFunctionOptions) + .add("dataGovernanceType", dataGovernanceType) .toString(); } @@ -399,8 +477,11 @@ public int hashCode() { language, argumentList, returnType, + returnTableType, importedLibrariesList, - body); + body, + remoteFunctionOptions, + dataGovernanceType); } @Override @@ -438,7 +519,8 @@ Routine toPb() { .setDescription(getDescription()) .setDeterminismLevel(getDeterminismLevel()) .setLastModifiedTime(getLastModifiedTime()) - .setLanguage(getLanguage()); + .setLanguage(getLanguage()) + .setDataGovernanceType(getDataGovernanceType()); if (getRoutineId() != null) { routinePb.setRoutineReference(getRoutineId().toPb()); } @@ -448,10 +530,32 @@ Routine toPb() { if (getReturnType() != null) { routinePb.setReturnType(getReturnType().toPb()); } + if (getReturnTableType() != null) { + routinePb.setReturnTableType(getReturnTableType().toPb()); + } + if (getRemoteFunctionOptions() != null) { + routinePb.setRemoteFunctionOptions(getRemoteFunctionOptions().toPb()); + } + if (getImportedLibraries() != null) { + routinePb.setImportedLibraries(getImportedLibraries()); + } return routinePb; } static RoutineInfo fromPb(Routine routinePb) { return new BuilderImpl(routinePb).build(); } + + private static String getFieldAsString(Object field) { + return field == null ? "null" : field.toString(); + } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .putAll(this.getRoutineId().getOtelAttributes()) + .put("bq.routine.type", getFieldAsString(this.getRoutineType())) + .put("bq.routine.creation_time", getFieldAsString(this.getCreationTime())) + .put("bq.routine.last_modified_time", getFieldAsString(this.getLastModifiedTime())) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/SearchStats.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/SearchStats.java new file mode 100644 index 0000000000..237b83ca72 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/SearchStats.java @@ -0,0 +1,92 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import com.google.api.services.bigquery.model.SearchStatistics; +import com.google.auto.value.AutoValue; +import java.io.Serializable; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nullable; + +/** Represents Search statistics information of a search query. */ +@AutoValue +public abstract class SearchStats implements Serializable { + + @AutoValue.Builder + public abstract static class Builder { + + /** + * Specifies index usage mode for the query. + * + * @param indexUsageMode, has three modes UNUSED, PARTIALLY_USED, and FULLY_USED + */ + public abstract Builder setIndexUsageMode(String indexUsageMode); + + /** + * When index_usage_mode is UNUSED or PARTIALLY_USED, this field explains why index was not used + * in all or part of the search query. If index_usage_mode is FULLY_USED, this field is not + * populated. + * + * @param indexUnusedReasons + */ + public abstract Builder setIndexUnusedReasons(List indexUnusedReasons); + + /** Creates a @code SearchStats} object. */ + public abstract SearchStats build(); + } + + public abstract Builder toBuilder(); + + public static Builder newBuilder() { + return new AutoValue_SearchStats.Builder(); + } + + @Nullable + public abstract String getIndexUsageMode(); + + @Nullable + public abstract List getIndexUnusedReasons(); + + SearchStatistics toPb() { + SearchStatistics searchStatistics = new SearchStatistics(); + if (getIndexUsageMode() != null) { + searchStatistics.setIndexUsageMode(getIndexUsageMode()); + } + if (getIndexUnusedReasons() != null) { + searchStatistics.setIndexUnusedReasons( + getIndexUnusedReasons().stream() + .map(IndexUnusedReason::toPb) + .collect(Collectors.toList())); + } + return searchStatistics; + } + + static SearchStats fromPb(SearchStatistics searchStatistics) { + Builder builder = newBuilder(); + if (searchStatistics.getIndexUsageMode() != null) { + builder.setIndexUsageMode(searchStatistics.getIndexUsageMode()); + } + if (searchStatistics.getIndexUnusedReasons() != null) { + builder.setIndexUnusedReasons( + searchStatistics.getIndexUnusedReasons().stream() + .map(IndexUnusedReason::fromPb) + .collect(Collectors.toList())); + } + return builder.build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/SnapshotTableDefinition.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/SnapshotTableDefinition.java new file mode 100644 index 0000000000..ad0aeb0ce5 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/SnapshotTableDefinition.java @@ -0,0 +1,112 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import com.google.api.client.util.DateTime; +import com.google.api.core.BetaApi; +import com.google.api.services.bigquery.model.Table; +import com.google.auto.value.AutoValue; +import com.google.common.annotations.VisibleForTesting; +import javax.annotation.Nullable; + +@AutoValue +@BetaApi +public abstract class SnapshotTableDefinition extends TableDefinition { + + private static final long serialVersionUID = 2113445776046717526L; + + @AutoValue.Builder + public abstract static class Builder + extends TableDefinition.Builder { + + /** Reference describing the ID of the table that was snapshot. * */ + public abstract Builder setBaseTableId(TableId baseTableId); + + /** + * The time at which the base table was snapshot. This value is reported in the JSON response + * using RFC3339 format. * + */ + public abstract Builder setSnapshotTime(String dateTime); + + public abstract Builder setTimePartitioning(TimePartitioning timePartitioning); + + public abstract Builder setRangePartitioning(RangePartitioning rangePartitioning); + + public abstract Builder setClustering(Clustering clustering); + + /** Creates a {@code SnapshotTableDefinition} object. */ + public abstract SnapshotTableDefinition build(); + } + + @Nullable + public abstract TableId getBaseTableId(); + + @Nullable + public abstract String getSnapshotTime(); + + @Nullable + public abstract TimePartitioning getTimePartitioning(); + + @Nullable + public abstract RangePartitioning getRangePartitioning(); + + @Nullable + public abstract Clustering getClustering(); + + /** Returns a builder for a snapshot table definition. */ + public static SnapshotTableDefinition.Builder newBuilder() { + return new AutoValue_SnapshotTableDefinition.Builder().setType(Type.SNAPSHOT); + } + + @VisibleForTesting + public abstract SnapshotTableDefinition.Builder toBuilder(); + + @Override + Table toPb() { + Table tablePb = super.toPb(); + com.google.api.services.bigquery.model.SnapshotDefinition snapshotDefinition = + new com.google.api.services.bigquery.model.SnapshotDefinition(); + snapshotDefinition.setBaseTableReference(getBaseTableId().toPb()); + snapshotDefinition.setSnapshotTime(DateTime.parseRfc3339(getSnapshotTime())); + tablePb.setSnapshotDefinition(snapshotDefinition); + if (getTimePartitioning() != null) { + tablePb.setTimePartitioning(getTimePartitioning().toPb()); + } + if (getRangePartitioning() != null) { + tablePb.setRangePartitioning(getRangePartitioning().toPb()); + } + if (getClustering() != null) { + tablePb.setClustering(getClustering().toPb()); + } + return tablePb; + } + + static SnapshotTableDefinition fromPb(Table tablePb) { + Builder builder = newBuilder().table(tablePb); + com.google.api.services.bigquery.model.SnapshotDefinition snapshotDefinition = + tablePb.getSnapshotDefinition(); + if (snapshotDefinition != null) { + if (snapshotDefinition.getBaseTableReference() != null) { + builder.setBaseTableId(TableId.fromPb(snapshotDefinition.getBaseTableReference())); + } + if (snapshotDefinition.getSnapshotTime() != null) { + builder.setSnapshotTime(snapshotDefinition.getSnapshotTime().toStringRfc3339()); + } + } + return builder.build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/StandardSQLTableType.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/StandardSQLTableType.java new file mode 100644 index 0000000000..d44f89f922 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/StandardSQLTableType.java @@ 
-0,0 +1,70 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery; + +import com.google.api.services.bigquery.model.StandardSqlTableType; +import com.google.auto.value.AutoValue; +import com.google.common.collect.Lists; +import java.io.Serializable; +import java.util.List; + +/** Represents Standard SQL table type information. */ +@AutoValue +public abstract class StandardSQLTableType implements Serializable { + + @AutoValue.Builder + public abstract static class Builder { + + /** Sets the columns in this table type. */ + public abstract Builder setColumns(List columns); + + /** Creates a {@code StandardSQLTableType} object. */ + public abstract StandardSQLTableType build(); + } + + /** Returns the columns in this table type. */ + public abstract List getColumns(); + + public abstract Builder toBuilder(); + + /** Returns a builder for a {@code StandardSQLTableType} object. */ + public static Builder newBuilder() { + return new AutoValue_StandardSQLTableType.Builder(); + } + + /** Returns a builder for a {@code StandardSQLTableType} object with the specified columns. */ + public static StandardSQLTableType.Builder newBuilder(List columns) { + return newBuilder().setColumns(columns); + } + + static StandardSQLTableType fromPb( + com.google.api.services.bigquery.model.StandardSqlTableType tableTypePb) { + StandardSQLTableType.Builder builder = newBuilder(); + if (tableTypePb.getColumns() != null) { + builder.setColumns( + Lists.transform(tableTypePb.getColumns(), StandardSQLField.FROM_PB_FUNCTION)); + } + return builder.build(); + } + + StandardSqlTableType toPb() { + StandardSqlTableType tableType = new StandardSqlTableType(); + if (getColumns() != null) { + tableType.setColumns(Lists.transform(getColumns(), StandardSQLField.TO_PB_FUNCTION)); + } + return tableType; + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/StandardSQLTypeName.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/StandardSQLTypeName.java index d618b76568..1f70183cdc 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/StandardSQLTypeName.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/StandardSQLTypeName.java @@ -56,5 +56,11 @@ public enum StandardSQLTypeName { /** Represents a year, month, day, hour, minute, second, and subsecond (microsecond precision). */ DATETIME, /** Represents a set of geographic points, represented as a Well Known Text (WKT) string. */ - GEOGRAPHY + GEOGRAPHY, + /** Represents JSON data. */ + JSON, + /** Represents duration or amount of time. */ + INTERVAL, + /** Represents a contiguous range of values. 
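StandardSQLTableType above carries the column layout of a table-valued function's result; a sketch of building one for use with setReturnTableType, assuming StandardSQLField.newBuilder(name, dataType) from the existing API:

import com.google.cloud.bigquery.StandardSQLDataType;
import com.google.cloud.bigquery.StandardSQLField;
import com.google.cloud.bigquery.StandardSQLTableType;
import com.google.common.collect.ImmutableList;

public class TableTypeSketch {
  public static void main(String[] args) {
    StandardSQLTableType returnTableType =
        StandardSQLTableType.newBuilder(
                ImmutableList.of(
                    StandardSQLField.newBuilder(
                            "id", StandardSQLDataType.newBuilder("INT64").build())
                        .build(),
                    StandardSQLField.newBuilder(
                            "name", StandardSQLDataType.newBuilder("STRING").build())
                        .build()))
            .build();
    System.out.println(returnTableType.getColumns().size()); // 2
  }
}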
*/ + RANGE } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/StandardTableDefinition.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/StandardTableDefinition.java index 4b09fe3c40..0319a6a4dd 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/StandardTableDefinition.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/StandardTableDefinition.java @@ -132,6 +132,20 @@ public abstract static class Builder public abstract Builder setNumLongTermBytes(Long numLongTermBytes); + public abstract Builder setNumTimeTravelPhysicalBytes(Long numTimeTravelPhysicalBytes); + + public abstract Builder setNumTotalLogicalBytes(Long numTotalLogicalBytes); + + public abstract Builder setNumActiveLogicalBytes(Long numActiveLogicalBytes); + + public abstract Builder setNumLongTermLogicalBytes(Long numLongTermLogicalBytes); + + public abstract Builder setNumTotalPhysicalBytes(Long numTotalPhysicalBytes); + + public abstract Builder setNumActivePhysicalBytes(Long numActivePhysicalBytes); + + public abstract Builder setNumLongTermPhysicalBytes(Long numLongTermPhysicalBytes); + public abstract Builder setNumRows(Long numRows); public abstract Builder setLocation(String location); @@ -161,6 +175,14 @@ public abstract static class Builder */ public abstract Builder setClustering(Clustering clustering); + public abstract Builder setTableConstraints(TableConstraints tableConstraints); + + /** + * Set the configuration of a BigLake managed table. If not set, the table is not a BigLake + * managed table. + */ + public abstract Builder setBigLakeConfiguration(BigLakeConfiguration biglakeConfiguration); + /** Creates a {@code StandardTableDefinition} object. */ public abstract StandardTableDefinition build(); } @@ -178,6 +200,62 @@ public abstract static class Builder @Nullable public abstract Long getNumLongTermBytes(); + /** + * Returns the number of time travel physical bytes. + * + * @see Storage Pricing + */ + @Nullable + public abstract Long getNumTimeTravelPhysicalBytes(); + + /** + * Returns the number of total logical bytes. + * + * @see Storage Pricing + */ + @Nullable + public abstract Long getNumTotalLogicalBytes(); + + /** + * Returns the number of active logical bytes. + * + * @see Storage Pricing + */ + @Nullable + public abstract Long getNumActiveLogicalBytes(); + + /** + * Returns the number of long term logical bytes. + * + * @see Storage Pricing + */ + @Nullable + public abstract Long getNumLongTermLogicalBytes(); + + /** + * Returns the number of total physical bytes. + * + * @see Storage Pricing + */ + @Nullable + public abstract Long getNumTotalPhysicalBytes(); + + /** + * Returns the number of active physical bytes. + * + * @see Storage Pricing + */ + @Nullable + public abstract Long getNumActivePhysicalBytes(); + + /** + * Returns the number of long term physical bytes. + * + * @see Storage Pricing + */ + @Nullable + public abstract Long getNumLongTermPhysicalBytes(); + /** Returns the number of rows in this table, excluding any data in the streaming buffer. */ @Nullable public abstract Long getNumRows(); @@ -221,6 +299,20 @@ public abstract static class Builder @Nullable public abstract Clustering getClustering(); + /** + * Returns the table constraints for this table. Returns {@code null} if no table constraints are + * set for this table. + */ + @Nullable + public abstract TableConstraints getTableConstraints(); + + /** + * [Optional] Specifies the configuration of a BigLake managed table. 
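The new storage statistics on StandardTableDefinition mirror the physical and logical byte counters documented under storage pricing; a sketch of reading them from a fetched table (dataset and table names are placeholders, and each getter may be null when the backend omits the statistic):

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.StandardTableDefinition;
import com.google.cloud.bigquery.Table;
import com.google.cloud.bigquery.TableId;

public class StorageBytesSketch {
  public static void main(String[] args) {
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    Table table = bigquery.getTable(TableId.of("my_dataset", "my_table"));
    StandardTableDefinition def = table.getDefinition();
    System.out.println("active logical bytes:    " + def.getNumActiveLogicalBytes());
    System.out.println("long-term logical bytes: " + def.getNumLongTermLogicalBytes());
    System.out.println("total physical bytes:    " + def.getNumTotalPhysicalBytes());
    System.out.println("time-travel bytes:       " + def.getNumTimeTravelPhysicalBytes());
  }
}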
The value may be {@code + * null}. + */ + @Nullable + public abstract BigLakeConfiguration getBigLakeConfiguration(); + /** Returns a builder for a BigQuery standard table definition. */ public static Builder newBuilder() { return new AutoValue_StandardTableDefinition.Builder().setType(Type.TABLE); @@ -246,6 +338,13 @@ Table toPb() { } tablePb.setNumBytes(getNumBytes()); tablePb.setNumLongTermBytes(getNumLongTermBytes()); + tablePb.setNumTimeTravelPhysicalBytes(getNumTimeTravelPhysicalBytes()); + tablePb.setNumTotalLogicalBytes(getNumTotalLogicalBytes()); + tablePb.setNumActiveLogicalBytes(getNumActiveLogicalBytes()); + tablePb.setNumLongTermLogicalBytes(getNumLongTermLogicalBytes()); + tablePb.setNumTotalPhysicalBytes(getNumTotalPhysicalBytes()); + tablePb.setNumActivePhysicalBytes(getNumActivePhysicalBytes()); + tablePb.setNumLongTermPhysicalBytes(getNumLongTermPhysicalBytes()); tablePb.setLocation(getLocation()); if (getStreamingBuffer() != null) { tablePb.setStreamingBuffer(getStreamingBuffer().toPb()); @@ -259,6 +358,12 @@ Table toPb() { if (getClustering() != null) { tablePb.setClustering(getClustering().toPb()); } + if (getTableConstraints() != null) { + tablePb.setTableConstraints(getTableConstraints().toPb()); + } + if (getBigLakeConfiguration() != null) { + tablePb.setBiglakeConfiguration(getBigLakeConfiguration().toPb()); + } return tablePb; } @@ -296,6 +401,35 @@ static StandardTableDefinition fromPb(Table tablePb) { if (tablePb.getNumLongTermBytes() != null) { builder.setNumLongTermBytes(tablePb.getNumLongTermBytes()); } + if (tablePb.getNumTimeTravelPhysicalBytes() != null) { + builder.setNumTimeTravelPhysicalBytes(tablePb.getNumTimeTravelPhysicalBytes()); + } + if (tablePb.getNumTotalLogicalBytes() != null) { + builder.setNumTotalLogicalBytes(tablePb.getNumTotalLogicalBytes()); + } + if (tablePb.getNumActiveLogicalBytes() != null) { + builder.setNumActiveLogicalBytes(tablePb.getNumActiveLogicalBytes()); + } + if (tablePb.getNumLongTermLogicalBytes() != null) { + builder.setNumLongTermLogicalBytes(tablePb.getNumLongTermLogicalBytes()); + } + if (tablePb.getNumTotalPhysicalBytes() != null) { + builder.setNumTotalPhysicalBytes(tablePb.getNumTotalPhysicalBytes()); + } + if (tablePb.getNumActivePhysicalBytes() != null) { + builder.setNumActivePhysicalBytes(tablePb.getNumActivePhysicalBytes()); + } + if (tablePb.getNumLongTermPhysicalBytes() != null) { + builder.setNumLongTermPhysicalBytes(tablePb.getNumLongTermPhysicalBytes()); + } + if (tablePb.getTableConstraints() != null) { + builder.setTableConstraints(TableConstraints.fromPb(tablePb.getTableConstraints())); + } + if (tablePb.getBiglakeConfiguration() != null) { + builder.setBigLakeConfiguration( + BigLakeConfiguration.fromPb(tablePb.getBiglakeConfiguration())); + } + return builder.setNumBytes(tablePb.getNumBytes()).setLocation(tablePb.getLocation()).build(); } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java index 5e1f1a2c03..56c6558555 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java @@ -49,10 +49,10 @@ public static class Builder extends TableInfo.Builder { private final BigQuery bigquery; private final TableInfo.BuilderImpl infoBuilder; - Builder(BigQuery bigquery, TableId tableId, TableDefinition defintion) { + Builder(BigQuery bigquery, TableId tableId, TableDefinition definition) { 
this.bigquery = bigquery; this.infoBuilder = new TableInfo.BuilderImpl(); - this.infoBuilder.setTableId(tableId).setDefinition(defintion); + this.infoBuilder.setTableId(tableId).setDefinition(definition); } Builder(Table table) { @@ -114,6 +114,48 @@ Builder setNumLongTermBytes(Long numLongTermBytes) { return this; } + @Override + Builder setNumTimeTravelPhysicalBytes(Long numTimeTravelPhysicalBytes) { + infoBuilder.setNumTimeTravelPhysicalBytes(numTimeTravelPhysicalBytes); + return this; + } + + @Override + Builder setNumTotalLogicalBytes(Long numTotalLogicalBytes) { + infoBuilder.setNumTotalLogicalBytes(numTotalLogicalBytes); + return this; + } + + @Override + Builder setNumActiveLogicalBytes(Long numActiveLogicalBytes) { + infoBuilder.setNumActiveLogicalBytes(numActiveLogicalBytes); + return this; + } + + @Override + Builder setNumLongTermLogicalBytes(Long numLongTermLogicalBytes) { + infoBuilder.setNumLongTermLogicalBytes(numLongTermLogicalBytes); + return this; + } + + @Override + Builder setNumTotalPhysicalBytes(Long numTotalPhysicalBytes) { + infoBuilder.setNumTotalPhysicalBytes(numTotalPhysicalBytes); + return this; + } + + @Override + Builder setNumActivePhysicalBytes(Long numActivePhysicalBytes) { + infoBuilder.setNumActivePhysicalBytes(numActivePhysicalBytes); + return this; + } + + @Override + Builder setNumLongTermPhysicalBytes(Long numLongTermPhysicalBytes) { + infoBuilder.setNumLongTermPhysicalBytes(numLongTermPhysicalBytes); + return this; + } + @Override Builder setNumRows(BigInteger numRows) { infoBuilder.setNumRows(numRows); @@ -150,12 +192,36 @@ public Builder setLabels(Map labels) { return this; } + @Override + public Builder setResourceTags(Map resourceTags) { + infoBuilder.setResourceTags(resourceTags); + return this; + } + @Override public Builder setRequirePartitionFilter(Boolean requirePartitionFilter) { infoBuilder.setRequirePartitionFilter(requirePartitionFilter); return this; } + @Override + public Builder setDefaultCollation(String defaultCollation) { + infoBuilder.setDefaultCollation(defaultCollation); + return this; + } + + @Override + public TableInfo.Builder setCloneDefinition(CloneDefinition cloneDefinition) { + infoBuilder.setCloneDefinition(cloneDefinition); + return this; + } + + @Override + public Builder setTableConstraints(TableConstraints tableConstraints) { + infoBuilder.setTableConstraints(tableConstraints); + return this; + } + @Override public Table build() { return new Table(bigquery, infoBuilder); diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableConstraints.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableConstraints.java new file mode 100644 index 0000000000..ad30eafcc2 --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableConstraints.java @@ -0,0 +1,86 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import com.google.common.annotations.VisibleForTesting; +import java.io.Serializable; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nullable; + +@AutoValue +public abstract class TableConstraints implements Serializable { + public static TableConstraints.Builder newBuilder() { + return new AutoValue_TableConstraints.Builder(); + } + + static TableConstraints fromPb( + com.google.api.services.bigquery.model.TableConstraints tableConstraints) { + TableConstraints.Builder builder = newBuilder(); + + if (tableConstraints.getForeignKeys() != null) { + builder.setForeignKeys( + tableConstraints.getForeignKeys().stream() + .map(ForeignKey::fromPb) + .collect(Collectors.toList())); + } + if (tableConstraints.getPrimaryKey() != null) { + builder.setPrimaryKey(PrimaryKey.fromPb(tableConstraints.getPrimaryKey())); + } + + return builder.build(); + } + + com.google.api.services.bigquery.model.TableConstraints toPb() { + + com.google.api.services.bigquery.model.TableConstraints tableConstraints = + new com.google.api.services.bigquery.model.TableConstraints(); + if (getForeignKeys() != null) { + tableConstraints.setForeignKeys( + getForeignKeys().stream().map(ForeignKey::toPb).collect(Collectors.toList())); + } + if (getPrimaryKey() != null) { + tableConstraints.setPrimaryKey(getPrimaryKey().toPb()); + } + + return tableConstraints; + } + + @Nullable + public abstract List getForeignKeys(); + + @Nullable + public abstract PrimaryKey getPrimaryKey(); + + /** Returns a builder for table constraints. */ + @VisibleForTesting + public abstract TableConstraints.Builder toBuilder(); + + @AutoValue.Builder + public abstract static class Builder { + + /** The list of foreign keys for the table constraints. * */ + public abstract TableConstraints.Builder setForeignKeys(List foreignKeys); + + /** The primary key for the table constraints. * */ + public abstract TableConstraints.Builder setPrimaryKey(PrimaryKey primaryKey); + + /** Creates a {@code TableConstraints} object. 
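A sketch of assembling the TableConstraints value defined above; the PrimaryKey, ForeignKey, and ColumnReference builders referenced by fromPb/toPb are assumed to follow the same AutoValue pattern, and all ids are placeholders:

import com.google.cloud.bigquery.ColumnReference;
import com.google.cloud.bigquery.ForeignKey;
import com.google.cloud.bigquery.PrimaryKey;
import com.google.cloud.bigquery.TableConstraints;
import com.google.cloud.bigquery.TableId;
import com.google.common.collect.ImmutableList;

public class ConstraintsSketch {
  public static void main(String[] args) {
    TableConstraints constraints =
        TableConstraints.newBuilder()
            .setPrimaryKey(
                PrimaryKey.newBuilder().setColumns(ImmutableList.of("order_id")).build())
            .setForeignKeys(
                ImmutableList.of(
                    ForeignKey.newBuilder()
                        .setName("fk_customer")
                        .setReferencedTable(TableId.of("my_dataset", "customers"))
                        .setColumnReferences(
                            ImmutableList.of(
                                ColumnReference.newBuilder()
                                    .setReferencingColumn("customer_id")
                                    .setReferencedColumn("id")
                                    .build()))
                        .build()))
            .build();
    System.out.println(constraints.getPrimaryKey().getColumns()); // [order_id]
  }
}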
*/ + public abstract TableConstraints build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableDataWriteChannel.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableDataWriteChannel.java index f96f7892c9..dde4c08187 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableDataWriteChannel.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableDataWriteChannel.java @@ -16,12 +16,13 @@ package com.google.cloud.bigquery; -import static com.google.cloud.RetryHelper.runWithRetries; - import com.google.cloud.BaseWriteChannel; import com.google.cloud.RestorableState; -import com.google.cloud.RetryHelper; import com.google.cloud.WriteChannel; +import com.google.cloud.bigquery.BigQueryRetryHelper.BigQueryRetryHelperException; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.context.Scope; +import java.io.IOException; import java.util.List; import java.util.Objects; import java.util.concurrent.Callable; @@ -34,6 +35,9 @@ public class TableDataWriteChannel extends BaseWriteChannel { + private static final BigQueryRetryConfig EMPTY_RETRY_CONFIG = + BigQueryRetryConfig.newBuilder().build(); + private Job job; TableDataWriteChannel( @@ -48,23 +52,43 @@ public class TableDataWriteChannel @Override protected void flushBuffer(final int length, final boolean last) { - try { + Span flushBuffer = null; + if (getOptions().isOpenTelemetryTracingEnabled() + && getOptions().getOpenTelemetryTracer() != null) { + flushBuffer = + getOptions() + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.TableDataWriteChannel.flushBuffer") + .setAttribute("bq.table_data_write_channel.flush_buffer.length", length) + .setAttribute("bq.table_data_write_channel.flush_buffer.last", last) + .startSpan(); + } + + try (Scope flushBufferScope = flushBuffer != null ? flushBuffer.makeCurrent() : null) { com.google.api.services.bigquery.model.Job jobPb = - runWithRetries( + BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public com.google.api.services.bigquery.model.Job call() { + public com.google.api.services.bigquery.model.Job call() throws IOException { return getOptions() .getBigQueryRpcV2() - .write(getUploadId(), getBuffer(), 0, getPosition(), length, last); + .writeSkipExceptionTranslation( + getUploadId(), getBuffer(), 0, getPosition(), length, last); } }, getOptions().getRetrySettings(), - BigQueryImpl.EXCEPTION_HANDLER, - getOptions().getClock()); + getOptions().getResultRetryAlgorithm(), + getOptions().getClock(), + EMPTY_RETRY_CONFIG, + getOptions().isOpenTelemetryTracingEnabled(), + getOptions().getOpenTelemetryTracer()); job = jobPb != null ? 
Job.fromPb(getOptions().getService(), jobPb) : null; - } catch (RetryHelper.RetryHelperException e) { + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (flushBuffer != null) { + flushBuffer.end(); + } } } @@ -77,24 +101,42 @@ private static String open( final BigQueryOptions options, final JobId jobId, final WriteChannelConfiguration writeChannelConfiguration) { - try { - return runWithRetries( + Span open = null; + if (options.isOpenTelemetryTracingEnabled() && options.getOpenTelemetryTracer() != null) { + open = + options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.TableDataWriteChannel.open") + .setAllAttributes(jobId.getOtelAttributes()) + .setAllAttributes(writeChannelConfiguration.getDestinationTable().getOtelAttributes()) + .startSpan(); + } + + try (Scope openScope = open != null ? open.makeCurrent() : null) { + return BigQueryRetryHelper.runWithRetries( new Callable() { @Override - public String call() { + public String call() throws IOException { return options .getBigQueryRpcV2() - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setConfiguration(writeChannelConfiguration.toPb()) .setJobReference(jobId.toPb())); } }, options.getRetrySettings(), - BigQueryImpl.EXCEPTION_HANDLER, - options.getClock()); - } catch (RetryHelper.RetryHelperException e) { + options.getResultRetryAlgorithm(), + options.getClock(), + EMPTY_RETRY_CONFIG, + options.isOpenTelemetryTracingEnabled(), + options.getOpenTelemetryTracer()); + } catch (BigQueryRetryHelperException e) { throw BigQueryException.translateAndThrow(e); + } finally { + if (open != null) { + open.end(); + } } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableDefinition.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableDefinition.java index 6babd4e6f4..1fa9024987 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableDefinition.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableDefinition.java @@ -83,6 +83,8 @@ public Type apply(String constant) { */ public static final Type MODEL = type.createAndRegister("MODEL"); + public static final Type SNAPSHOT = type.createAndRegister("SNAPSHOT"); + private Type(String constant) { super(constant); } @@ -165,6 +167,8 @@ static T fromPb(Table tablePb) { return (T) ExternalTableDefinition.fromPb(tablePb); case "MODEL": return (T) ModelTableDefinition.fromPb(tablePb); + case "SNAPSHOT": + return (T) SnapshotTableDefinition.fromPb(tablePb); default: // never reached throw new IllegalArgumentException("Format " + tablePb.getType() + " is not supported"); diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableId.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableId.java index b74055d4f2..a4177b503d 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableId.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableId.java @@ -22,6 +22,7 @@ import com.google.api.services.bigquery.model.TableReference; import com.google.common.base.Function; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.util.Objects; @@ -70,8 +71,6 @@ public String getIAMResourceName() { } private TableId(String project, String dataset, String table) { - checkArgument(!isNullOrEmpty(dataset), "Provided dataset is null or empty"); - checkArgument(!isNullOrEmpty(table), "Provided 
table is null or empty"); this.project = project; this.dataset = dataset; this.table = table; @@ -114,4 +113,12 @@ TableReference toPb() { static TableId fromPb(TableReference tableRef) { return new TableId(tableRef.getProjectId(), tableRef.getDatasetId(), tableRef.getTableId()); } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .put("bq.table.project", this.getProject()) + .put("bq.table.dataset", this.getDataset()) + .put("bq.table.id", this.getTable()) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableInfo.java index d3e58f9802..b3236f4c3d 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableInfo.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableInfo.java @@ -25,6 +25,7 @@ import com.google.api.services.bigquery.model.Table; import com.google.common.base.Function; import com.google.common.base.MoreObjects; +import io.opentelemetry.api.common.Attributes; import java.io.Serializable; import java.math.BigInteger; import java.util.Map; @@ -67,11 +68,24 @@ public Table apply(TableInfo tableInfo) { private final Long lastModifiedTime; private final Long numBytes; private final Long numLongTermBytes; + private final Long numTimeTravelPhysicalBytes; + private final Long numTotalLogicalBytes; + private final Long numActiveLogicalBytes; + private final Long numLongTermLogicalBytes; + private final Long numTotalPhysicalBytes; + private final Long numActivePhysicalBytes; + private final Long numLongTermPhysicalBytes; private final BigInteger numRows; private final TableDefinition definition; private final EncryptionConfiguration encryptionConfiguration; - private final Labels labels; + private final Annotations labels; + + private final Annotations resourceTags; private final Boolean requirePartitionFilter; + private final String defaultCollation; + + private final CloneDefinition cloneDefinition; + private final TableConstraints tableConstraints; /** A builder for {@code TableInfo} objects. */ public abstract static class Builder { @@ -100,6 +114,20 @@ public abstract static class Builder { abstract Builder setNumLongTermBytes(Long numLongTermBytes); + abstract Builder setNumTimeTravelPhysicalBytes(Long numTimeTravelPhysicalBytes); + + abstract Builder setNumTotalLogicalBytes(Long numTotalLogicalBytes); + + abstract Builder setNumActiveLogicalBytes(Long numActiveLogicalBytes); + + abstract Builder setNumLongTermLogicalBytes(Long numLongTermLogicalBytes); + + abstract Builder setNumTotalPhysicalBytes(Long numTotalPhysicalBytes); + + abstract Builder setNumActivePhysicalBytes(Long numActivePhysicalBytes); + + abstract Builder setNumLongTermPhysicalBytes(Long numLongTermPhysicalBytes); + abstract Builder setNumRows(BigInteger numRows); abstract Builder setSelfLink(String selfLink); @@ -127,6 +155,9 @@ public abstract static class Builder { @BetaApi public abstract Builder setLabels(Map labels); + /** Sets the resource tags applied to this table. */ + public abstract Builder setResourceTags(Map resourceTags); + /** Creates a {@code TableInfo} object. 
*/ public abstract TableInfo build(); @@ -135,6 +166,12 @@ public abstract static class Builder { public Builder setRequirePartitionFilter(Boolean requirePartitionFilter) { return this; } + + public abstract Builder setDefaultCollation(String defaultCollation); + + public abstract Builder setCloneDefinition(CloneDefinition cloneDefinition); + + public abstract Builder setTableConstraints(TableConstraints tableConstraints); } static class BuilderImpl extends Builder { @@ -150,11 +187,23 @@ static class BuilderImpl extends Builder { private Long lastModifiedTime; private Long numBytes; private Long numLongTermBytes; + private Long numTimeTravelPhysicalBytes; + private Long numTotalLogicalBytes; + private Long numActiveLogicalBytes; + private Long numLongTermLogicalBytes; + private Long numTotalPhysicalBytes; + private Long numActivePhysicalBytes; + private Long numLongTermPhysicalBytes; private BigInteger numRows; private TableDefinition definition; private EncryptionConfiguration encryptionConfiguration; - private Labels labels = Labels.ZERO; + private Annotations labels = Annotations.ZERO; + + private Annotations resourceTags = Annotations.ZERO; private Boolean requirePartitionFilter; + private String defaultCollation; + private CloneDefinition cloneDefinition; + private TableConstraints tableConstraints; BuilderImpl() {} @@ -170,11 +219,22 @@ static class BuilderImpl extends Builder { this.lastModifiedTime = tableInfo.lastModifiedTime; this.numBytes = tableInfo.numBytes; this.numLongTermBytes = tableInfo.numLongTermBytes; + this.numTimeTravelPhysicalBytes = tableInfo.numTimeTravelPhysicalBytes; + this.numTotalLogicalBytes = tableInfo.numTotalLogicalBytes; + this.numActiveLogicalBytes = tableInfo.numActiveLogicalBytes; + this.numLongTermLogicalBytes = tableInfo.numLongTermLogicalBytes; + this.numTotalPhysicalBytes = tableInfo.numTotalPhysicalBytes; + this.numActivePhysicalBytes = tableInfo.numActivePhysicalBytes; + this.numLongTermPhysicalBytes = tableInfo.numLongTermPhysicalBytes; this.numRows = tableInfo.numRows; this.definition = tableInfo.definition; this.encryptionConfiguration = tableInfo.encryptionConfiguration; this.labels = tableInfo.labels; + this.resourceTags = tableInfo.resourceTags; this.requirePartitionFilter = tableInfo.requirePartitionFilter; + this.defaultCollation = tableInfo.defaultCollation; + this.cloneDefinition = tableInfo.cloneDefinition; + this.tableConstraints = tableInfo.tableConstraints; } BuilderImpl(Table tablePb) { @@ -191,14 +251,29 @@ static class BuilderImpl extends Builder { this.selfLink = tablePb.getSelfLink(); this.numBytes = tablePb.getNumBytes(); this.numLongTermBytes = tablePb.getNumLongTermBytes(); + this.numTimeTravelPhysicalBytes = tablePb.getNumTimeTravelPhysicalBytes(); + this.numTotalLogicalBytes = tablePb.getNumTotalLogicalBytes(); + this.numActiveLogicalBytes = tablePb.getNumActiveLogicalBytes(); + this.numLongTermLogicalBytes = tablePb.getNumLongTermLogicalBytes(); + this.numTotalPhysicalBytes = tablePb.getNumTotalPhysicalBytes(); + this.numActivePhysicalBytes = tablePb.getNumActivePhysicalBytes(); + this.numLongTermPhysicalBytes = tablePb.getNumLongTermPhysicalBytes(); this.numRows = tablePb.getNumRows(); this.definition = TableDefinition.fromPb(tablePb); if (tablePb.getEncryptionConfiguration() != null) { this.encryptionConfiguration = new EncryptionConfiguration.Builder(tablePb.getEncryptionConfiguration()).build(); } - this.labels = Labels.fromPb(tablePb.getLabels()); + this.labels = Annotations.fromPb(tablePb.getLabels()); + 
this.resourceTags = Annotations.fromPb(tablePb.getResourceTags()); this.requirePartitionFilter = tablePb.getRequirePartitionFilter(); + this.defaultCollation = tablePb.getDefaultCollation(); + if (tablePb.getCloneDefinition() != null) { + this.cloneDefinition = CloneDefinition.fromPb(tablePb.getCloneDefinition()); + } + if (tablePb.getTableConstraints() != null) { + this.tableConstraints = TableConstraints.fromPb(tablePb.getTableConstraints()); + } } @Override @@ -255,6 +330,48 @@ Builder setNumLongTermBytes(Long numLongTermBytes) { return this; } + @Override + Builder setNumTimeTravelPhysicalBytes(Long numTimeTravelPhysicalBytes) { + this.numTimeTravelPhysicalBytes = numTimeTravelPhysicalBytes; + return this; + } + + @Override + Builder setNumTotalLogicalBytes(Long numTotalLogicalBytes) { + this.numTotalLogicalBytes = numTotalLogicalBytes; + return this; + } + + @Override + Builder setNumActiveLogicalBytes(Long numActiveLogicalBytes) { + this.numActiveLogicalBytes = numActiveLogicalBytes; + return this; + } + + @Override + Builder setNumLongTermLogicalBytes(Long numLongTermLogicalBytes) { + this.numLongTermLogicalBytes = numLongTermLogicalBytes; + return this; + } + + @Override + Builder setNumTotalPhysicalBytes(Long numTotalPhysicalBytes) { + this.numTotalPhysicalBytes = numTotalPhysicalBytes; + return this; + } + + @Override + Builder setNumActivePhysicalBytes(Long numActivePhysicalBytes) { + this.numActivePhysicalBytes = numActivePhysicalBytes; + return this; + } + + @Override + Builder setNumLongTermPhysicalBytes(Long numLongTermPhysicalBytes) { + this.numLongTermPhysicalBytes = numLongTermPhysicalBytes; + return this; + } + @Override Builder setNumRows(BigInteger numRows) { this.numRows = numRows; @@ -287,7 +404,13 @@ public Builder setEncryptionConfiguration(EncryptionConfiguration configuration) @Override public Builder setLabels(Map labels) { - this.labels = Labels.fromUser(labels); + this.labels = Annotations.fromUser(labels); + return this; + } + + @Override + public Builder setResourceTags(Map resourceTags) { + this.resourceTags = Annotations.fromUser(resourceTags); return this; } @@ -297,6 +420,22 @@ public Builder setRequirePartitionFilter(Boolean requirePartitionFilter) { return this; } + @Override + public Builder setDefaultCollation(String defaultCollation) { + this.defaultCollation = defaultCollation; + return this; + } + + public Builder setCloneDefinition(CloneDefinition cloneDefinition) { + this.cloneDefinition = cloneDefinition; + return this; + } + + public Builder setTableConstraints(TableConstraints tableConstraints) { + this.tableConstraints = tableConstraints; + return this; + } + @Override public TableInfo build() { return new TableInfo(this); @@ -315,11 +454,22 @@ public TableInfo build() { this.lastModifiedTime = builder.lastModifiedTime; this.numBytes = builder.numBytes; this.numLongTermBytes = builder.numLongTermBytes; + this.numTimeTravelPhysicalBytes = builder.numTimeTravelPhysicalBytes; + this.numTotalLogicalBytes = builder.numTotalLogicalBytes; + this.numActiveLogicalBytes = builder.numActiveLogicalBytes; + this.numLongTermLogicalBytes = builder.numLongTermLogicalBytes; + this.numTotalPhysicalBytes = builder.numTotalPhysicalBytes; + this.numActivePhysicalBytes = builder.numActivePhysicalBytes; + this.numLongTermPhysicalBytes = builder.numLongTermPhysicalBytes; this.numRows = builder.numRows; this.definition = builder.definition; this.encryptionConfiguration = builder.encryptionConfiguration; - labels = builder.labels; + this.labels = builder.labels; + 
this.resourceTags = builder.resourceTags; this.requirePartitionFilter = builder.requirePartitionFilter; + this.defaultCollation = builder.defaultCollation; + this.cloneDefinition = builder.cloneDefinition; + this.tableConstraints = builder.tableConstraints; } /** Returns the hash of the table resource. */ @@ -398,6 +548,69 @@ public Long getNumLongTermBytes() { return numLongTermBytes; } + /** + * Returns the number of time travel physical bytes. + * + * @see Storage Pricing + */ + public Long getNumTimeTravelPhysicalBytes() { + return this.numTimeTravelPhysicalBytes; + } + + /** + * Returns the number of total logical bytes. + * + * @see Storage Pricing + */ + public Long getNumTotalLogicalBytes() { + return this.numTotalLogicalBytes; + } + + /** + * Returns the number of active logical bytes. + * + * @see Storage Pricing + */ + public Long getNumActiveLogicalBytes() { + return this.numActiveLogicalBytes; + } + + /** + * Returns the number of long term logical bytes. + * + * @see Storage Pricing + */ + public Long getNumLongTermLogicalBytes() { + return this.numLongTermLogicalBytes; + } + + /** + * Returns the number of total physical bytes. + * + * @see Storage Pricing + */ + public Long getNumTotalPhysicalBytes() { + return this.numTotalPhysicalBytes; + } + + /** + * Returns the number of active physical bytes. + * + * @see Storage Pricing + */ + public Long getNumActivePhysicalBytes() { + return this.numActivePhysicalBytes; + } + + /** + * Returns the number of long term physical bytes. + * + * @see Storage Pricing + */ + public Long getNumLongTermPhysicalBytes() { + return this.numLongTermPhysicalBytes; + } + /** Returns the number of rows of data in this table */ public BigInteger getNumRows() { return numRows; @@ -414,6 +627,11 @@ public Map getLabels() { return labels.userMap(); } + /** Return a map for resource tags applied to the table. */ + public Map getResourceTags() { + return resourceTags.userMap(); + } + /** * Returns true if a partition filter (that can be used for partition elimination) is required for * queries over this table. @@ -422,6 +640,18 @@ public Boolean getRequirePartitionFilter() { return requirePartitionFilter; } + public String getDefaultCollation() { + return defaultCollation; + } + + public CloneDefinition getCloneDefinition() { + return cloneDefinition; + } + + public TableConstraints getTableConstraints() { + return tableConstraints; + } + /** Returns a builder for the table object. 
*/ public Builder toBuilder() { return new BuilderImpl(this); @@ -441,11 +671,22 @@ public String toString() { .add("lastModifiedTime", lastModifiedTime) .add("numBytes", numBytes) .add("numLongTermBytes", numLongTermBytes) + .add("numTimeTravelPhysicalBytes", numTimeTravelPhysicalBytes) + .add("numTotalLogicalBytes", numTotalLogicalBytes) + .add("numActiveLogicalBytes", numActiveLogicalBytes) + .add("numLongTermLogicalBytes", numLongTermLogicalBytes) + .add("numTotalPhysicalBytes", numTotalPhysicalBytes) + .add("numActivePhysicalBytes", numActivePhysicalBytes) + .add("numLongTermPhysicalBytes", numLongTermPhysicalBytes) .add("numRows", numRows) .add("definition", definition) .add("encryptionConfiguration", encryptionConfiguration) .add("labels", labels) + .add("resourceTags", resourceTags) .add("requirePartitionFilter", requirePartitionFilter) + .add("defaultCollation", defaultCollation) + .add("cloneDefinition", cloneDefinition) + .add("tableConstraints", tableConstraints) .toString(); } @@ -506,11 +747,36 @@ Table toPb() { tablePb.setEncryptionConfiguration(encryptionConfiguration.toPb()); } tablePb.setLabels(labels.toPb()); + tablePb.setResourceTags(resourceTags.toPb()); tablePb.setRequirePartitionFilter(requirePartitionFilter); + if (defaultCollation != null) { + tablePb.setDefaultCollation(defaultCollation); + } + if (cloneDefinition != null) { + tablePb.setCloneDefinition(cloneDefinition.toPb()); + } + if (tableConstraints != null) { + tablePb.setTableConstraints(tableConstraints.toPb()); + } return tablePb; } static TableInfo fromPb(Table tablePb) { return new BuilderImpl(tablePb).build(); } + + private static String getFieldAsString(Object field) { + return field == null ? "null" : field.toString(); + } + + protected Attributes getOtelAttributes() { + return Attributes.builder() + .putAll(this.getTableId().getOtelAttributes()) + .put("bq.table.creation_time", getFieldAsString(this.getCreationTime())) + .put("bq.table.expiration_time", getFieldAsString(this.getExpirationTime())) + .put("bq.table.last_modified_time", getFieldAsString(this.getLastModifiedTime())) + .put("bq.table.num_bytes", getFieldAsString(this.getNumBytes())) + .put("bq.table.num_rows", getFieldAsString(this.getNumRows())) + .build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableMetadataCacheUsage.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableMetadataCacheUsage.java new file mode 100644 index 0000000000..89ad4f966d --- /dev/null +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableMetadataCacheUsage.java @@ -0,0 +1,118 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import com.google.auto.value.AutoValue; +import java.io.Serializable; +import javax.annotation.Nullable; + +/** Represents Table level detail on the usage of metadata caching. 
*/ +@AutoValue +public abstract class TableMetadataCacheUsage implements Serializable { + + private static final long serialVersionUID = 1L; + + /** Reason for not using metadata caching for the table. */ + public enum UnusedReason { + /** Unused reasons not specified. */ + UNUSED_REASON_UNSPECIFIED, + + /** Metadata cache was outside the table's maxStaleness. */ + EXCEEDED_MAX_STALENESS, + + /** + * Metadata caching feature is not enabled. Update BigLake tables to enable metadata + * caching. + */ + METADATA_CACHING_NOT_ENABLED, + + /** Other unknown reason. */ + OTHER_REASON + } + + @AutoValue.Builder + public abstract static class Builder { + /** Sets the free-form human-readable reason metadata caching was unused for the job. */ + public abstract TableMetadataCacheUsage.Builder setExplanation(String explanation); + + /** Sets the metadata caching eligible table referenced in the query. */ + public abstract TableMetadataCacheUsage.Builder setTableReference(TableId tableReference); + + /** Sets the table type. */ + public abstract TableMetadataCacheUsage.Builder setTableType(String tableType); + + /** Sets the reason for not using metadata caching for the table. */ + public abstract TableMetadataCacheUsage.Builder setUnusedReason(UnusedReason unusedReason); + + /** Creates a {@code TableMetadataCacheUsage} object. */ + public abstract TableMetadataCacheUsage build(); + } + + public abstract Builder toBuilder(); + + public static Builder newBuilder() { + return new AutoValue_TableMetadataCacheUsage.Builder(); + } + + @Nullable + public abstract String getExplanation(); + + @Nullable + public abstract TableId getTableReference(); + + @Nullable + public abstract String getTableType(); + + @Nullable + public abstract UnusedReason getUnusedReason(); + + com.google.api.services.bigquery.model.TableMetadataCacheUsage toPb() { + com.google.api.services.bigquery.model.TableMetadataCacheUsage tableMetadataCacheUsage = + new com.google.api.services.bigquery.model.TableMetadataCacheUsage(); + if (getExplanation() != null) { + tableMetadataCacheUsage.setExplanation(getExplanation()); + } + if (getTableReference() != null) { + tableMetadataCacheUsage.setTableReference(getTableReference().toPb()); + } + if (getTableType() != null) { + tableMetadataCacheUsage.setTableType(getTableType()); + } + if (getUnusedReason() != null) { + tableMetadataCacheUsage.setUnusedReason(getUnusedReason().toString()); + } + return tableMetadataCacheUsage; + } + + static TableMetadataCacheUsage fromPb( + com.google.api.services.bigquery.model.TableMetadataCacheUsage tableMetadataCacheUsage) { + Builder builder = newBuilder(); + if (tableMetadataCacheUsage.getExplanation() != null) { + builder.setExplanation(tableMetadataCacheUsage.getExplanation()); + } + if (tableMetadataCacheUsage.getTableReference() != null) { + builder.setTableReference(TableId.fromPb(tableMetadataCacheUsage.getTableReference())); + } + if (tableMetadataCacheUsage.getTableType() != null) { + builder.setTableType(tableMetadataCacheUsage.getTableType()); + } + if (tableMetadataCacheUsage.getUnusedReason() != null) { + builder.setUnusedReason(UnusedReason.valueOf(tableMetadataCacheUsage.getUnusedReason())); + } + return builder.build(); + } +} diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableResult.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableResult.java index a7be10d7d2..a7aa6ba9de 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableResult.java +++
b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableResult.java @@ -16,10 +16,8 @@ package com.google.cloud.bigquery; -import static com.google.common.base.Preconditions.checkNotNull; - -import com.google.api.core.InternalApi; import com.google.api.gax.paging.Page; +import com.google.auto.value.AutoValue; import com.google.common.base.Function; import com.google.common.base.MoreObjects; import com.google.common.collect.Iterables; @@ -28,68 +26,97 @@ import java.util.Objects; import javax.annotation.Nullable; -public class TableResult implements Page<FieldValueList>, Serializable { +@AutoValue +public abstract class TableResult implements Page<FieldValueList>, Serializable { - private static final long serialVersionUID = -4831062717210349819L; + private static final long serialVersionUID = 1L; - @Nullable private final Schema schema; - private final long totalRows; - private final Page<FieldValueList> pageNoSchema; + @AutoValue.Builder + public abstract static class Builder { + public abstract TableResult.Builder setSchema(Schema schema); - /** - * If {@code schema} is non-null, {@code TableResult} adds the schema to {@code FieldValueList}s - * when iterating through them. {@code pageNoSchema} must not be null. - */ - @InternalApi("Exposed for testing") - public TableResult(Schema schema, long totalRows, Page<FieldValueList> pageNoSchema) { - this.schema = schema; - this.totalRows = totalRows; - this.pageNoSchema = checkNotNull(pageNoSchema); + /** + * Sets the total number of rows in the complete result set, which can be more than the number + * of rows in the first page of results returned by {@link #getValues()}. + */ + public abstract TableResult.Builder setTotalRows(Long totalRows); + + public abstract TableResult.Builder setJobId(JobId jobId); + + public abstract TableResult.Builder setPageNoSchema(Page<FieldValueList> pageNoSchema); + + public abstract TableResult.Builder setQueryId(String queryId); + + public abstract TableResult.Builder setJobCreationReason(JobCreationReason jobCreationReason); + + /** Creates a {@code TableResult} object. */ + public abstract TableResult build(); } - /** Returns the schema of the results. Null if the schema is not supplied. */ - public Schema getSchema() { - return schema; + public abstract Builder toBuilder(); + + public static Builder newBuilder() { + return new AutoValue_TableResult.Builder(); } + /** Returns the schema of the results. Null if the schema is not supplied. */ + @Nullable + public abstract Schema getSchema(); + /** * Returns the total number of rows in the complete result set, which can be more than the number - * of rows in the first page of results returned by {@link #getValues()}. + * of rows in the first page of results. If no rows are returned, this value can still be greater + * than 0 if any rows were affected by the query, such as INSERT, UPDATE, or DELETE queries.
*/ - public long getTotalRows() { - return totalRows; - } + public abstract long getTotalRows(); + + public abstract Page<FieldValueList> getPageNoSchema(); + + @Nullable + public abstract JobId getJobId(); + + @Nullable + public abstract String getQueryId(); + + @Nullable + public abstract JobCreationReason getJobCreationReason(); @Override public boolean hasNextPage() { - return pageNoSchema.hasNextPage(); + return getPageNoSchema().hasNextPage(); } @Override public String getNextPageToken() { - return pageNoSchema.getNextPageToken(); + return getPageNoSchema().getNextPageToken(); } @Override public TableResult getNextPage() { - if (pageNoSchema.hasNextPage()) { - return new TableResult(schema, totalRows, pageNoSchema.getNextPage()); + if (getPageNoSchema().hasNextPage()) { + return TableResult.newBuilder() + .setSchema(getSchema()) + .setTotalRows(getTotalRows()) + .setPageNoSchema(getPageNoSchema().getNextPage()) + .setQueryId(getQueryId()) + .setJobCreationReason(getJobCreationReason()) + .build(); } return null; } @Override public Iterable<FieldValueList> iterateAll() { - return addSchema(pageNoSchema.iterateAll()); + return addSchema(getPageNoSchema().iterateAll()); } @Override public Iterable<FieldValueList> getValues() { - return addSchema(pageNoSchema.getValues()); + return addSchema(getPageNoSchema().getValues()); } private Iterable<FieldValueList> addSchema(Iterable<FieldValueList> iter) { - if (schema == null) { + if (getSchema() == null) { return iter; } return Iterables.transform( @@ -97,7 +124,7 @@ private Iterable<FieldValueList> addSchema(Iterable<FieldValueList> iter) { new Function<FieldValueList, FieldValueList>() { @Override public FieldValueList apply(FieldValueList list) { - return list.withSchema(schema.getFields()); + return list.withSchema(getSchema().getFields()); } }); } @@ -106,15 +133,16 @@ public FieldValueList apply(FieldValueList list) { public String toString() { return MoreObjects.toStringHelper(this) .add("rows", getValues()) - .add("schema", schema) - .add("totalRows", totalRows) + .add("schema", getSchema()) + .add("totalRows", getTotalRows()) .add("cursor", getNextPageToken()) + .add("queryId", getQueryId()) .toString(); } @Override public final int hashCode() { - return Objects.hash(pageNoSchema, schema, totalRows); + return Objects.hash(getPageNoSchema(), getSchema(), getTotalRows(), getQueryId()); } @Override @@ -122,13 +150,14 @@ public final boolean equals(Object obj) { if (obj == this) { return true; } - if (obj == null || !obj.getClass().equals(TableResult.class)) { + if (obj == null || !obj.getClass().equals(AutoValue_TableResult.class)) { return false; } TableResult response = (TableResult) obj; return Objects.equals(getNextPageToken(), response.getNextPageToken()) && Iterators.elementsEqual(getValues().iterator(), response.getValues().iterator()) - && Objects.equals(schema, response.schema) - && totalRows == response.totalRows; + && Objects.equals(getSchema(), response.getSchema()) + && getTotalRows() == response.getTotalRows() + && Objects.equals(getQueryId(), response.getQueryId()); } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/WriteChannelConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/WriteChannelConfiguration.java index 479a680999..114c6dadd4 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/WriteChannelConfiguration.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/WriteChannelConfiguration.java @@ -25,6 +25,7 @@ import com.google.cloud.bigquery.JobInfo.WriteDisposition; import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; +import
com.google.common.collect.Lists; import com.google.common.primitives.Ints; import java.io.Serializable; import java.util.List; @@ -55,9 +56,12 @@ public final class WriteChannelConfiguration implements LoadConfiguration, Seria private final Clustering clustering; private final Boolean useAvroLogicalTypes; private final Map labels; + private List decimalTargetTypes; + private final List connectionProperties; - public static final class Builder implements LoadConfiguration.Builder { + private final Boolean createSession; + public static final class Builder implements LoadConfiguration.Builder { private TableId destinationTable; private CreateDisposition createDisposition; private WriteDisposition writeDisposition; @@ -73,6 +77,10 @@ public static final class Builder implements LoadConfiguration.Builder { private Clustering clustering; private Boolean useAvroLogicalTypes; private Map labels; + private List decimalTargetTypes; + private List connectionProperties; + + private Boolean createSession; private Builder() {} @@ -93,6 +101,9 @@ private Builder(WriteChannelConfiguration writeChannelConfiguration) { this.clustering = writeChannelConfiguration.clustering; this.useAvroLogicalTypes = writeChannelConfiguration.useAvroLogicalTypes; this.labels = writeChannelConfiguration.labels; + this.decimalTargetTypes = writeChannelConfiguration.decimalTargetTypes; + this.connectionProperties = writeChannelConfiguration.connectionProperties; + this.createSession = writeChannelConfiguration.createSession; } private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) { @@ -169,6 +180,16 @@ private Builder(com.google.api.services.bigquery.model.JobConfiguration configur if (configurationPb.getLabels() != null) { this.labels = configurationPb.getLabels(); } + if (loadConfigurationPb.getDecimalTargetTypes() != null) { + this.decimalTargetTypes = loadConfigurationPb.getDecimalTargetTypes(); + } + if (loadConfigurationPb.getConnectionProperties() != null) { + + this.connectionProperties = + Lists.transform( + loadConfigurationPb.getConnectionProperties(), ConnectionProperty.FROM_PB_FUNCTION); + } + createSession = loadConfigurationPb.getCreateSession(); } @Override @@ -262,6 +283,22 @@ public Builder setLabels(Map labels) { return this; } + @Override + public Builder setDecimalTargetTypes(List decimalTargetTypes) { + this.decimalTargetTypes = decimalTargetTypes; + return this; + } + + public Builder setConnectionProperties(List connectionProperties) { + this.connectionProperties = ImmutableList.copyOf(connectionProperties); + return this; + } + + public Builder setCreateSession(Boolean createSession) { + this.createSession = createSession; + return this; + } + @Override public WriteChannelConfiguration build() { return new WriteChannelConfiguration(this); @@ -284,6 +321,9 @@ protected WriteChannelConfiguration(Builder builder) { this.clustering = builder.clustering; this.useAvroLogicalTypes = builder.useAvroLogicalTypes; this.labels = builder.labels; + this.decimalTargetTypes = builder.decimalTargetTypes; + this.connectionProperties = builder.connectionProperties; + this.createSession = builder.createSession; } @Override @@ -372,6 +412,19 @@ public Map getLabels() { return labels; } + @Override + public List getDecimalTargetTypes() { + return decimalTargetTypes; + } + + public List getConnectionProperties() { + return connectionProperties; + } + + public Boolean getCreateSession() { + return createSession; + } + @Override public Builder toBuilder() { return new Builder(this); @@ 
-393,7 +446,10 @@ MoreObjects.ToStringHelper toStringHelper() { .add("timePartitioning", timePartitioning) .add("clustering", clustering) .add("useAvroLogicalTypes", useAvroLogicalTypes) - .add("labels", labels); + .add("labels", labels) + .add("decimalTargetTypes", decimalTargetTypes) + .add("connectionProperties", connectionProperties) + .add("createSession", createSession); } @Override @@ -424,7 +480,10 @@ public int hashCode() { timePartitioning, clustering, useAvroLogicalTypes, - labels); + labels, + decimalTargetTypes, + connectionProperties, + createSession); } WriteChannelConfiguration setProjectId(String projectId) { @@ -455,7 +514,8 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { .setAllowJaggedRows(csvOptions.allowJaggedRows()) .setAllowQuotedNewlines(csvOptions.allowQuotedNewLines()) .setEncoding(csvOptions.getEncoding()) - .setQuote(csvOptions.getQuote()); + .setQuote(csvOptions.getQuote()) + .setPreserveAsciiControlCharacters(csvOptions.getPreserveAsciiControlCharacters()); if (csvOptions.getSkipLeadingRows() != null) { // todo(mziccard) remove checked cast or comment when #1044 is closed loadConfigurationPb.setSkipLeadingRows(Ints.checkedCast(csvOptions.getSkipLeadingRows())); @@ -495,6 +555,16 @@ com.google.api.services.bigquery.model.JobConfiguration toPb() { if (labels != null) { jobConfiguration.setLabels(labels); } + if (decimalTargetTypes != null) { + loadConfigurationPb.setDecimalTargetTypes(decimalTargetTypes); + } + if (connectionProperties != null) { + loadConfigurationPb.setConnectionProperties( + Lists.transform(connectionProperties, ConnectionProperty.TO_PB_FUNCTION)); + } + if (createSession != null) { + loadConfigurationPb.setCreateSession(createSession); + } jobConfiguration.setLoad(loadConfigurationPb); return jobConfiguration; } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/package-info.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/package-info.java index 65f801357f..85c3584fd6 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/package-info.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/package-info.java @@ -18,29 +18,18 @@ * A client for BigQuery – A fully managed, petabyte scale, low cost enterprise data warehouse for * analytics. * - *

    A simple usage example showing how to create a table if it does not exist and load data into - * it. For the complete source code see - * CreateTableAndLoadData.java. + *

    A simple usage example showing how to create a table in BigQuery. For the complete source code + * see + * CreateTable.java. + *

    {@code
    - * BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    - * TableId tableId = TableId.of("dataset", "table");
    - * Table table = bigquery.getTable(tableId);
    - * if (table == null) {
    - *   System.out.println("Creating table " + tableId);
    - *   Field integerField = Field.of("fieldName", Field.Type.integer());
    - *   Schema schema = Schema.of(integerField);
    - *   table = bigquery.create(TableInfo.of(tableId, StandardTableDefinition.of(schema)));
    - * }
    - * System.out.println("Loading data into table " + tableId);
    - * Job loadJob = table.load(FormatOptions.csv(), "gs://bucket/path");
    - * loadJob = loadJob.waitFor();
    - * if (loadJob.getStatus().getError() != null) {
    - *   System.out.println("Job completed with errors");
    - * } else {
    - *   System.out.println("Job succeeded");
    - * }
    + * BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); + *
    + * TableId tableId = TableId.of(datasetName, tableName);
    + * TableDefinition tableDefinition = StandardTableDefinition.of(schema);
    + * TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build(); + *
    + * bigquery.create(tableInfo);
    + * System.out.println("Table created successfully");
      * }
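// [Editorial sketch, not part of the patch] The rewritten Javadoc example above assumes that
// datasetName, tableName, and schema are already defined by the caller. A self-contained
// version of the same create-table flow, with illustrative placeholder names:
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.StandardSQLTypeName;
import com.google.cloud.bigquery.StandardTableDefinition;
import com.google.cloud.bigquery.TableDefinition;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableInfo;

public class CreateTableSketch {
  public static void main(String[] args) {
    // Uses Application Default Credentials and the default project.
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    // "my_dataset" must already exist; "my_table" is the table to create.
    TableId tableId = TableId.of("my_dataset", "my_table");
    Schema schema = Schema.of(Field.of("stringField", StandardSQLTypeName.STRING));
    TableDefinition tableDefinition = StandardTableDefinition.of(schema);
    TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();
    bigquery.create(tableInfo);
    System.out.println("Table created successfully");
  }
}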
    * * @see Google Cloud BigQuery diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/BigQueryRpc.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/BigQueryRpc.java index c9b0f0a21d..65fd45d02a 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/BigQueryRpc.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/BigQueryRpc.java @@ -40,12 +40,13 @@ @InternalExtensionOnly public interface BigQueryRpc extends ServiceRpc { - // These options are part of the Google Cloud BigQuery query parameters + // These options are part of the Google Cloud BigQuery query parameters. enum Option { FIELDS("fields"), DELETE_CONTENTS("deleteContents"), ALL_DATASETS("all"), ALL_USERS("allUsers"), + AUTODETECT_SCHEMA("autodetectSchema"), LABEL_FILTER("filter"), MIN_CREATION_TIME("minCreationTime"), MAX_CREATION_TIME("maxCreationTime"), @@ -55,7 +56,13 @@ enum Option { START_INDEX("startIndex"), STATE_FILTER("stateFilter"), TIMEOUT("timeoutMs"), - REQUESTED_POLICY_VERSION("requestedPolicyVersion"); + REQUESTED_POLICY_VERSION("requestedPolicyVersion"), + TABLE_METADATA_VIEW("view"), + RETRY_OPTIONS("retryOptions"), + BIGQUERY_RETRY_CONFIG("bigQueryRetryConfig"), + ACCESS_POLICY_VERSION("accessPolicyVersion"), + DATASET_VIEW("datasetView"), + DATASET_UPDATE_MODE("datasetUpdateMode"); private final String value; @@ -122,6 +129,13 @@ Boolean getBoolean(Map options) { */ Job create(Job job, Map options); + /** + * Creates a new query job. + * + * @throws BigQueryException upon failure + */ + Job createJobForQuery(Job job); + /** * Delete the requested dataset. * @@ -222,6 +236,7 @@ Tuple> listModels( Tuple> listRoutines( String projectId, String datasetId, Map options); + /** * Deletes the requested routine. * @@ -246,6 +261,14 @@ TableDataInsertAllResponse insertAll( TableDataList listTableData( String projectId, String datasetId, String tableId, Map options); + /** + * Lists the table's rows with a limit on how many rows of data to pre-fetch. + * + * @throws BigQueryException upon failure + */ + TableDataList listTableDataWithRowLimit( + String projectId, String datasetId, String tableId, Integer rowLimit, String pageToken); + /** * Returns the requested job or {@code null} if not found. * @@ -253,6 +276,13 @@ TableDataList listTableData( */ Job getJob(String projectId, String jobId, String location, Map options); + /** + * Returns the requested query job or {@code null} if not found. + * + * @throws BigQueryException upon failure + */ + Job getQueryJob(String projectId, String jobId, String location); + /** * Lists the project's jobs. * @@ -270,6 +300,14 @@ TableDataList listTableData( */ boolean cancel(String projectId, String jobId, String location); + /** + * Sends a job delete request. + * + * @return {@code true} if delete was successful, {@code false} if the job was not found + * @throws BigQueryException upon failure + */ + boolean deleteJob(String projectId, String jobName, String location); + /** * Returns results of the query associated with the provided job. * @@ -278,6 +316,15 @@ TableDataList listTableData( GetQueryResultsResponse getQueryResults( String projectId, String jobId, String location, Map options); + /** + * Returns results of the query with a limit on how many rows of data to pre-fetch associated with + * the provided job. 
+ * + * @throws BigQueryException upon failure + */ + GetQueryResultsResponse getQueryResultsWithRowLimit( + String projectId, String jobId, String location, Integer preFetchedRowLimit, Long timeoutMs); + /** * Runs a BigQuery SQL query synchronously and returns query results if the query completes within * a specified timeout. diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpc.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpc.java index 4d9558454d..16737dc4b7 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpc.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpc.java @@ -19,6 +19,7 @@ import static java.net.HttpURLConnection.HTTP_CREATED; import static java.net.HttpURLConnection.HTTP_NOT_FOUND; import static java.net.HttpURLConnection.HTTP_OK; +import static java.net.HttpURLConnection.HTTP_UNAUTHORIZED; import com.google.api.client.http.ByteArrayContent; import com.google.api.client.http.GenericUrl; @@ -30,7 +31,7 @@ import com.google.api.client.http.HttpTransport; import com.google.api.client.http.json.JsonHttpContent; import com.google.api.client.json.JsonFactory; -import com.google.api.client.json.jackson2.JacksonFactory; +import com.google.api.client.json.gson.GsonFactory; import com.google.api.core.InternalApi; import com.google.api.core.InternalExtensionOnly; import com.google.api.services.bigquery.Bigquery; @@ -68,6 +69,9 @@ import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; +import io.opentelemetry.api.common.Attributes; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanKind; import java.io.IOException; import java.math.BigInteger; import java.util.List; @@ -77,8 +81,7 @@ public class HttpBigQueryRpc implements BigQueryRpc { public static final String DEFAULT_PROJECTION = "full"; - private static final String BASE_RESUMABLE_URI = - "https://www.googleapis.com/upload/bigquery/v2/projects/"; + private static final String BASE_RESUMABLE_URI = "upload/bigquery/v2/projects/"; // see: // https://cloud.google.com/bigquery/loading-data-post-request#resume-upload private static final int HTTP_RESUME_INCOMPLETE = 308; @@ -95,6 +98,7 @@ public Dataset apply(DatasetList.Datasets datasetPb) { .setFriendlyName(datasetPb.getFriendlyName()) .setId(datasetPb.getId()) .setKind(datasetPb.getKind()) + .setLocation(datasetPb.getLocation()) .setLabels(datasetPb.getLabels()); } }; @@ -105,8 +109,8 @@ public HttpBigQueryRpc(BigQueryOptions options) { HttpRequestInitializer initializer = transportOptions.getHttpRequestInitializer(options); this.options = options; bigquery = - new Bigquery.Builder(transport, new JacksonFactory(), initializer) - .setRootUrl(options.getHost()) + new Bigquery.Builder(transport, new GsonFactory(), initializer) + .setRootUrl(options.getResolvedApiaryHost("bigquery")) .setApplicationName(options.getApplicationName()) .build(); } @@ -115,15 +119,20 @@ private static BigQueryException translate(IOException exception) { return new BigQueryException(exception); } + private void validateRPC() throws BigQueryException, IOException { + if (!this.options.hasValidUniverseDomain()) { + String errorMessage = + String.format( + "The configured universe domain %s does not match the universe domain found in the credentials %s. 
If you haven't configured the universe domain explicitly, `googleapis.com` is the default.", + this.options.getUniverseDomain(), this.options.getCredentials().getUniverseDomain()); + throw new BigQueryException(HTTP_UNAUTHORIZED, errorMessage); + } + } + @Override public Dataset getDataset(String projectId, String datasetId, Map options) { try { - return bigquery - .datasets() - .get(projectId, datasetId) - .setFields(Option.FIELDS.getString(options)) - .setPrettyPrint(false) - .execute(); + return getDatasetSkipExceptionTranslation(projectId, datasetId, options); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -133,104 +142,351 @@ public Dataset getDataset(String projectId, String datasetId, Map opt } } + @InternalApi("internal to java-bigquery") + public Dataset getDatasetSkipExceptionTranslation( + String projectId, String datasetId, Map options) throws IOException { + validateRPC(); + Bigquery.Datasets.Get bqGetRequest = + bigquery + .datasets() + .get(projectId, datasetId) + .setFields(Option.FIELDS.getString(options)) + .setPrettyPrint(false); + if (options.containsKey(Option.ACCESS_POLICY_VERSION)) { + bqGetRequest.setAccessPolicyVersion((Integer) options.get(Option.ACCESS_POLICY_VERSION)); + } + if (options.containsKey(Option.DATASET_VIEW)) { + bqGetRequest.setDatasetView(options.get(Option.DATASET_VIEW).toString()); + } + bqGetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getDataset = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getDataset = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getDataset") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "DatasetService") + .setAttribute("bq.rpc.method", "GetDataset") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + + Dataset dataset = bqGetRequest.execute(); + if (getDataset != null) { + getDataset.setAttribute("bq.rpc.response.dataset.id", dataset.getId()); + getDataset.end(); + } + return dataset; + } + @Override public Tuple> listDatasets(String projectId, Map options) { try { - DatasetList datasetsList = - bigquery - .datasets() - .list(projectId) - .setPrettyPrint(false) - .setAll(Option.ALL_DATASETS.getBoolean(options)) - .setFilter(Option.LABEL_FILTER.getString(options)) - .setMaxResults(Option.MAX_RESULTS.getLong(options)) - .setPageToken(Option.PAGE_TOKEN.getString(options)) - .execute(); - Iterable datasets = datasetsList.getDatasets(); - return Tuple.of( - datasetsList.getNextPageToken(), - Iterables.transform( - datasets != null ? 
datasets : ImmutableList.of(), - LIST_TO_DATASET)); + return listDatasetsSkipExceptionTranslation(projectId, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Tuple> listDatasetsSkipExceptionTranslation( + String projectId, Map options) throws IOException { + validateRPC(); + Bigquery.Datasets.List datasetsListRequest = + bigquery + .datasets() + .list(projectId) + .setPrettyPrint(false) + .setAll(Option.ALL_DATASETS.getBoolean(options)) + .setFilter(Option.LABEL_FILTER.getString(options)) + .setMaxResults(Option.MAX_RESULTS.getLong(options)) + .setPageToken(Option.PAGE_TOKEN.getString(options)); + + datasetsListRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span listDatasets = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + listDatasets = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.listDatasets") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "DatasetService") + .setAttribute("bq.rpc.method", "ListDatasets") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", datasetsListRequest.getPageToken()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + + DatasetList datasetsList = datasetsListRequest.execute(); + Iterable datasets = datasetsList.getDatasets(); + if (listDatasets != null) { + listDatasets.setAttribute("bq.rpc.next_page_token", datasetsList.getNextPageToken()); + listDatasets.end(); + } + return Tuple.of( + datasetsList.getNextPageToken(), + Iterables.transform( + datasets != null ? datasets : ImmutableList.of(), + LIST_TO_DATASET)); + } + @Override public Dataset create(Dataset dataset, Map options) { try { - return bigquery - .datasets() - .insert(dataset.getDatasetReference().getProjectId(), dataset) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return createSkipExceptionTranslation(dataset, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Dataset createSkipExceptionTranslation(Dataset dataset, Map options) + throws IOException { + validateRPC(); + Bigquery.Datasets.Insert bqCreateRequest = + bigquery + .datasets() + .insert(dataset.getDatasetReference().getProjectId(), dataset) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + if (options.containsKey(Option.ACCESS_POLICY_VERSION)) { + bqCreateRequest.setAccessPolicyVersion((Integer) options.get(Option.ACCESS_POLICY_VERSION)); + } + bqCreateRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span createDataset = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + createDataset = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.createDataset") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "DatasetService") + .setAttribute("bq.rpc.method", "InsertDataset") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Dataset datasetResponse = bqCreateRequest.execute(); + if (createDataset != null) { + createDataset.setAttribute("bq.rpc.response.dataset.id", datasetResponse.getId()); + createDataset.end(); + } + return datasetResponse; + } + 
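// [Editorial sketch, not part of the patch] Each *SkipExceptionTranslation method in this file
// wraps its request in the same OpenTelemetry boilerplate: start a CLIENT span when tracing is
// enabled, execute the request, record response attributes, and end the span. The pattern,
// condensed into a hypothetical helper (names are illustrative):
import io.opentelemetry.api.trace.Span;
import io.opentelemetry.api.trace.SpanKind;
import io.opentelemetry.api.trace.Tracer;
import java.util.concurrent.Callable;

final class TracedRpcSketch {
  // Runs an RPC call inside a CLIENT span; with a null tracer (tracing disabled) the
  // request executes directly, mirroring the null checks in the methods in this file.
  static <T> T traced(Tracer tracer, String service, String method, Callable<T> call)
      throws Exception {
    if (tracer == null) {
      return call.call();
    }
    Span span =
        tracer
            .spanBuilder("com.google.cloud.bigquery.BigQueryRpc." + method)
            .setSpanKind(SpanKind.CLIENT)
            .setAttribute("bq.rpc.service", service)
            .setAttribute("bq.rpc.method", method)
            .setAttribute("bq.rpc.system", "http")
            .startSpan();
    try {
      return call.call();
    } finally {
      // Unlike the inlined versions, this also ends the span when the call throws.
      span.end();
    }
  }
}
// Example use: traced(tracer, "DatasetService", "GetDataset", bqGetRequest::execute);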
@Override public Table create(Table table, Map options) { try { - // unset the type, as it is output only - table.setType(null); - TableReference reference = table.getTableReference(); - return bigquery - .tables() - .insert(reference.getProjectId(), reference.getDatasetId(), table) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return createSkipExceptionTranslation(table, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Table createSkipExceptionTranslation(Table table, Map options) + throws IOException { + validateRPC(); + // unset the type, as it is output only + table.setType(null); + TableReference reference = table.getTableReference(); + Bigquery.Tables.Insert bqCreateRequest = + bigquery + .tables() + .insert(reference.getProjectId(), reference.getDatasetId(), table) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + + bqCreateRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span createTable = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + createTable = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.createTable") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "InsertTable") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Table tableResponse = bqCreateRequest.execute(); + if (createTable != null) { + createTable.setAttribute("bq.rpc.response.table.id", tableResponse.getId()); + createTable.end(); + } + return tableResponse; + } + @Override public Routine create(Routine routine, Map options) { try { - RoutineReference reference = routine.getRoutineReference(); - return bigquery - .routines() - .insert(reference.getProjectId(), reference.getDatasetId(), routine) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return createSkipExceptionTranslation(routine, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Routine createSkipExceptionTranslation(Routine routine, Map options) + throws IOException { + validateRPC(); + RoutineReference reference = routine.getRoutineReference(); + Bigquery.Routines.Insert bqCreateRequest = + bigquery + .routines() + .insert(reference.getProjectId(), reference.getDatasetId(), routine) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + + bqCreateRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span createRoutine = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + createRoutine = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.createRoutine") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "RoutineService") + .setAttribute("bq.rpc.method", "InsertRoutine") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Routine routineResponse = bqCreateRequest.execute(); + if (createRoutine != null) { + createRoutine.setAttribute( + "bq.rpc.response.routine.id", routineResponse.getRoutineReference().getRoutineId()); + createRoutine.end(); + } + return 
routineResponse; + } + @Override public Job create(Job job, Map options) { try { - String projectId = - job.getJobReference() != null - ? job.getJobReference().getProjectId() - : this.options.getProjectId(); - return bigquery - .jobs() - .insert(projectId, job) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return createSkipExceptionTranslation(job, options); + } catch (IOException ex) { + throw translate(ex); + } + } + + @InternalApi("internal to java-bigquery") + public Job createSkipExceptionTranslation(Job job, Map options) throws IOException { + validateRPC(); + String projectId = + job.getJobReference() != null + ? job.getJobReference().getProjectId() + : this.options.getProjectId(); + Bigquery.Jobs.Insert bqCreateRequest = + bigquery + .jobs() + .insert(projectId, job) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + + bqCreateRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span createJob = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + createJob = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.createJob") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "InsertJob") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Job jobResponse = bqCreateRequest.execute(); + if (createJob != null) { + createJob.setAttribute("bq.rpc.response.job.id", jobResponse.getId()); + createJob.setAttribute( + "bq.rpc.response.job.status.state", jobResponse.getStatus().getState()); + createJob.end(); + } + return jobResponse; + } + + @Override + public Job createJobForQuery(Job job) { + try { + return createJobForQuerySkipExceptionTranslation(job); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Job createJobForQuerySkipExceptionTranslation(Job job) throws IOException { + validateRPC(); + String projectId = + job.getJobReference() != null + ? 
job.getJobReference().getProjectId() + : this.options.getProjectId(); + Bigquery.Jobs.Insert bqCreateRequest = + bigquery.jobs().insert(projectId, job).setPrettyPrint(false); + + bqCreateRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span createJob = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + createJob = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.createJobForQuery") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "InsertJob") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + Job jobResponse = bqCreateRequest.execute(); + if (createJob != null) { + createJob.setAttribute("bq.rpc.response.job.id", jobResponse.getId()); + createJob.setAttribute( + "bq.rpc.response.job.status.state", jobResponse.getStatus().getState()); + createJob.end(); + } + return jobResponse; + } + @Override public boolean deleteDataset(String projectId, String datasetId, Map options) { try { - bigquery - .datasets() - .delete(projectId, datasetId) - .setPrettyPrint(false) - .setDeleteContents(Option.DELETE_CONTENTS.getBoolean(options)) - .execute(); - return true; + return deleteDatasetSkipExceptionTranslation(projectId, datasetId, options); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -240,48 +496,150 @@ public boolean deleteDataset(String projectId, String datasetId, Map } } + @InternalApi("internal to java-bigquery") + public boolean deleteDatasetSkipExceptionTranslation( + String projectId, String datasetId, Map options) throws IOException { + validateRPC(); + Bigquery.Datasets.Delete bqDeleteRequest = + bigquery + .datasets() + .delete(projectId, datasetId) + .setPrettyPrint(false) + .setDeleteContents(Option.DELETE_CONTENTS.getBoolean(options)); + + bqDeleteRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span deleteDataset = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + deleteDataset = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.deleteDataset") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "DatasetService") + .setAttribute("bq.rpc.method", "DeleteDataset") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + bqDeleteRequest.execute(); + if (deleteDataset != null) { + deleteDataset.end(); + } + return true; + } + @Override public Dataset patch(Dataset dataset, Map options) { try { - DatasetReference reference = dataset.getDatasetReference(); - return bigquery - .datasets() - .patch(reference.getProjectId(), reference.getDatasetId(), dataset) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return patchSkipExceptionTranslation(dataset, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Dataset patchSkipExceptionTranslation(Dataset dataset, Map options) + throws IOException { + validateRPC(); + DatasetReference reference = dataset.getDatasetReference(); + Bigquery.Datasets.Patch bqPatchRequest = + bigquery + .datasets() + .patch(reference.getProjectId(), reference.getDatasetId(), dataset) + 
.setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + if (options.containsKey(Option.ACCESS_POLICY_VERSION)) { + bqPatchRequest.setAccessPolicyVersion((Integer) options.get(Option.ACCESS_POLICY_VERSION)); + } + if (options.containsKey(Option.DATASET_UPDATE_MODE)) { + bqPatchRequest.setUpdateMode(options.get(Option.DATASET_UPDATE_MODE).toString()); + } + bqPatchRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span patchDataset = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + patchDataset = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.patchDataset") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "DatasetService") + .setAttribute("bq.rpc.method", "PatchDataset") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Dataset datasetResponse = bqPatchRequest.execute(); + if (patchDataset != null) { + patchDataset.setAttribute("bq.rpc.response.dataset.id", datasetResponse.getId()); + patchDataset.end(); + } + return datasetResponse; + } + @Override public Table patch(Table table, Map options) { try { - // unset the type, as it is output only - table.setType(null); - TableReference reference = table.getTableReference(); - return bigquery - .tables() - .patch(reference.getProjectId(), reference.getDatasetId(), reference.getTableId(), table) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return patchSkipExceptionTranslation(table, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Table patchSkipExceptionTranslation(Table table, Map options) + throws IOException { + validateRPC(); + // unset the type, as it is output only + table.setType(null); + TableReference reference = table.getTableReference(); + Bigquery.Tables.Patch bqPatchRequest = + bigquery + .tables() + .patch( + reference.getProjectId(), reference.getDatasetId(), reference.getTableId(), table) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)) + .setAutodetectSchema(BigQueryRpc.Option.AUTODETECT_SCHEMA.getBoolean(options)); + + bqPatchRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span patchTable = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + patchTable = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.patchTable") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "PatchTable") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Table tableResponse = bqPatchRequest.execute(); + if (patchTable != null) { + patchTable.setAttribute("bq.rpc.response.table.id", tableResponse.getId()); + patchTable.end(); + } + return tableResponse; + } + @Override public Table getTable( String projectId, String datasetId, String tableId, Map options) { try { - return bigquery - .tables() - .get(projectId, datasetId, tableId) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return getTableSkipExceptionTranslation(projectId, datasetId, tableId, options); } catch (IOException ex) { BigQueryException 
serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -291,47 +649,126 @@ public Table getTable( } } + @InternalApi("internal to java-bigquery") + public Table getTableSkipExceptionTranslation( + String projectId, String datasetId, String tableId, Map options) + throws IOException { + validateRPC(); + Bigquery.Tables.Get bqGetRequest = + bigquery + .tables() + .get(projectId, datasetId, tableId) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)) + .setView(getTableMetadataOption(options)); + + bqGetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getTable = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getTable = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getTable") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "GetTable") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Table tableResponse = bqGetRequest.execute(); + if (getTable != null) { + getTable.setAttribute("bq.rpc.response.table.id", tableResponse.getId()); + getTable.end(); + } + return tableResponse; + } + + private String getTableMetadataOption(Map options) { + if (options.containsKey(Option.TABLE_METADATA_VIEW)) { + return options.get(Option.TABLE_METADATA_VIEW).toString(); + } + return "STORAGE_STATS"; + } + @Override public Tuple> listTables( String projectId, String datasetId, Map options) { try { - TableList tableList = - bigquery - .tables() - .list(projectId, datasetId) - .setPrettyPrint(false) - .setMaxResults(Option.MAX_RESULTS.getLong(options)) - .setPageToken(Option.PAGE_TOKEN.getString(options)) - .execute(); - Iterable tables = tableList.getTables(); - return Tuple.of( - tableList.getNextPageToken(), - Iterables.transform( - tables != null ? 
tables : ImmutableList.of(), - new Function() { - @Override - public Table apply(TableList.Tables tablePb) { - return new Table() - .setFriendlyName(tablePb.getFriendlyName()) - .setId(tablePb.getId()) - .setKind(tablePb.getKind()) - .setTableReference(tablePb.getTableReference()) - .setType(tablePb.getType()) - .setCreationTime(tablePb.getCreationTime()) - .setTimePartitioning(tablePb.getTimePartitioning()) - .setRangePartitioning(tablePb.getRangePartitioning()); - } - })); + return listTablesSkipExceptionTranslation(projectId, datasetId, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Tuple> listTablesSkipExceptionTranslation( + String projectId, String datasetId, Map options) throws IOException { + validateRPC(); + Bigquery.Tables.List tableListRequest = + bigquery + .tables() + .list(projectId, datasetId) + .setPrettyPrint(false) + .setMaxResults(Option.MAX_RESULTS.getLong(options)) + .setPageToken(Option.PAGE_TOKEN.getString(options)); + + tableListRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span listTables = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + listTables = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.listTables") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "ListTables") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", tableListRequest.getPageToken()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + TableList tableResponse = tableListRequest.execute(); + if (listTables != null) { + listTables.setAttribute("bq.rpc.next_page_token", tableResponse.getNextPageToken()); + listTables.end(); + } + + Iterable tables = tableResponse.getTables(); + return Tuple.of( + tableResponse.getNextPageToken(), + Iterables.transform( + tables != null ? 
tables : ImmutableList.of(), + new Function() { + @Override + public Table apply(TableList.Tables tablePb) { + return new Table() + .setFriendlyName(tablePb.getFriendlyName()) + .setId(tablePb.getId()) + .setKind(tablePb.getKind()) + .setTableReference(tablePb.getTableReference()) + .setType(tablePb.getType()) + .setCreationTime(tablePb.getCreationTime()) + .setTimePartitioning(tablePb.getTimePartitioning()) + .setRangePartitioning(tablePb.getRangePartitioning()) + .setClustering(tablePb.getClustering()) + .setLabels(tablePb.getLabels()); + } + })); + } + @Override public boolean deleteTable(String projectId, String datasetId, String tableId) { try { - bigquery.tables().delete(projectId, datasetId, tableId).execute(); - return true; + return deleteTableSkipExceptionTranslation(projectId, datasetId, tableId); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -341,32 +778,92 @@ public boolean deleteTable(String projectId, String datasetId, String tableId) { } } + @InternalApi("internal to java-bigquery") + public boolean deleteTableSkipExceptionTranslation( + String projectId, String datasetId, String tableId) throws IOException { + validateRPC(); + Bigquery.Tables.Delete bqDeleteRequest = + bigquery.tables().delete(projectId, datasetId, tableId); + + bqDeleteRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span deleteTable = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + deleteTable = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.deleteTable") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "DeleteTable") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + bqDeleteRequest.execute(); + if (deleteTable != null) { + deleteTable.end(); + } + return true; + } + @Override public Model patch(Model model, Map options) { try { - // unset the type, as it is output only - ModelReference reference = model.getModelReference(); - return bigquery - .models() - .patch(reference.getProjectId(), reference.getDatasetId(), reference.getModelId(), model) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return patchSkipExceptionTranslation(model, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Model patchSkipExceptionTranslation(Model model, Map options) + throws IOException { + validateRPC(); + // unset the type, as it is output only + ModelReference reference = model.getModelReference(); + Bigquery.Models.Patch bqPatchRequest = + bigquery + .models() + .patch( + reference.getProjectId(), reference.getDatasetId(), reference.getModelId(), model) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + + bqPatchRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span patchModel = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + patchModel = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.patchModel") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "ModelService") + .setAttribute("bq.rpc.method", "PatchModel") + .setAttribute("bq.rpc.system", "http") + 
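// Record the caller-supplied request options (fields, etc.) as span attributes. +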
.setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Model modelResponse = bqPatchRequest.execute(); + if (patchModel != null) { + patchModel.setAttribute( + "bq.rpc.response.model.id", modelResponse.getModelReference().getModelId()); + patchModel.end(); + } + return modelResponse; + } + @Override public Model getModel( String projectId, String datasetId, String modelId, Map options) { try { - return bigquery - .models() - .get(projectId, datasetId, modelId) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return getModelSkipExceptionTranslation(projectId, datasetId, modelId, options); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -376,31 +873,101 @@ public Model getModel( } } + @InternalApi("internal to java-bigquery") + public Model getModelSkipExceptionTranslation( + String projectId, String datasetId, String modelId, Map options) + throws IOException { + validateRPC(); + Bigquery.Models.Get bqGetRequest = + bigquery + .models() + .get(projectId, datasetId, modelId) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + + bqGetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getModel = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getModel = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getModel") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "ModelService") + .setAttribute("bq.rpc.method", "GetModel") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Model modelResponse = bqGetRequest.execute(); + if (getModel != null) { + getModel.setAttribute( + "bq.rpc.response.model.id", modelResponse.getModelReference().getModelId()); + getModel.end(); + } + return modelResponse; + } + @Override public Tuple> listModels( String projectId, String datasetId, Map options) { try { - ListModelsResponse modelList = - bigquery - .models() - .list(projectId, datasetId) - .setPrettyPrint(false) - .setMaxResults(Option.MAX_RESULTS.getLong(options)) - .setPageToken(Option.PAGE_TOKEN.getString(options)) - .execute(); - Iterable models = - modelList.getModels() != null ? 
modelList.getModels() : ImmutableList.of(); - return Tuple.of(modelList.getNextPageToken(), models); + return listModelsSkipExceptionTranslation(projectId, datasetId, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Tuple> listModelsSkipExceptionTranslation( + String projectId, String datasetId, Map options) throws IOException { + validateRPC(); + Bigquery.Models.List modelListRequest = + bigquery + .models() + .list(projectId, datasetId) + .setPrettyPrint(false) + .setMaxResults(Option.MAX_RESULTS.getLong(options)) + .setPageToken(Option.PAGE_TOKEN.getString(options)); + + modelListRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span listModels = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + listModels = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.listModels") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "ModelService") + .setAttribute("bq.rpc.method", "ListModels") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", modelListRequest.getPageToken()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + ListModelsResponse modelResponse = modelListRequest.execute(); + if (listModels != null) { + listModels.setAttribute("bq.rpc.next_page_token", modelResponse.getNextPageToken()); + listModels.end(); + } + + Iterable models = + modelResponse.getModels() != null ? modelResponse.getModels() : ImmutableList.of(); + return Tuple.of(modelResponse.getNextPageToken(), models); + } + @Override public boolean deleteModel(String projectId, String datasetId, String modelId) { try { - bigquery.models().delete(projectId, datasetId, modelId).execute(); - return true; + return deleteModelSkipExceptionTranslation(projectId, datasetId, modelId); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -410,32 +977,94 @@ public boolean deleteModel(String projectId, String datasetId, String modelId) { } } + @InternalApi("internal to java-bigquery") + public boolean deleteModelSkipExceptionTranslation( + String projectId, String datasetId, String modelId) throws IOException { + validateRPC(); + Bigquery.Models.Delete bqDeleteRequest = + bigquery.models().delete(projectId, datasetId, modelId); + + bqDeleteRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span deleteModels = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + deleteModels = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.deleteModel") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "ModelService") + .setAttribute("bq.rpc.method", "DeleteModel") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + bqDeleteRequest.execute(); + if (deleteModels != null) { + deleteModels.end(); + } + return true; + } + @Override public Routine update(Routine routine, Map options) { try { - RoutineReference reference = routine.getRoutineReference(); - return bigquery - .routines() - .update( - reference.getProjectId(), reference.getDatasetId(), reference.getRoutineId(), routine) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return 
updateSkipExceptionTranslation(routine, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Routine updateSkipExceptionTranslation(Routine routine, Map options) + throws IOException { + validateRPC(); + RoutineReference reference = routine.getRoutineReference(); + Bigquery.Routines.Update bqUpdateRequest = + bigquery + .routines() + .update( + reference.getProjectId(), + reference.getDatasetId(), + reference.getRoutineId(), + routine) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + + bqUpdateRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span updateRoutine = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + updateRoutine = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.updateRoutine") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "RoutineService") + .setAttribute("bq.rpc.method", "UpdateRoutine") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Routine routineResponse = bqUpdateRequest.execute(); + if (updateRoutine != null) { + updateRoutine.setAttribute( + "bq.rpc.response.routine.id", routineResponse.getRoutineReference().getRoutineId()); + updateRoutine.end(); + } + return routineResponse; + } + @Override public Routine getRoutine( String projectId, String datasetId, String routineId, Map options) { try { - return bigquery - .routines() - .get(projectId, datasetId, routineId) - .setPrettyPrint(false) - .setFields(Option.FIELDS.getString(options)) - .execute(); + return getRoutineSkipExceptionTranslation(projectId, datasetId, routineId, options); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -445,33 +1074,102 @@ public Routine getRoutine( } } + @InternalApi("internal to java-bigquery") + public Routine getRoutineSkipExceptionTranslation( + String projectId, String datasetId, String routineId, Map options) + throws IOException { + validateRPC(); + Bigquery.Routines.Get bqGetRequest = + bigquery + .routines() + .get(projectId, datasetId, routineId) + .setPrettyPrint(false) + .setFields(Option.FIELDS.getString(options)); + + bqGetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getRoutine = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getRoutine = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getRoutine") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "RoutineService") + .setAttribute("bq.rpc.method", "GetRoutine") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Routine routineResponse = bqGetRequest.execute(); + if (getRoutine != null) { + getRoutine.setAttribute( + "bq.rpc.response.routine.id", routineResponse.getRoutineReference().getRoutineId()); + getRoutine.end(); + } + return routineResponse; + } + @Override public Tuple> listRoutines( String projectId, String datasetId, Map options) { try { - ListRoutinesResponse routineList = - bigquery - .routines() - .list(projectId, datasetId) - .setPrettyPrint(false) - .setMaxResults(Option.MAX_RESULTS.getLong(options)) - 
.setPageToken(Option.PAGE_TOKEN.getString(options)) - .execute(); - Iterable routines = - routineList.getRoutines() != null - ? routineList.getRoutines() - : ImmutableList.of(); - return Tuple.of(routineList.getNextPageToken(), routines); + return listRoutinesSkipExceptionTranslation(projectId, datasetId, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Tuple> listRoutinesSkipExceptionTranslation( + String projectId, String datasetId, Map options) throws IOException { + validateRPC(); + Bigquery.Routines.List routineListRequest = + bigquery + .routines() + .list(projectId, datasetId) + .setPrettyPrint(false) + .setMaxResults(Option.MAX_RESULTS.getLong(options)) + .setPageToken(Option.PAGE_TOKEN.getString(options)); + + routineListRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span listRoutines = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + listRoutines = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.listRoutines") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "RoutineService") + .setAttribute("bq.rpc.method", "ListRoutines") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", routineListRequest.getPageToken()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + ListRoutinesResponse routineResponse = routineListRequest.execute(); + if (listRoutines != null) { + listRoutines.setAttribute("bq.rpc.next_page_token", routineResponse.getNextPageToken()); + listRoutines.end(); + } + Iterable routines = + routineResponse.getRoutines() != null + ? 
routineResponse.getRoutines() + : ImmutableList.of(); + return Tuple.of(routineResponse.getNextPageToken(), routines); + } + @Override public boolean deleteRoutine(String projectId, String datasetId, String routineId) { try { - bigquery.routines().delete(projectId, datasetId, routineId).execute(); - return true; + return deleteRoutineSkipExceptionTranslation(projectId, datasetId, routineId); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -481,50 +1179,196 @@ public boolean deleteRoutine(String projectId, String datasetId, String routineI } } + @InternalApi("internal to java-bigquery") + public boolean deleteRoutineSkipExceptionTranslation( + String projectId, String datasetId, String routineId) throws IOException { + validateRPC(); + Bigquery.Routines.Delete bqDeleteRequest = + bigquery.routines().delete(projectId, datasetId, routineId); + + bqDeleteRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span deleteRoutine = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + deleteRoutine = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.deleteRoutine") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "RoutineService") + .setAttribute("bq.rpc.method", "DeleteRoutine") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + bqDeleteRequest.execute(); + if (deleteRoutine != null) { + deleteRoutine.end(); + } + return true; + } + @Override public TableDataInsertAllResponse insertAll( String projectId, String datasetId, String tableId, TableDataInsertAllRequest request) { try { - return bigquery - .tabledata() - .insertAll(projectId, datasetId, tableId, request) - .setPrettyPrint(false) - .execute(); + return insertAllSkipExceptionTranslation(projectId, datasetId, tableId, request); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public TableDataInsertAllResponse insertAllSkipExceptionTranslation( + String projectId, String datasetId, String tableId, TableDataInsertAllRequest request) + throws IOException { + validateRPC(); + Bigquery.Tabledata.InsertAll insertAllRequest = + bigquery + .tabledata() + .insertAll(projectId, datasetId, tableId, request) + .setPrettyPrint(false); + + insertAllRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span insertAll = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + insertAll = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.insertAll") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableDataService") + .setAttribute("bq.rpc.method", "InsertAll") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + TableDataInsertAllResponse insertAllResponse = insertAllRequest.execute(); + if (insertAll != null) { + insertAll.end(); + } + return insertAllResponse; + } + @Override public TableDataList listTableData( String projectId, String datasetId, String tableId, Map options) { try { - return bigquery - .tabledata() - .list(projectId, datasetId, tableId) - .setPrettyPrint(false) - .setMaxResults(Option.MAX_RESULTS.getLong(options)) - .setPageToken(Option.PAGE_TOKEN.getString(options)) - .setStartIndex( - Option.START_INDEX.getLong(options) != null - 
? BigInteger.valueOf(Option.START_INDEX.getLong(options)) - : null) - .execute(); + return listTableDataSkipExceptionTranslation(projectId, datasetId, tableId, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public TableDataList listTableDataSkipExceptionTranslation( + String projectId, String datasetId, String tableId, Map options) + throws IOException { + validateRPC(); + Bigquery.Tabledata.List bqListRequest = + bigquery + .tabledata() + .list(projectId, datasetId, tableId) + .setPrettyPrint(false) + .setMaxResults(Option.MAX_RESULTS.getLong(options)) + .setPageToken(Option.PAGE_TOKEN.getString(options)) + .setStartIndex( + Option.START_INDEX.getLong(options) != null + ? BigInteger.valueOf(Option.START_INDEX.getLong(options)) + : null); + + bqListRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span listTableData = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + listTableData = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.listTableData") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableDataService") + .setAttribute("bq.rpc.method", "List") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", bqListRequest.getPageToken()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + TableDataList bqListResponse = bqListRequest.execute(); + if (listTableData != null) { + listTableData.end(); + } + return bqListResponse; + } + + @Override + public TableDataList listTableDataWithRowLimit( + String projectId, + String datasetId, + String tableId, + Integer maxResultPerPage, + String pageToken) { + try { + return listTableDataWithRowLimitSkipExceptionTranslation( + projectId, datasetId, tableId, maxResultPerPage, pageToken); + } catch (IOException ex) { + throw translate(ex); + } + } + + @InternalApi("internal to java-bigquery") + public TableDataList listTableDataWithRowLimitSkipExceptionTranslation( + String projectId, + String datasetId, + String tableId, + Integer maxResultPerPage, + String pageToken) + throws IOException { + validateRPC(); + Bigquery.Tabledata.List bqListRequest = + bigquery + .tabledata() + .list(projectId, datasetId, tableId) + .setPrettyPrint(false) + .setMaxResults(Long.valueOf(maxResultPerPage)) + .setPageToken(pageToken); + + bqListRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span listTableData = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + listTableData = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.listTableDataWithRowLimit") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableDataService") + .setAttribute("bq.rpc.method", "List") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", bqListRequest.getPageToken()) + .startSpan(); + } + TableDataList bqListResponse = bqListRequest.execute(); + if (listTableData != null) { + listTableData.end(); + } + return bqListResponse; + } + @Override public Job getJob(String projectId, String jobId, String location, Map options) { try { - return bigquery - .jobs() - .get(projectId, jobId) - .setPrettyPrint(false) - .setLocation(location) - .setFields(Option.FIELDS.getString(options)) - .execute(); + 
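// Delegate to the translation-free variant; IOExceptions are translated in the catch below. +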
return getJobSkipExceptionTranslation(projectId, jobId, location, options); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -534,70 +1378,181 @@ public Job getJob(String projectId, String jobId, String location, Map options) throws IOException { + validateRPC(); + Bigquery.Jobs.Get bqGetRequest = + bigquery + .jobs() + .get(projectId, jobId) + .setPrettyPrint(false) + .setLocation(location) + .setFields(Option.FIELDS.getString(options)); + + bqGetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getJob = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getJob = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getJob") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "GetJob") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + Job jobResponse = bqGetRequest.execute(); + if (getJob != null) { + getJob.setAttribute("bq.rpc.response.job.id", jobResponse.getId()); + getJob.setAttribute("bq.rpc.response.job.status.state", jobResponse.getStatus().getState()); + getJob.end(); + } + return jobResponse; + } + @Override - public Tuple> listJobs(String projectId, Map options) { + public Job getQueryJob(String projectId, String jobId, String location) { try { - Bigquery.Jobs.List request = - bigquery - .jobs() - .list(projectId) - .setPrettyPrint(false) - .setAllUsers(Option.ALL_USERS.getBoolean(options)) - .setFields(Option.FIELDS.getString(options)) - .setStateFilter(Option.STATE_FILTER.>get(options)) - .setMaxResults(Option.MAX_RESULTS.getLong(options)) - .setPageToken(Option.PAGE_TOKEN.getString(options)) - .setProjection(DEFAULT_PROJECTION) - .setParentJobId(Option.PARENT_JOB_ID.getString(options)); - if (Option.MIN_CREATION_TIME.getLong(options) != null) { - request.setMinCreationTime(BigInteger.valueOf(Option.MIN_CREATION_TIME.getLong(options))); - } - if (Option.MAX_CREATION_TIME.getLong(options) != null) { - request.setMaxCreationTime(BigInteger.valueOf(Option.MAX_CREATION_TIME.getLong(options))); + return getQueryJobSkipExceptionTranslation(projectId, jobId, location); + } catch (IOException ex) { + BigQueryException serviceException = translate(ex); + if (serviceException.getCode() == HTTP_NOT_FOUND) { + return null; } - JobList jobsList = request.execute(); - - Iterable jobs = jobsList.getJobs(); - return Tuple.of( - jobsList.getNextPageToken(), - Iterables.transform( - jobs != null ? jobs : ImmutableList.of(), - new Function() { - @Override - public Job apply(JobList.Jobs jobPb) { - JobStatus statusPb = - jobPb.getStatus() != null ? 
jobPb.getStatus() : new JobStatus(); - if (statusPb.getState() == null) { - statusPb.setState(jobPb.getState()); - } - if (statusPb.getErrorResult() == null) { - statusPb.setErrorResult(jobPb.getErrorResult()); - } - return new Job() - .setConfiguration(jobPb.getConfiguration()) - .setId(jobPb.getId()) - .setJobReference(jobPb.getJobReference()) - .setKind(jobPb.getKind()) - .setStatistics(jobPb.getStatistics()) - .setStatus(statusPb) - .setUserEmail(jobPb.getUserEmail()); - } - })); + throw serviceException; + } + } + + @InternalApi("internal to java-bigquery") + public Job getQueryJobSkipExceptionTranslation(String projectId, String jobId, String location) + throws IOException { + validateRPC(); + Bigquery.Jobs.Get bqGetRequest = + bigquery.jobs().get(projectId, jobId).setPrettyPrint(false).setLocation(location); + + bqGetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getJob = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getJob = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getQueryJob") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "GetJob") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + Job jobResponse = bqGetRequest.execute(); + if (getJob != null) { + getJob.setAttribute("bq.rpc.response.job.id", jobResponse.getId()); + getJob.setAttribute("bq.rpc.response.job.status.state", jobResponse.getStatus().getState()); + getJob.end(); + } + return jobResponse; + } + + @Override + public Tuple> listJobs(String projectId, Map options) { + try { + return listJobsSkipExceptionTranslation(projectId, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Tuple> listJobsSkipExceptionTranslation( + String projectId, Map options) throws IOException { + validateRPC(); + Bigquery.Jobs.List listJobsRequest = + bigquery + .jobs() + .list(projectId) + .setPrettyPrint(false) + .setAllUsers(Option.ALL_USERS.getBoolean(options)) + .setFields(Option.FIELDS.getString(options)) + .setStateFilter(Option.STATE_FILTER.>get(options)) + .setMaxResults(Option.MAX_RESULTS.getLong(options)) + .setPageToken(Option.PAGE_TOKEN.getString(options)) + .setProjection(DEFAULT_PROJECTION) + .setParentJobId(Option.PARENT_JOB_ID.getString(options)); + if (Option.MIN_CREATION_TIME.getLong(options) != null) { + listJobsRequest.setMinCreationTime( + BigInteger.valueOf(Option.MIN_CREATION_TIME.getLong(options))); + } + if (Option.MAX_CREATION_TIME.getLong(options) != null) { + listJobsRequest.setMaxCreationTime( + BigInteger.valueOf(Option.MAX_CREATION_TIME.getLong(options))); + } + listJobsRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span listJobs = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + listJobs = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.listJobs") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "ListJobs") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", listJobsRequest.getPageToken()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + JobList jobsList = listJobsRequest.execute(); + if 
(listJobs != null) { + listJobs.setAttribute("bq.rpc.next_page_token", jobsList.getNextPageToken()); + listJobs.end(); + } + + Iterable jobs = jobsList.getJobs(); + return Tuple.of( + jobsList.getNextPageToken(), + Iterables.transform( + jobs != null ? jobs : ImmutableList.of(), + new Function() { + @Override + public Job apply(JobList.Jobs jobPb) { + JobStatus statusPb = + jobPb.getStatus() != null ? jobPb.getStatus() : new JobStatus(); + if (statusPb.getState() == null) { + statusPb.setState(jobPb.getState()); + } + if (statusPb.getErrorResult() == null) { + statusPb.setErrorResult(jobPb.getErrorResult()); + } + return new Job() + .setConfiguration(jobPb.getConfiguration()) + .setId(jobPb.getId()) + .setJobReference(jobPb.getJobReference()) + .setKind(jobPb.getKind()) + .setStatistics(jobPb.getStatistics()) + .setStatus(statusPb) + .setUserEmail(jobPb.getUserEmail()); + } + })); + } + @Override public boolean cancel(String projectId, String jobId, String location) { try { - bigquery - .jobs() - .cancel(projectId, jobId) - .setLocation(location) - .setPrettyPrint(false) - .execute(); - return true; + return cancelSkipExceptionTranslation(projectId, jobId, location); } catch (IOException ex) { BigQueryException serviceException = translate(ex); if (serviceException.getCode() == HTTP_NOT_FOUND) { @@ -607,55 +1562,249 @@ public boolean cancel(String projectId, String jobId, String location) { } } + @InternalApi("internal to java-bigquery") + public boolean cancelSkipExceptionTranslation(String projectId, String jobId, String location) + throws IOException { + validateRPC(); + Bigquery.Jobs.Cancel bqCancelRequest = + bigquery.jobs().cancel(projectId, jobId).setLocation(location).setPrettyPrint(false); + + bqCancelRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span cancelJob = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + cancelJob = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.cancelJob") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "CancelJob") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + bqCancelRequest.execute(); + if (cancelJob != null) { + cancelJob.end(); + } + return true; + } + + @Override + public boolean deleteJob(String projectId, String jobName, String location) { + try { + return deleteJobSkipExceptionTranslation(projectId, jobName, location); + } catch (IOException ex) { + throw translate(ex); + } + } + + @InternalApi("internal to java-bigquery") + public boolean deleteJobSkipExceptionTranslation( + String projectId, String jobName, String location) throws IOException { + validateRPC(); + Bigquery.Jobs.Delete bqDeleteRequest = + bigquery.jobs().delete(projectId, jobName).setLocation(location).setPrettyPrint(false); + + bqDeleteRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span deleteJob = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + deleteJob = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.deleteJob") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "DeleteJob") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + bqDeleteRequest.execute(); + if (deleteJob != 
null) { + deleteJob.end(); + } + return true; + } + @Override public GetQueryResultsResponse getQueryResults( String projectId, String jobId, String location, Map options) { try { - return bigquery - .jobs() - .getQueryResults(projectId, jobId) - .setPrettyPrint(false) - .setLocation(location) - .setMaxResults(Option.MAX_RESULTS.getLong(options)) - .setPageToken(Option.PAGE_TOKEN.getString(options)) - .setStartIndex( - Option.START_INDEX.getLong(options) != null - ? BigInteger.valueOf(Option.START_INDEX.getLong(options)) - : null) - .setTimeoutMs(Option.TIMEOUT.getLong(options)) - .execute(); + return getQueryResultsSkipExceptionTranslation(projectId, jobId, location, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public GetQueryResultsResponse getQueryResultsSkipExceptionTranslation( + String projectId, String jobId, String location, Map options) throws IOException { + validateRPC(); + Bigquery.Jobs.GetQueryResults queryRequest = + bigquery + .jobs() + .getQueryResults(projectId, jobId) + .setPrettyPrint(false) + .setLocation(location) + .setMaxResults(Option.MAX_RESULTS.getLong(options)) + .setPageToken(Option.PAGE_TOKEN.getString(options)) + .setStartIndex( + Option.START_INDEX.getLong(options) != null + ? BigInteger.valueOf(Option.START_INDEX.getLong(options)) + : null) + .setTimeoutMs(Option.TIMEOUT.getLong(options)); + + queryRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getQueryResults = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getQueryResults = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getQueryResults") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "GetQueryResults") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", queryRequest.getPageToken()) + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + + GetQueryResultsResponse queryResponse = queryRequest.execute(); + if (getQueryResults != null) { + getQueryResults.end(); + } + return queryResponse; + } + + @Override + public GetQueryResultsResponse getQueryResultsWithRowLimit( + String projectId, String jobId, String location, Integer maxResultPerPage, Long timeoutMs) { + try { + return getQueryResultsWithRowLimitSkipExceptionTranslation( + projectId, jobId, location, maxResultPerPage, timeoutMs); + } catch (IOException ex) { + throw translate(ex); + } + } + + @InternalApi("internal to java-bigquery") + public GetQueryResultsResponse getQueryResultsWithRowLimitSkipExceptionTranslation( + String projectId, String jobId, String location, Integer maxResultPerPage, Long timeoutMs) + throws IOException { + validateRPC(); + Bigquery.Jobs.GetQueryResults queryRequest = + bigquery + .jobs() + .getQueryResults(projectId, jobId) + .setPrettyPrint(false) + .setLocation(location) + .setMaxResults(Long.valueOf(maxResultPerPage)) + .setTimeoutMs(timeoutMs); + + queryRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getQueryResults = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getQueryResults = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getQueryResultsWithRowLimit") + 
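// Same underlying GetQueryResults RPC; the distinct span name marks the row-limited path. +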
.setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "GetQueryResults") + .setAttribute("bq.rpc.system", "http") + .setAttribute("bq.rpc.page_token", queryRequest.getPageToken()) + .startSpan(); + } + + GetQueryResultsResponse queryResponse = queryRequest.execute(); + if (getQueryResults != null) { + getQueryResults.end(); + } + return queryResponse; + } + @Override public QueryResponse queryRpc(String projectId, QueryRequest content) { try { - return bigquery.jobs().query(projectId, content).execute(); + return queryRpcSkipExceptionTranslation(projectId, content); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public QueryResponse queryRpcSkipExceptionTranslation(String projectId, QueryRequest content) + throws IOException { + validateRPC(); + Bigquery.Jobs.Query queryRequest = bigquery.jobs().query(projectId, content); + queryRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span querySpan = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + querySpan = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.queryRpc") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "JobService") + .setAttribute("bq.rpc.method", "Query") + .setAttribute("bq.rpc.system", "http") + .startSpan(); + } + + QueryResponse queryResponse = queryRequest.execute(); + if (querySpan != null) { + querySpan.end(); + } + return queryResponse; + } + @Override public String open(Job loadJob) { try { - String builder = BASE_RESUMABLE_URI + options.getProjectId() + "/jobs"; - GenericUrl url = new GenericUrl(builder); - url.set("uploadType", "resumable"); - JsonFactory jsonFactory = bigquery.getJsonFactory(); - HttpRequestFactory requestFactory = bigquery.getRequestFactory(); - HttpRequest httpRequest = - requestFactory.buildPostRequest(url, new JsonHttpContent(jsonFactory, loadJob)); - httpRequest.getHeaders().set("X-Upload-Content-Value", "application/octet-stream"); - HttpResponse response = httpRequest.execute(); - return response.getHeaders().getLocation(); + return openSkipExceptionTranslation(loadJob); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public String openSkipExceptionTranslation(Job loadJob) throws IOException { + String builder = options.getResolvedApiaryHost("bigquery"); + if (!builder.endsWith("/")) { + builder += "/"; + } + builder += BASE_RESUMABLE_URI + options.getProjectId() + "/jobs"; + GenericUrl url = new GenericUrl(builder); + url.set("uploadType", "resumable"); + JsonFactory jsonFactory = bigquery.getJsonFactory(); + HttpRequestFactory requestFactory = bigquery.getRequestFactory(); + HttpRequest httpRequest = + requestFactory.buildPostRequest(url, new JsonHttpContent(jsonFactory, loadJob)); + httpRequest.getHeaders().set("X-Upload-Content-Value", "application/octet-stream"); + HttpResponse response = httpRequest.execute(); + return response.getHeaders().getLocation(); + } + @Override public Job write( String uploadId, @@ -665,98 +1814,200 @@ public Job write( int length, boolean last) { try { - if (length == 0) { - return null; - } - GenericUrl url = new GenericUrl(uploadId); - HttpRequest httpRequest = - bigquery - .getRequestFactory() - .buildPutRequest(url, new ByteArrayContent(null, toWrite, toWriteOffset, length)); - 
httpRequest.setParser(bigquery.getObjectParser()); - long limit = destOffset + length; - StringBuilder range = new StringBuilder("bytes "); - range.append(destOffset).append('-').append(limit - 1).append('/'); - if (last) { - range.append(limit); - } else { - range.append('*'); - } - httpRequest.getHeaders().setContentRange(range.toString()); - int code; - String message; - IOException exception = null; - HttpResponse response = null; - try { - response = httpRequest.execute(); - code = response.getStatusCode(); - message = response.getStatusMessage(); - } catch (HttpResponseException ex) { - exception = ex; - code = ex.getStatusCode(); - message = ex.getStatusMessage(); - } - if (!last && code != HTTP_RESUME_INCOMPLETE - || last && !(code == HTTP_OK || code == HTTP_CREATED)) { - if (exception != null) { - throw exception; - } - throw new BigQueryException(code, message); - } - return last && response != null ? response.parseAs(Job.class) : null; + return writeSkipExceptionTranslation( + uploadId, toWrite, toWriteOffset, destOffset, length, last); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Job writeSkipExceptionTranslation( + String uploadId, byte[] toWrite, int toWriteOffset, long destOffset, int length, boolean last) + throws IOException { + if (length == 0) { + return null; + } + GenericUrl url = new GenericUrl(uploadId); + HttpRequest httpRequest = + bigquery + .getRequestFactory() + .buildPutRequest(url, new ByteArrayContent(null, toWrite, toWriteOffset, length)); + httpRequest.setParser(bigquery.getObjectParser()); + long limit = destOffset + length; + StringBuilder range = new StringBuilder("bytes "); + range.append(destOffset).append('-').append(limit - 1).append('/'); + if (last) { + range.append(limit); + } else { + range.append('*'); + } + httpRequest.getHeaders().setContentRange(range.toString()); + int code; + String message; + IOException exception = null; + HttpResponse response = null; + try { + response = httpRequest.execute(); + code = response.getStatusCode(); + message = response.getStatusMessage(); + } catch (HttpResponseException ex) { + exception = ex; + code = ex.getStatusCode(); + message = ex.getStatusMessage(); + } + if (!last && code != HTTP_RESUME_INCOMPLETE + || last && !(code == HTTP_OK || code == HTTP_CREATED)) { + if (exception != null) { + throw exception; + } + throw new BigQueryException(code, message); + } + return last && response != null ? 
response.parseAs(Job.class) : null; + } + @Override public Policy getIamPolicy(String resourceId, Map options) { try { - GetIamPolicyRequest policyRequest = new GetIamPolicyRequest(); - if (null != Option.REQUESTED_POLICY_VERSION.getLong(options)) { - policyRequest = - policyRequest.setOptions( - new GetPolicyOptions() - .setRequestedPolicyVersion( - Option.REQUESTED_POLICY_VERSION.getLong(options).intValue())); - } - return bigquery - .tables() - .getIamPolicy(resourceId, policyRequest) - .setPrettyPrint(false) - .execute(); + return getIamPolicySkipExceptionTranslation(resourceId, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Policy getIamPolicySkipExceptionTranslation(String resourceId, Map options) + throws IOException { + validateRPC(); + GetIamPolicyRequest policyRequest = new GetIamPolicyRequest(); + if (null != Option.REQUESTED_POLICY_VERSION.getLong(options)) { + policyRequest = + policyRequest.setOptions( + new GetPolicyOptions() + .setRequestedPolicyVersion( + Option.REQUESTED_POLICY_VERSION.getLong(options).intValue())); + } + Bigquery.Tables.GetIamPolicy bqGetRequest = + bigquery.tables().getIamPolicy(resourceId, policyRequest).setPrettyPrint(false); + + bqGetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span getIamPolicy = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + getIamPolicy = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.getIamPolicy") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "GetIamPolicy") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + + Policy bqGetResponse = bqGetRequest.execute(); + if (getIamPolicy != null) { + getIamPolicy.end(); + } + return bqGetResponse; + } + @Override public Policy setIamPolicy(String resourceId, Policy policy, Map options) { try { - SetIamPolicyRequest policyRequest = new SetIamPolicyRequest().setPolicy(policy); - return bigquery - .tables() - .setIamPolicy(resourceId, policyRequest) - .setPrettyPrint(false) - .execute(); + return setIamPolicySkipExceptionTranslation(resourceId, policy, options); } catch (IOException ex) { throw translate(ex); } } + @InternalApi("internal to java-bigquery") + public Policy setIamPolicySkipExceptionTranslation( + String resourceId, Policy policy, Map options) throws IOException { + validateRPC(); + SetIamPolicyRequest policyRequest = new SetIamPolicyRequest().setPolicy(policy); + Bigquery.Tables.SetIamPolicy bqSetRequest = + bigquery.tables().setIamPolicy(resourceId, policyRequest).setPrettyPrint(false); + + bqSetRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span setIamPolicy = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + setIamPolicy = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.setIamPolicy") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "SetIamPolicy") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + + Policy bqSetResponse = bqSetRequest.execute(); + if (setIamPolicy != null) { + 
setIamPolicy.end(); + } + return bqSetResponse; + } + @Override public TestIamPermissionsResponse testIamPermissions( String resourceId, List permissions, Map options) { try { - TestIamPermissionsRequest permissionsRequest = - new TestIamPermissionsRequest().setPermissions(permissions); - return bigquery - .tables() - .testIamPermissions(resourceId, permissionsRequest) - .setPrettyPrint(false) - .execute(); + return testIamPermissionsSkipExceptionTranslation(resourceId, permissions, options); } catch (IOException ex) { throw translate(ex); } } + + @InternalApi("internal to java-bigquery") + public TestIamPermissionsResponse testIamPermissionsSkipExceptionTranslation( String resourceId, List permissions, Map options) throws IOException { + validateRPC(); + TestIamPermissionsRequest permissionsRequest = + new TestIamPermissionsRequest().setPermissions(permissions); + Bigquery.Tables.TestIamPermissions bqTestRequest = + bigquery.tables().testIamPermissions(resourceId, permissionsRequest).setPrettyPrint(false); + + bqTestRequest + .getRequestHeaders() + .set("x-goog-otel-enabled", this.options.isOpenTelemetryTracingEnabled()); + + Span testIamPermissions = null; + if (this.options.isOpenTelemetryTracingEnabled() + && this.options.getOpenTelemetryTracer() != null) { + testIamPermissions = + this.options + .getOpenTelemetryTracer() + .spanBuilder("com.google.cloud.bigquery.BigQueryRpc.testIamPermissions") + .setSpanKind(SpanKind.CLIENT) + .setAttribute("bq.rpc.service", "TableService") + .setAttribute("bq.rpc.method", "TestIamPermissions") + .setAttribute("bq.rpc.system", "http") + .setAllAttributes(otelAttributesFromOptions(options)) + .startSpan(); + } + + TestIamPermissionsResponse bqTestResponse = bqTestRequest.execute(); + if (testIamPermissions != null) { + testIamPermissions.end(); + } + return bqTestResponse; + } + + private static Attributes otelAttributesFromOptions(Map options) { + // Accumulate into a single builder; creating and discarding a fresh builder per entry + // would leave the returned Attributes empty. + AttributesBuilder attributesBuilder = Attributes.builder(); + for (Map.Entry entry : options.entrySet()) { + attributesBuilder.put(entry.getKey().toString(), entry.getValue().toString()); + } + return attributesBuilder.build(); + } } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java index 784ca984fa..d195dc245e 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java @@ -24,10 +24,10 @@ import com.google.cloud.http.HttpTransportOptions; import java.io.IOException; import java.io.InputStream; +import java.time.Duration; import java.util.UUID; import java.util.logging.Level; import java.util.logging.Logger; -import org.threeten.bp.Duration; /** * Utility to create a remote BigQuery configuration for testing. 
BigQuery options can be obtained @@ -45,7 +45,7 @@ public class RemoteBigQueryHelper { private static final String MODEL_NAME_PREFIX = "model_"; private static final String ROUTINE_NAME_PREFIX = "routine_"; private final BigQueryOptions options; - private static final int connectTimeout = 60000; + private static final int CONNECT_TIMEOUT_IN_MS = 60000; private RemoteBigQueryHelper(BigQueryOptions options) { this.options = options; @@ -95,10 +95,9 @@ public static RemoteBigQueryHelper create(String projectId, InputStream keyStrea try { HttpTransportOptions transportOptions = BigQueryOptions.getDefaultHttpTransportOptions(); transportOptions = - transportOptions - .toBuilder() - .setConnectTimeout(connectTimeout) - .setReadTimeout(connectTimeout) + transportOptions.toBuilder() + .setConnectTimeout(CONNECT_TIMEOUT_IN_MS) + .setReadTimeout(CONNECT_TIMEOUT_IN_MS) .build(); BigQueryOptions bigqueryOptions = BigQueryOptions.newBuilder() @@ -121,36 +120,48 @@ public static RemoteBigQueryHelper create(String projectId, InputStream keyStrea * credentials. */ public static RemoteBigQueryHelper create() { + return create(BigQueryOptions.newBuilder()); + } + + /** + * Creates a {@code RemoteBigQueryHelper} object using default project id and authentication + * credentials. + * + * @param bigqueryOptionsBuilder Custom BigQueryOptions.Builder with some pre-defined settings + */ + public static RemoteBigQueryHelper create(BigQueryOptions.Builder bigqueryOptionsBuilder) { HttpTransportOptions transportOptions = BigQueryOptions.getDefaultHttpTransportOptions(); transportOptions = - transportOptions - .toBuilder() - .setConnectTimeout(connectTimeout) - .setReadTimeout(connectTimeout) + transportOptions.toBuilder() + .setConnectTimeout(CONNECT_TIMEOUT_IN_MS) + .setReadTimeout(CONNECT_TIMEOUT_IN_MS) .build(); - BigQueryOptions bigqueryOptions = - BigQueryOptions.newBuilder() + BigQueryOptions.Builder builder = + bigqueryOptionsBuilder .setRetrySettings(retrySettings()) - .setTransportOptions(transportOptions) - .build(); - return new RemoteBigQueryHelper(bigqueryOptions); + .setTransportOptions(transportOptions); + return new RemoteBigQueryHelper(builder.build()); } + // Keep these settings as small as possible to minimize the total test time. + // These values can be adjusted per test case, but these serve as default values. 
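+ // For example, with these defaults a failed call is retried after roughly 100 ms, then 150 ms + // (1.5x backoff, capped at 1 s between attempts), and the operation is abandoned after 3 s total.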
private static RetrySettings retrySettings() { - double retryDelayMultiplier = 1.0; + double backoffMultiplier = 1.5; int maxAttempts = 10; - long initialRetryDelay = 250L; - long maxRetryDelay = 30000L; - long totalTimeOut = 120000L; + long initialRetryDelayMs = 100L; // 0.1s initial retry delay + long maxRetryDelayMs = 1000L; // 1s max retry delay between retries + long initialRpcTimeoutMs = 1000L; // 1s initial RPC timeout + long maxRpcTimeoutMs = 2000L; // 2s max RPC timeout + long totalTimeoutMs = 3000L; // 3s total timeout return RetrySettings.newBuilder() .setMaxAttempts(maxAttempts) - .setMaxRetryDelay(Duration.ofMillis(maxRetryDelay)) - .setTotalTimeout(Duration.ofMillis(totalTimeOut)) - .setInitialRetryDelay(Duration.ofMillis(initialRetryDelay)) - .setRetryDelayMultiplier(retryDelayMultiplier) - .setInitialRpcTimeout(Duration.ofMillis(totalTimeOut)) - .setRpcTimeoutMultiplier(retryDelayMultiplier) - .setMaxRpcTimeout(Duration.ofMillis(totalTimeOut)) + .setTotalTimeoutDuration(Duration.ofMillis(totalTimeoutMs)) + .setInitialRetryDelayDuration(Duration.ofMillis(initialRetryDelayMs)) + .setMaxRetryDelayDuration(Duration.ofMillis(maxRetryDelayMs)) + .setRetryDelayMultiplier(backoffMultiplier) + .setInitialRpcTimeoutDuration(Duration.ofMillis(initialRpcTimeoutMs)) + .setMaxRpcTimeoutDuration(Duration.ofMillis(maxRpcTimeoutMs)) + .setRpcTimeoutMultiplier(backoffMultiplier) .build(); } diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/package-info.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/package-info.java index 6e5b32419d..c2e6c03f50 100644 --- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/package-info.java +++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/package-info.java @@ -17,7 +17,14 @@ /** * A testing helper for Google BigQuery. * - *

<p>A simple usage example: + * <p>A simple usage example: 1. Create a test Google Cloud project. + * + * <p>2. Download a JSON service account credentials file from the Google Developer's Console. + * + * <p>3. Create a RemoteBigQueryHelper object using your project ID and JSON key. Here is an example + * that uses the RemoteBigQueryHelper to create a dataset. + * + * <p>4. Run tests. * * <p>Before the test: * @@ -33,9 +40,5 @@ * <pre>{@code
      * RemoteBigQueryHelper.forceDelete(bigquery, DATASET);
      * }</pre>
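+ * + * <p>A sketch of the {@code create(BigQueryOptions.Builder)} overload introduced in this change, for tests that pre-configure options (the project id shown here is illustrative): + * + * <pre>{@code
+ * BigQueryOptions.Builder optionsBuilder = BigQueryOptions.newBuilder().setProjectId("my-project");
+ * RemoteBigQueryHelper helper = RemoteBigQueryHelper.create(optionsBuilder);
+ * BigQuery bigquery = helper.getOptions().getService();
+ * }</pre>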
    - * - * @see - * Google Cloud Java tools for testing */ package com.google.cloud.bigquery.testing; diff --git a/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/native-image.properties b/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/native-image.properties new file mode 100644 index 0000000000..f7cce6dec2 --- /dev/null +++ b/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/native-image.properties @@ -0,0 +1 @@ +Args = --add-opens=java.base/java.nio=ALL-UNNAMED \ No newline at end of file diff --git a/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/reflect-config.json b/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/reflect-config.json new file mode 100644 index 0000000000..921440df69 --- /dev/null +++ b/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/reflect-config.json @@ -0,0 +1,134 @@ +[ + { + "name":"io.netty.buffer.AbstractByteBufAllocator", + "queryAllDeclaredMethods":true + }, + { + "name":"io.netty.buffer.PooledByteBufAllocator", + "fields":[{"name":"directArenas"}] + }, + { + "name":"io.netty.util.internal.shaded.org.jctools.queues.MpscChunkedArrayQueue", + "queryAllDeclaredMethods":true, + "allDeclaredFields":true, + "queryAllDeclaredConstructors":true + }, + { + "name":"io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueConsumerIndexField", + "fields":[{"name":"consumerIndex"}] + }, + { + "name":"io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerIndexField", + "fields":[{"name":"producerIndex"}] + }, + { + "name":"io.netty.util.internal.shaded.org.jctools.queues.MpscArrayQueueProducerLimitField", + "fields":[{"name":"producerLimit"}] + }, + { + "name":"java.nio.DirectByteBuffer", + "methods":[{"name":"","parameterTypes":["long","int"] }] + }, + + { + "name":"org.apache.arrow.memory.BaseAllocator", + "allDeclaredFields":true, + "queryAllDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, + { + "name":"org.apache.arrow.memory.BaseAllocator$Config", + "allDeclaredFields":true, + "queryAllDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, + { + "name":"org.apache.arrow.memory.DefaultAllocationManagerOption", + "allDeclaredFields":true, + "queryAllDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, + { + "name":"org.apache.arrow.memory.netty.NettyAllocationManager$1", + "allDeclaredFields":true, + "queryAllDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, + { + "name":"org.apache.arrow.memory.netty.DefaultAllocationManagerFactory", + "allDeclaredFields":true, + "queryAllDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, + { + "name":"org.apache.arrow.memory.DefaultAllocationManagerFactory", + "allDeclaredFields":true, + "queryAllDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, + { + "name":"org.apache.arrow.memory.RootAllocator", + "allDeclaredFields":true, + "queryAllDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, + { + "name":"org.apache.arrow.memory.DefaultAllocationManagerFactory", + 
"allDeclaredFields":true, + "queryAllDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, + { + "name":"org.apache.arrow.vector.types.pojo.ArrowType", + "allDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, + { + "name":"org.apache.arrow.vector.types.pojo.ArrowType$Int", + "allDeclaredFields":true, + "queryAllDeclaredMethods":true, + "queryAllDeclaredConstructors":true + }, + { + "name":"org.apache.arrow.vector.types.pojo.ArrowType$PrimitiveType", + "allDeclaredFields":true, + "queryAllDeclaredMethods":true + }, + { + "name": "org.apache.arrow.vector.types.pojo.DictionaryEncoding", + "allDeclaredFields": true + }, + { + "name": "org.apache.arrow.vector.types.pojo.Field", + "allDeclaredFields": true + }, + { + "name": "org.apache.arrow.vector.types.pojo.Schema", + "allDeclaredFields": true + }, + { + "name":"io.netty.buffer.AbstractReferenceCountedByteBuf", + "fields":[{"name":"refCnt"}] + }, + { + "name":"io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields", + "fields":[{"name":"producerLimit"}] + }, + { + "name":"io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields", + "fields":[{"name":"consumerIndex"}] + }, + { + "name":"io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields", + "fields":[{"name":"producerIndex"}] + } +] \ No newline at end of file diff --git a/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/resource-config.json b/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/resource-config.json new file mode 100644 index 0000000000..c4710244e2 --- /dev/null +++ b/google-cloud-bigquery/src/main/resources/META-INF/native-image/com.google.cloud/google-cloud-bigquery/resource-config.json @@ -0,0 +1,26 @@ +{ + "resources":{ + "includes":[ + { + "pattern":"\\Qorg/apache/arrow/memory/DefaultAllocationManagerFactory.class\\E" + }, + { + "pattern":"\\Qorg/apache/arrow/memory/netty/DefaultAllocationManagerFactory.class\\E" + }, + { + "pattern":"\\Qorg/apache/arrow/memory/unsafe/DefaultAllocationManagerFactory.class\\E" + } + ] + }, + "globs":[ + { + "glob": "org/apache/arrow/memory/DefaultAllocationManagerFactory.class" + }, + { + "glob": "org/apache/arrow/memory/netty/DefaultAllocationManagerFactory.class" + }, + { + "glob": "org/apache/arrow/memory/unsafe/DefaultAllocationManagerFactory.class" + } + ] +} \ No newline at end of file diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AclTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AclTest.java index 7368033918..f7bed53ba1 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AclTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AclTest.java @@ -16,23 +16,38 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.api.services.bigquery.model.Dataset; +import com.google.cloud.bigquery.Acl.DatasetAclEntity; import com.google.cloud.bigquery.Acl.Domain; import com.google.cloud.bigquery.Acl.Entity; import com.google.cloud.bigquery.Acl.Entity.Type; +import com.google.cloud.bigquery.Acl.Expr; import com.google.cloud.bigquery.Acl.Group; import com.google.cloud.bigquery.Acl.IamMember; import com.google.cloud.bigquery.Acl.Role; import 
com.google.cloud.bigquery.Acl.User; import com.google.cloud.bigquery.Acl.View; -import org.junit.Test; +import com.google.common.collect.ImmutableList; +import java.util.List; +import org.junit.jupiter.api.Test; -public class AclTest { +class AclTest { @Test - public void testDomainEntity() { + void testDatasetEntity() { + DatasetId datasetId = DatasetId.of("dataset"); + List<String> targetTypes = ImmutableList.of("VIEWS"); + DatasetAclEntity entity = new DatasetAclEntity(datasetId, targetTypes); + assertEquals(datasetId, entity.getId()); + assertEquals(targetTypes, entity.getTargetTypes()); + Dataset.Access pb = entity.toPb(); + assertEquals(entity, Entity.fromPb(pb)); + } + + @Test + void testDomainEntity() { Domain entity = new Domain("d1"); assertEquals("d1", entity.getDomain()); assertEquals(Type.DOMAIN, entity.getType()); @@ -41,7 +56,7 @@ public void testDomainEntity() { } @Test - public void testGroupEntity() { + void testGroupEntity() { Group entity = new Group("g1"); assertEquals("g1", entity.getIdentifier()); assertEquals(Type.GROUP, entity.getType()); @@ -50,7 +65,7 @@ public void testGroupEntity() { } @Test - public void testSpecialGroupEntity() { + void testSpecialGroupEntity() { Group entity = Group.ofAllAuthenticatedUsers(); assertEquals("allAuthenticatedUsers", entity.getIdentifier()); Dataset.Access pb = entity.toPb(); @@ -70,7 +85,7 @@ public void testSpecialGroupEntity() { } @Test - public void testUserEntity() { + void testUserEntity() { User entity = new User("u1"); assertEquals("u1", entity.getEmail()); assertEquals(Type.USER, entity.getType()); @@ -79,7 +94,7 @@ public void testUserEntity() { } @Test - public void testViewEntity() { + void testViewEntity() { TableId viewId = TableId.of("project", "dataset", "view"); View entity = new View(viewId); assertEquals(viewId, entity.getId()); @@ -89,7 +104,7 @@ public void testViewEntity() { } @Test - public void testRoutineEntity() { + void testRoutineEntity() { RoutineId routineId = RoutineId.of("project", "dataset", "routine"); Acl.Routine entity = new Acl.Routine(routineId); assertEquals(routineId, entity.getId()); @@ -99,7 +114,7 @@ public void testRoutineEntity() { } @Test - public void testIamMemberEntity() { + void testIamMemberEntity() { IamMember entity = new IamMember("member1"); assertEquals("member1", entity.getIamMember()); Dataset.Access pb = entity.toPb(); @@ -107,7 +122,7 @@ public void testIamMemberEntity() { } @Test - public void testOf() { + void testOf() { Acl acl = Acl.of(Group.ofAllAuthenticatedUsers(), Role.READER); assertEquals(Group.ofAllAuthenticatedUsers(), acl.getEntity()); assertEquals(Role.READER, acl.getRole()); @@ -122,4 +137,13 @@ public void testOf() { assertEquals(routine, acl.getEntity()); assertEquals(null, acl.getRole()); } + + @Test + void testOfWithCondition() { + Expr expr = new Expr("expression", "title", "description", "location"); + Acl acl = Acl.of(Group.ofAllAuthenticatedUsers(), Role.READER, expr); + Dataset.Access pb = acl.toPb(); + assertEquals(acl, Acl.fromPb(pb)); + assertEquals(acl.getCondition(), expr); + } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AnnotationsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AnnotationsTest.java new file mode 100644 index 0000000000..ad475c7dce --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AnnotationsTest.java @@ -0,0 +1,72 @@ +/* + * Copyright 2018 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use
this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; + +import com.google.api.client.util.Data; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import org.junit.jupiter.api.Test; + +public class AnnotationsTest { + @Test + void testFromUser() { + assertThat(Annotations.fromUser(null).userMap()).isNull(); + + HashMap<String, String> user = new HashMap<>(); + assertThat(Annotations.fromUser(user).userMap()).isEmpty(); + + user.put("a", "b"); + Annotations annotations = Annotations.fromUser(user); + assertThat(annotations.userMap()).containsExactly("a", "b"); + + // Changing map afterwards does not change the annotation. + user.put("c", "d"); + assertThat(annotations.userMap()).containsExactly("a", "b"); + } + + @Test + void testFromToPb() { + assertThat(Annotations.fromPb(null).toPb()).isNull(); + + HashMap<String, String> pb = new HashMap<>(); + assertThat(Annotations.fromPb(pb).toPb()).isNull(); + + pb.put("a", "b"); + assertThat(Annotations.fromPb(pb).toPb()).isEqualTo(pb); + + pb.put("c", Data.NULL_STRING); + assertThat(Annotations.fromPb(pb).toPb()).isEqualTo(pb); + + Map<String, String> jsonNullMap = Data.nullOf(HashMap.class); + assertThat(Data.isNull(Annotations.fromPb(jsonNullMap).toPb())).isTrue(); + } + + @Test + void testNullKey() { + assertThrows( + IllegalArgumentException.class, + () -> Annotations.fromUser(Collections.singletonMap((String) null, "foo"))); + + assertThrows( + IllegalArgumentException.class, + () -> Annotations.fromPb(Collections.singletonMap((String) null, "foo"))); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AvroOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AvroOptionsTest.java new file mode 100644 index 0000000000..840ae24ba8 --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AvroOptionsTest.java @@ -0,0 +1,56 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +public class AvroOptionsTest { + + private static final Boolean USE_AVRO_LOGICAL_TYPES = true; + private static final AvroOptions AVRO_OPTIONS = + AvroOptions.newBuilder().setUseAvroLogicalTypes(USE_AVRO_LOGICAL_TYPES).build(); + + @Test + void testToBuilder() { + compareAvroOptions(AVRO_OPTIONS, AVRO_OPTIONS.toBuilder().build()); + AvroOptions avroOptions = AVRO_OPTIONS.toBuilder().setUseAvroLogicalTypes(false).build(); + assertEquals(false, avroOptions.useAvroLogicalTypes()); + avroOptions = avroOptions.toBuilder().setUseAvroLogicalTypes(true).build(); + compareAvroOptions(AVRO_OPTIONS, avroOptions); + } + + @Test + void testBuilder() { + assertEquals(FormatOptions.AVRO, AVRO_OPTIONS.getType()); + assertEquals(USE_AVRO_LOGICAL_TYPES, AVRO_OPTIONS.useAvroLogicalTypes()); + } + + @Test + void testToAndFromPb() { + compareAvroOptions(AVRO_OPTIONS, AvroOptions.fromPb(AVRO_OPTIONS.toPb())); + AvroOptions avroOptions = + AvroOptions.newBuilder().setUseAvroLogicalTypes(USE_AVRO_LOGICAL_TYPES).build(); + compareAvroOptions(avroOptions, AvroOptions.fromPb(avroOptions.toPb())); + } + + private void compareAvroOptions(AvroOptions expected, AvroOptions value) { + assertEquals(expected, value); + assertEquals(expected.useAvroLogicalTypes(), value.useAvroLogicalTypes()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigLakeConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigLakeConfigurationTest.java new file mode 100644 index 0000000000..66fcd7c6b4 --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigLakeConfigurationTest.java @@ -0,0 +1,80 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +class BigLakeConfigurationTest { + + private static final String STORAGE_URI = "gs://storage-uri"; + private static final String FILE_FORMAT = "PARQUET"; + private static final String TABLE_FORMAT = "ICEBERG"; + private static final String CONNECTION_ID = "us.test-connection"; + + private static final BigLakeConfiguration BIG_LAKE_CONFIGURATION = + BigLakeConfiguration.newBuilder() + .setStorageUri(STORAGE_URI) + .setFileFormat(FILE_FORMAT) + .setTableFormat(TABLE_FORMAT) + .setConnectionId(CONNECTION_ID) + .build(); + private static final com.google.api.services.bigquery.model.BigLakeConfiguration + BIG_LAKE_CONFIGURATION_PB = + new com.google.api.services.bigquery.model.BigLakeConfiguration() + .setStorageUri(STORAGE_URI) + .setFileFormat(FILE_FORMAT) + .setTableFormat(TABLE_FORMAT) + .setConnectionId(CONNECTION_ID); + + @Test + void testToBuilder() { + assertEquals(STORAGE_URI, BIG_LAKE_CONFIGURATION.getStorageUri()); + assertEquals(FILE_FORMAT, BIG_LAKE_CONFIGURATION.getFileFormat()); + assertEquals(TABLE_FORMAT, BIG_LAKE_CONFIGURATION.getTableFormat()); + assertEquals(CONNECTION_ID, BIG_LAKE_CONFIGURATION.getConnectionId()); + } + + @Test + void testToPb() { + assertBigLakeConfiguration(BIG_LAKE_CONFIGURATION_PB, BIG_LAKE_CONFIGURATION.toPb()); + } + + @Test + void testFromPb() { + assertBigLakeConfiguration( + BIG_LAKE_CONFIGURATION, BigLakeConfiguration.fromPb(BIG_LAKE_CONFIGURATION_PB)); + } + + private static void assertBigLakeConfiguration( + BigLakeConfiguration expected, BigLakeConfiguration actual) { + assertEquals(expected.getConnectionId(), actual.getConnectionId()); + assertEquals(expected.getTableFormat(), actual.getTableFormat()); + assertEquals(expected.getStorageUri(), actual.getStorageUri()); + assertEquals(expected.getFileFormat(), actual.getFileFormat()); + } + + private static void assertBigLakeConfiguration( + com.google.api.services.bigquery.model.BigLakeConfiguration expected, + com.google.api.services.bigquery.model.BigLakeConfiguration actual) { + assertEquals(expected.getConnectionId(), actual.getConnectionId()); + assertEquals(expected.getTableFormat(), actual.getTableFormat()); + assertEquals(expected.getStorageUri(), actual.getStorageUri()); + assertEquals(expected.getFileFormat(), actual.getFileFormat()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryErrorTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryErrorTest.java index 7cd737cf4b..d618214e29 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryErrorTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryErrorTest.java @@ -16,9 +16,9 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class BigQueryErrorTest { @@ -32,7 +32,7 @@ public class BigQueryErrorTest { new BigQueryError(REASON, LOCATION, MESSAGE); @Test - public void testConstructor() { + void testConstructor() { assertEquals(REASON, ERROR.getReason()); assertEquals(LOCATION, ERROR.getLocation()); assertEquals(DEBUG_INFO, ERROR.getDebugInfo()); @@ -44,7 +44,7 @@ public void testConstructor() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareBigQueryError(ERROR, 
BigQueryError.fromPb(ERROR.toPb())); compareBigQueryError(ERROR_INCOMPLETE, BigQueryError.fromPb(ERROR_INCOMPLETE.toPb())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryExceptionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryExceptionTest.java index 41915e2834..7254ede1bc 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryExceptionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryExceptionTest.java @@ -16,12 +16,15 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @@ -29,18 +32,23 @@ import com.google.api.client.http.HttpHeaders; import com.google.api.client.http.HttpResponseException; import com.google.cloud.BaseServiceException; +import com.google.cloud.ExceptionHandler; import com.google.cloud.RetryHelper.RetryHelperException; +import com.google.cloud.bigquery.spi.v2.BigQueryRpc; +import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc; import java.io.IOException; import java.net.SocketTimeoutException; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; +import java.util.HashMap; +import java.util.Map; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class BigQueryExceptionTest { @Test - public void testBigQueryException() { + void testBigQueryException() { BigQueryException exception = new BigQueryException(500, "message"); assertEquals(500, exception.getCode()); assertEquals("message", exception.getMessage()); @@ -129,7 +137,7 @@ public void testBigQueryException() { } @Test - public void testTranslateAndThrow() throws Exception { + void testTranslateAndThrow() throws Exception { Exception cause = new BigQueryException(503, "message"); RetryHelperException exceptionMock = mock(RetryHelperException.class); when(exceptionMock.getCause()).thenReturn(cause); @@ -158,4 +166,87 @@ public void testTranslateAndThrow() throws Exception { verify(exceptionMock, times(2)).getCause(); } } + + @Test + void testDefaultExceptionHandler() throws java.io.IOException { + BigQueryOptions defaultOptions = + BigQueryOptions.newBuilder().setProjectId("project-id").build(); + DatasetInfo info = DatasetInfo.newBuilder("dataset").build(); + Dataset dataset = null; + + final com.google.api.services.bigquery.model.Dataset datasetPb = + info.setProjectId(defaultOptions.getProjectId()).toPb(); + final Map optionsMap = new HashMap<>(); + + BigQueryOptions mockOptions = spy(defaultOptions); + HttpBigQueryRpc bigQueryRpcMock = mock(HttpBigQueryRpc.class); + 
doReturn(bigQueryRpcMock).when(mockOptions).getBigQueryRpcV2(); + // java.net.SocketException is retry-able in the default exception handler. + doThrow(java.net.SocketException.class) + .when(bigQueryRpcMock) + .createSkipExceptionTranslation(datasetPb, optionsMap); + + BigQuery bigquery = mockOptions.getService(); + try { + dataset = bigquery.create(info); + } catch (BigQueryException e) { + assertEquals(e.getCause().getClass(), java.net.SocketException.class); + assertNull(dataset); + } finally { + verify(bigQueryRpcMock, times(6)).createSkipExceptionTranslation(datasetPb, optionsMap); + } + } + + @Test + void testCustomExceptionHandler() throws java.io.IOException { + BigQueryOptions defaultOptions = + BigQueryOptions.newBuilder() + .setProjectId("project-id") + .setResultRetryAlgorithm( + ExceptionHandler.newBuilder() + .abortOn(RuntimeException.class) + .retryOn(java.util.EmptyStackException.class) + .addInterceptors(BigQueryBaseService.EXCEPTION_HANDLER_INTERCEPTOR) + .build()) + .build(); + DatasetInfo info = DatasetInfo.newBuilder("dataset").build(); + Dataset dataset = null; + + final com.google.api.services.bigquery.model.Dataset datasetPb = + info.setProjectId(defaultOptions.getProjectId()).toPb(); + final Map optionsMap = new HashMap<>(); + + BigQueryOptions mockOptions = spy(defaultOptions); + HttpBigQueryRpc bigQueryRpcRetryMock = mock(HttpBigQueryRpc.class); + doReturn(bigQueryRpcRetryMock).when(mockOptions).getBigQueryRpcV2(); + doThrow(java.util.EmptyStackException.class) + .when(bigQueryRpcRetryMock) + .createSkipExceptionTranslation(datasetPb, optionsMap); + + BigQuery bigquery = mockOptions.getService(); + try { + dataset = bigquery.create(info); + } catch (BigQueryException e) { + assertEquals(e.getCause().getClass(), java.util.EmptyStackException.class); + assertNull(dataset); + } finally { + verify(bigQueryRpcRetryMock, times(6)).createSkipExceptionTranslation(datasetPb, optionsMap); + } + + BigQueryOptions mockOptionsAbort = spy(defaultOptions); + HttpBigQueryRpc bigQueryRpcAbortMock = mock(HttpBigQueryRpc.class); + doReturn(bigQueryRpcAbortMock).when(mockOptionsAbort).getBigQueryRpcV2(); + doThrow(RuntimeException.class) + .when(bigQueryRpcAbortMock) + .createSkipExceptionTranslation(datasetPb, optionsMap); + bigquery = mockOptionsAbort.getService(); + try { + dataset = bigquery.create(info); + } catch (BigQueryException e) { + assertEquals(e.getCause().getClass(), RuntimeException.class); + assertNull(dataset); + } finally { + verify(bigQueryRpcAbortMock, times(1)).createSkipExceptionTranslation(datasetPb, optionsMap); + } + } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java index 7c970313fb..20a6ef679e 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java @@ -16,18 +16,21 @@ package com.google.cloud.bigquery; +import static com.google.cloud.bigquery.BigQuery.JobField.STATISTICS; +import static com.google.cloud.bigquery.BigQuery.JobField.USER_EMAIL; +import static com.google.cloud.bigquery.BigQueryImpl.optionMap; import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; 
-import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; -import static org.mockito.Mockito.any; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.ArgumentMatchers.nullable; import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; @@ -37,6 +40,7 @@ import com.google.api.services.bigquery.model.ErrorProto; import com.google.api.services.bigquery.model.GetQueryResultsResponse; import com.google.api.services.bigquery.model.JobConfigurationQuery; +import com.google.api.services.bigquery.model.JobStatistics; import com.google.api.services.bigquery.model.QueryRequest; import com.google.api.services.bigquery.model.TableCell; import com.google.api.services.bigquery.model.TableDataInsertAllRequest; @@ -44,12 +48,16 @@ import com.google.api.services.bigquery.model.TableDataList; import com.google.api.services.bigquery.model.TableRow; import com.google.cloud.Policy; +import com.google.cloud.RetryOption; import com.google.cloud.ServiceOptions; import com.google.cloud.Tuple; +import com.google.cloud.bigquery.BigQuery.DatasetOption; +import com.google.cloud.bigquery.BigQuery.JobOption; import com.google.cloud.bigquery.BigQuery.QueryResultsOption; import com.google.cloud.bigquery.InsertAllRequest.RowToInsert; import com.google.cloud.bigquery.spi.BigQueryRpcFactory; import com.google.cloud.bigquery.spi.v2.BigQueryRpc; +import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc; import com.google.common.base.Function; import com.google.common.base.Supplier; import com.google.common.collect.ImmutableList; @@ -59,18 +67,23 @@ import com.google.common.collect.Maps; import java.io.IOException; import java.math.BigInteger; +import java.net.ConnectException; +import java.net.UnknownHostException; import java.util.Collections; import java.util.List; import java.util.Map; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Captor; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class BigQueryImplTest { private static final String PROJECT = "project"; @@ -101,16 +114,22 @@ public class BigQueryImplTest { Acl.of(Acl.Group.ofAllAuthenticatedUsers(), Acl.Role.READER), Acl.of(new Acl.View(TableId.of(PROJECT, "dataset", "table")))); private static final DatasetInfo DATASET_INFO = - 
DatasetInfo.newBuilder(DATASET).setAcl(ACCESS_RULES).setDescription("description").build(); + DatasetInfo.newBuilder(DATASET) + .setAcl(ACCESS_RULES) + .setDescription("description") + .setLocation(LOCATION) + .build(); private static final DatasetInfo DATASET_INFO_WITH_PROJECT = DatasetInfo.newBuilder(PROJECT, DATASET) .setAcl(ACCESS_RULES_WITH_PROJECT) .setDescription("description") + .setLocation(LOCATION) .build(); private static final DatasetInfo OTHER_DATASET_INFO = DatasetInfo.newBuilder(PROJECT, OTHER_DATASET) .setAcl(ACCESS_RULES) .setDescription("other description") + .setLocation(LOCATION) .build(); private static final TableId TABLE_ID = TableId.of(DATASET, TABLE); private static final TableId OTHER_TABLE_ID = TableId.of(PROJECT, DATASET, OTHER_TABLE); @@ -140,6 +159,7 @@ public class BigQueryImplTest { .setField("timestampField"); private static final TimePartitioning TIME_PARTITIONING_NULL_TYPE = TimePartitioning.fromPb(PB_TIMEPARTITIONING); + private static final ImmutableMap LABELS = ImmutableMap.of("key", "value"); private static final StandardTableDefinition TABLE_DEFINITION_WITH_PARTITIONING = StandardTableDefinition.newBuilder() .setSchema(TABLE_SCHEMA) @@ -163,6 +183,8 @@ public class BigQueryImplTest { TableInfo.of(TABLE_ID, TABLE_DEFINITION_WITH_RANGE_PARTITIONING); private static final TableInfo TABLE_INFO = TableInfo.of(TABLE_ID, TABLE_DEFINITION); private static final TableInfo OTHER_TABLE_INFO = TableInfo.of(OTHER_TABLE_ID, TABLE_DEFINITION); + private static final TableInfo OTHER_TABLE_WITH_LABELS_INFO = + TableInfo.newBuilder(OTHER_TABLE_ID, TABLE_DEFINITION).setLabels(LABELS).build(); private static final TableInfo TABLE_INFO_WITH_PROJECT = TableInfo.of(TABLE_ID_WITH_PROJECT, TABLE_DEFINITION); private static final TableInfo MODEL_TABLE_INFO_WITH_PROJECT = @@ -422,12 +444,11 @@ public class BigQueryImplTest { BigQueryRpc.Option.START_INDEX, 0L); // Job options - private static final BigQuery.JobOption JOB_OPTION_FIELDS = - BigQuery.JobOption.fields(BigQuery.JobField.USER_EMAIL); + private static final JobOption JOB_OPTION_FIELDS = JobOption.fields(USER_EMAIL); // Job list options private static final BigQuery.JobListOption JOB_LIST_OPTION_FIELD = - BigQuery.JobListOption.fields(BigQuery.JobField.STATISTICS); + BigQuery.JobListOption.fields(STATISTICS); private static final BigQuery.JobListOption JOB_LIST_ALL_USERS = BigQuery.JobListOption.allUsers(); private static final BigQuery.JobListOption JOB_LIST_STATE_FILTER = @@ -508,11 +529,12 @@ public class BigQueryImplTest { .setEtag(ETAG) .setVersion(1) .build(); - private BigQueryOptions options; private BigQueryRpcFactory rpcFactoryMock; - private BigQueryRpc bigqueryRpcMock; + private HttpBigQueryRpc bigqueryRpcMock; private BigQuery bigquery; + private static final String RATE_LIMIT_ERROR_MSG = + "Job exceeded rate limits: Your table exceeded quota for table update operations. 
For more information, see https://cloud.google.com/bigquery/docs/troubleshoot-quotas"; @Captor private ArgumentCaptor> capturedOptions; @Captor private ArgumentCaptor jobCapture; @@ -542,36 +564,37 @@ private BigQueryOptions createBigQueryOptionsForProjectWithLocation( .build(); } - @Before - public void setUp() { + @BeforeEach + void setUp() { rpcFactoryMock = mock(BigQueryRpcFactory.class); - bigqueryRpcMock = mock(BigQueryRpc.class); + bigqueryRpcMock = mock(HttpBigQueryRpc.class); when(rpcFactoryMock.create(any(BigQueryOptions.class))).thenReturn(bigqueryRpcMock); options = createBigQueryOptionsForProject(PROJECT, rpcFactoryMock); } @Test - public void testGetOptions() { + void testGetOptions() { bigquery = options.getService(); assertSame(options, bigquery.getOptions()); } @Test - public void testCreateDataset() { + void testCreateDataset() throws IOException { DatasetInfo datasetInfo = DATASET_INFO.setProjectId(OTHER_PROJECT); - when(bigqueryRpcMock.create(datasetInfo.toPb(), EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.createSkipExceptionTranslation(datasetInfo.toPb(), EMPTY_RPC_OPTIONS)) .thenReturn(datasetInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Dataset dataset = bigquery.create(datasetInfo); assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(datasetInfo)), dataset); - verify(bigqueryRpcMock).create(datasetInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).createSkipExceptionTranslation(datasetInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testCreateDatasetWithSelectedFields() { - when(bigqueryRpcMock.create(eq(DATASET_INFO_WITH_PROJECT.toPb()), capturedOptions.capture())) + void testCreateDatasetWithSelectedFields() throws IOException { + when(bigqueryRpcMock.createSkipExceptionTranslation( + eq(DATASET_INFO_WITH_PROJECT.toPb()), capturedOptions.capture())) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Dataset dataset = bigquery.create(DATASET_INFO, DATASET_OPTION_FIELDS); @@ -582,74 +605,94 @@ public void testCreateDatasetWithSelectedFields() { assertEquals(28, selector.length()); assertEquals( new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), dataset); - verify(bigqueryRpcMock).create(eq(DATASET_INFO_WITH_PROJECT.toPb()), capturedOptions.capture()); + verify(bigqueryRpcMock) + .createSkipExceptionTranslation( + eq(DATASET_INFO_WITH_PROJECT.toPb()), capturedOptions.capture()); } @Test - public void testGetDataset() { - when(bigqueryRpcMock.getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + void testCreateDatasetWithAccessPolicy() throws IOException { + DatasetInfo datasetInfo = DATASET_INFO.setProjectId(OTHER_PROJECT); + DatasetOption datasetOption = DatasetOption.accessPolicyVersion(3); + when(bigqueryRpcMock.createSkipExceptionTranslation( + datasetInfo.toPb(), optionMap(datasetOption))) + .thenReturn(datasetInfo.toPb()); + BigQueryOptions bigQueryOptions = + createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); + bigquery = bigQueryOptions.getService(); + Dataset dataset = bigquery.create(datasetInfo, datasetOption); + assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(datasetInfo)), dataset); + verify(bigqueryRpcMock) + .createSkipExceptionTranslation(datasetInfo.toPb(), optionMap(datasetOption)); + } + + @Test + void testGetDataset() throws IOException { + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) 
.thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Dataset dataset = bigquery.getDataset(DATASET); assertEquals( new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), dataset); - verify(bigqueryRpcMock).getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testGetDatasetNotFoundWhenThrowIsDisabled() { - when(bigqueryRpcMock.getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + void testGetDatasetNotFoundWhenThrowIsDisabled() throws IOException { + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); options.setThrowNotFound(false); bigquery = options.getService(); Dataset dataset = bigquery.getDataset(DATASET); assertEquals( new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), dataset); - verify(bigqueryRpcMock).getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testGetDatasetNotFoundWhenThrowIsEnabled() { - when(bigqueryRpcMock.getDataset(PROJECT, "dataset-not-found", EMPTY_RPC_OPTIONS)) - .thenReturn(null) + void testGetDatasetNotFoundWhenThrowIsEnabled() throws IOException { + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation( + PROJECT, "dataset-not-found", EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(404, "Dataset not found")); options.setThrowNotFound(true); bigquery = options.getService(); - try { - bigquery.getDataset("dataset-not-found"); - Assert.fail(); - } catch (BigQueryException ex) { - Assert.assertNotNull(ex.getMessage()); - } - verify(bigqueryRpcMock).getDataset(PROJECT, "dataset-not-found", EMPTY_RPC_OPTIONS); + BigQueryException ex = + Assertions.assertThrows( + BigQueryException.class, () -> bigquery.getDataset("dataset-not-found")); + Assertions.assertNotNull(ex.getMessage()); + verify(bigqueryRpcMock) + .getDatasetSkipExceptionTranslation(PROJECT, "dataset-not-found", EMPTY_RPC_OPTIONS); } @Test - public void testGetDatasetFromDatasetId() { - when(bigqueryRpcMock.getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + void testGetDatasetFromDatasetId() throws IOException { + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Dataset dataset = bigquery.getDataset(DatasetId.of(DATASET)); assertEquals( new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), dataset); - verify(bigqueryRpcMock).getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testGetDatasetFromDatasetIdWithProject() { + void testGetDatasetFromDatasetIdWithProject() throws IOException { DatasetInfo datasetInfo = DATASET_INFO.setProjectId(OTHER_PROJECT); DatasetId datasetId = DatasetId.of(OTHER_PROJECT, DATASET); - when(bigqueryRpcMock.getDataset(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation( + OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenReturn(datasetInfo.toPb()); bigquery = options.getService(); Dataset dataset = bigquery.getDataset(datasetId); assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(datasetInfo)), dataset); - verify(bigqueryRpcMock).getDataset(OTHER_PROJECT, 
DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getDatasetSkipExceptionTranslation(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testGetDatasetWithSelectedFields() { - when(bigqueryRpcMock.getDataset(eq(PROJECT), eq(DATASET), capturedOptions.capture())) + void testGetDatasetWithSelectedFields() throws IOException { + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation( + eq(PROJECT), eq(DATASET), capturedOptions.capture())) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Dataset dataset = bigquery.getDataset(DATASET, DATASET_OPTION_FIELDS); @@ -660,11 +703,12 @@ public void testGetDatasetWithSelectedFields() { assertEquals(28, selector.length()); assertEquals( new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), dataset); - verify(bigqueryRpcMock).getDataset(eq(PROJECT), eq(DATASET), capturedOptions.capture()); + verify(bigqueryRpcMock) + .getDatasetSkipExceptionTranslation(eq(PROJECT), eq(DATASET), capturedOptions.capture()); } @Test - public void testListDatasets() { + void testListDatasets() throws IOException { bigquery = options.getService(); ImmutableList<Dataset> datasetList = ImmutableList.of( @@ -672,16 +716,17 @@ public void testListDatasets() { new Dataset(bigquery, new DatasetInfo.BuilderImpl(OTHER_DATASET_INFO))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Dataset>> result = Tuple.of(CURSOR, Iterables.transform(datasetList, DatasetInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listDatasets(PROJECT, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listDatasetsSkipExceptionTranslation(PROJECT, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page<Dataset> page = bigquery.listDatasets(); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals( datasetList.toArray(), Iterables.toArray(page.getValues(), DatasetInfo.class)); - verify(bigqueryRpcMock).listDatasets(PROJECT, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listDatasetsSkipExceptionTranslation(PROJECT, EMPTY_RPC_OPTIONS); } @Test - public void testListDatasetsWithProjects() { + void testListDatasetsWithProjects() throws IOException { bigquery = options.getService(); ImmutableList<Dataset> datasetList = ImmutableList.of( @@ -689,30 +734,32 @@ public void testListDatasetsWithProjects() { bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO.setProjectId(OTHER_PROJECT)))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Dataset>> result = Tuple.of(CURSOR, Iterables.transform(datasetList, DatasetInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listDatasets(OTHER_PROJECT, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listDatasetsSkipExceptionTranslation(OTHER_PROJECT, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page<Dataset> page = bigquery.listDatasets(OTHER_PROJECT); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals( datasetList.toArray(), Iterables.toArray(page.getValues(), DatasetInfo.class)); - verify(bigqueryRpcMock).listDatasets(OTHER_PROJECT, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listDatasetsSkipExceptionTranslation(OTHER_PROJECT, EMPTY_RPC_OPTIONS); } @Test - public void testListEmptyDatasets() { + void testListEmptyDatasets() throws IOException { ImmutableList<com.google.api.services.bigquery.model.Dataset> datasets = ImmutableList.of(); Tuple<String, Iterable<com.google.api.services.bigquery.model.Dataset>> result = Tuple.<String, Iterable<com.google.api.services.bigquery.model.Dataset>>of(null, datasets); - when(bigqueryRpcMock.listDatasets(PROJECT, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listDatasetsSkipExceptionTranslation(PROJECT, EMPTY_RPC_OPTIONS)) + .thenReturn(result); bigquery = options.getService(); Page<Dataset> page = bigquery.listDatasets(); assertNull(page.getNextPageToken()); assertArrayEquals( ImmutableList.of().toArray(),
Iterables.toArray(page.getValues(), Dataset.class)); - verify(bigqueryRpcMock).listDatasets(PROJECT, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listDatasetsSkipExceptionTranslation(PROJECT, EMPTY_RPC_OPTIONS); } @Test - public void testListDatasetsWithOptions() { + void testListDatasetsWithOptions() throws IOException { bigquery = options.getService(); ImmutableList<Dataset> datasetList = ImmutableList.of( @@ -720,71 +767,83 @@ public void testListDatasetsWithOptions() { new Dataset(bigquery, new DatasetInfo.BuilderImpl(OTHER_DATASET_INFO))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Dataset>> result = Tuple.of(CURSOR, Iterables.transform(datasetList, DatasetInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listDatasets(PROJECT, DATASET_LIST_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listDatasetsSkipExceptionTranslation(PROJECT, DATASET_LIST_OPTIONS)) + .thenReturn(result); Page<Dataset> page = bigquery.listDatasets(DATASET_LIST_ALL, DATASET_LIST_PAGE_TOKEN, DATASET_LIST_PAGE_SIZE); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals( datasetList.toArray(), Iterables.toArray(page.getValues(), DatasetInfo.class)); - verify(bigqueryRpcMock).listDatasets(PROJECT, DATASET_LIST_OPTIONS); + verify(bigqueryRpcMock).listDatasetsSkipExceptionTranslation(PROJECT, DATASET_LIST_OPTIONS); } @Test - public void testDeleteDataset() { - when(bigqueryRpcMock.deleteDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(true); + void testDeleteDataset() throws IOException { + when(bigqueryRpcMock.deleteDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(DATASET)); - verify(bigqueryRpcMock).deleteDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .deleteDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testDeleteDatasetFromDatasetId() { - when(bigqueryRpcMock.deleteDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(true); + void testDeleteDatasetFromDatasetId() throws IOException { + when(bigqueryRpcMock.deleteDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(DatasetId.of(DATASET))); - verify(bigqueryRpcMock).deleteDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .deleteDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testDeleteDatasetFromDatasetIdWithProject() { + void testDeleteDatasetFromDatasetIdWithProject() throws IOException { DatasetId datasetId = DatasetId.of(OTHER_PROJECT, DATASET); - when(bigqueryRpcMock.deleteDataset(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(true); + when(bigqueryRpcMock.deleteDatasetSkipExceptionTranslation( + OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(datasetId)); - verify(bigqueryRpcMock).deleteDataset(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .deleteDatasetSkipExceptionTranslation(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testDeleteDatasetWithOptions() { - when(bigqueryRpcMock.deleteDataset(PROJECT, DATASET, DATASET_DELETE_OPTIONS)).thenReturn(true); + void testDeleteDatasetWithOptions() throws IOException { + when(bigqueryRpcMock.deleteDatasetSkipExceptionTranslation( + PROJECT, DATASET, DATASET_DELETE_OPTIONS)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(DATASET, DATASET_DELETE_CONTENTS)); -
verify(bigqueryRpcMock).deleteDataset(PROJECT, DATASET, DATASET_DELETE_OPTIONS); + verify(bigqueryRpcMock) + .deleteDatasetSkipExceptionTranslation(PROJECT, DATASET, DATASET_DELETE_OPTIONS); } @Test - public void testUpdateDataset() { + void testUpdateDataset() throws IOException { DatasetInfo updatedDatasetInfo = - DATASET_INFO - .setProjectId(OTHER_PROJECT) - .toBuilder() + DATASET_INFO.setProjectId(OTHER_PROJECT).toBuilder() .setDescription("newDescription") .build(); - when(bigqueryRpcMock.patch(updatedDatasetInfo.toPb(), EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.patchSkipExceptionTranslation( + updatedDatasetInfo.toPb(), EMPTY_RPC_OPTIONS)) .thenReturn(updatedDatasetInfo.toPb()); bigquery = options.getService(); Dataset dataset = bigquery.update(updatedDatasetInfo); assertEquals(new Dataset(bigquery, new DatasetInfo.BuilderImpl(updatedDatasetInfo)), dataset); - verify(bigqueryRpcMock).patch(updatedDatasetInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .patchSkipExceptionTranslation(updatedDatasetInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testUpdateDatasetWithSelectedFields() { + void testUpdateDatasetWithSelectedFields() throws IOException { DatasetInfo updatedDatasetInfo = DATASET_INFO.toBuilder().setDescription("newDescription").build(); DatasetInfo updatedDatasetInfoWithProject = DATASET_INFO_WITH_PROJECT.toBuilder().setDescription("newDescription").build(); - when(bigqueryRpcMock.patch(eq(updatedDatasetInfoWithProject.toPb()), capturedOptions.capture())) + when(bigqueryRpcMock.patchSkipExceptionTranslation( + eq(updatedDatasetInfoWithProject.toPb()), capturedOptions.capture())) .thenReturn(updatedDatasetInfoWithProject.toPb()); bigquery = options.getService(); Dataset dataset = bigquery.update(updatedDatasetInfo, DATASET_OPTION_FIELDS); @@ -796,37 +855,60 @@ public void testUpdateDatasetWithSelectedFields() { assertEquals( new Dataset(bigquery, new DatasetInfo.BuilderImpl(updatedDatasetInfoWithProject)), dataset); verify(bigqueryRpcMock) - .patch(eq(updatedDatasetInfoWithProject.toPb()), capturedOptions.capture()); + .patchSkipExceptionTranslation( + eq(updatedDatasetInfoWithProject.toPb()), capturedOptions.capture()); } @Test - public void testCreateTable() { + void testCreateTable() throws IOException { TableInfo tableInfo = TABLE_INFO.setProjectId(OTHER_PROJECT); - when(bigqueryRpcMock.create(tableInfo.toPb(), EMPTY_RPC_OPTIONS)).thenReturn(tableInfo.toPb()); + when(bigqueryRpcMock.createSkipExceptionTranslation(tableInfo.toPb(), EMPTY_RPC_OPTIONS)) + .thenReturn(tableInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Table table = bigquery.create(tableInfo); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(tableInfo)), table); - verify(bigqueryRpcMock).create(tableInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).createSkipExceptionTranslation(tableInfo.toPb(), EMPTY_RPC_OPTIONS); + } + + @Test + void testCreateExternalTable() throws IOException { + TableInfo createTableInfo = + TableInfo.of(TABLE_ID, ExternalTableDefinition.newBuilder().setSchema(TABLE_SCHEMA).build()) + .setProjectId(OTHER_PROJECT); + + com.google.api.services.bigquery.model.Table expectedCreateInput = + createTableInfo.toPb().setSchema(TABLE_SCHEMA.toPb()); + expectedCreateInput.getExternalDataConfiguration().setSchema(null); + when(bigqueryRpcMock.createSkipExceptionTranslation(expectedCreateInput, EMPTY_RPC_OPTIONS)) + .thenReturn(createTableInfo.toPb()); +
BigQueryOptions bigQueryOptions = + createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); + bigquery = bigQueryOptions.getService(); + Table table = bigquery.create(createTableInfo); + assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(createTableInfo)), table); + verify(bigqueryRpcMock).createSkipExceptionTranslation(expectedCreateInput, EMPTY_RPC_OPTIONS); } @Test - public void testCreateTableWithoutProject() { + void testCreateTableWithoutProject() throws IOException { TableInfo tableInfo = TABLE_INFO.setProjectId(PROJECT); TableId tableId = TableId.of("", TABLE_ID.getDataset(), TABLE_ID.getTable()); tableInfo.toBuilder().setTableId(tableId); - when(bigqueryRpcMock.create(tableInfo.toPb(), EMPTY_RPC_OPTIONS)).thenReturn(tableInfo.toPb()); + when(bigqueryRpcMock.createSkipExceptionTranslation(tableInfo.toPb(), EMPTY_RPC_OPTIONS)) + .thenReturn(tableInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Table table = bigquery.create(tableInfo); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(tableInfo)), table); - verify(bigqueryRpcMock).create(tableInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).createSkipExceptionTranslation(tableInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testCreateTableWithSelectedFields() { - when(bigqueryRpcMock.create(eq(TABLE_INFO_WITH_PROJECT.toPb()), capturedOptions.capture())) + void testCreateTableWithSelectedFields() throws IOException { + when(bigqueryRpcMock.createSkipExceptionTranslation( + eq(TABLE_INFO_WITH_PROJECT.toPb()), capturedOptions.capture())) .thenReturn(TABLE_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Table table = bigquery.create(TABLE_INFO, TABLE_OPTION_FIELDS); @@ -836,34 +918,40 @@ public void testCreateTableWithSelectedFields() { assertTrue(selector.contains("etag")); assertEquals(31, selector.length()); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table); - verify(bigqueryRpcMock).create(eq(TABLE_INFO_WITH_PROJECT.toPb()), capturedOptions.capture()); + verify(bigqueryRpcMock) + .createSkipExceptionTranslation( + eq(TABLE_INFO_WITH_PROJECT.toPb()), capturedOptions.capture()); } @Test - public void testGetTable() { - when(bigqueryRpcMock.getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + void testGetTable() throws IOException { + when(bigqueryRpcMock.getTableSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Table table = bigquery.getTable(DATASET, TABLE); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table); - verify(bigqueryRpcMock).getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getTableSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testGetModel() { - when(bigqueryRpcMock.getModel(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS)) + void testGetModel() throws IOException { + when(bigqueryRpcMock.getModelSkipExceptionTranslation( + PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS)) .thenReturn(MODEL_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Model model = bigquery.getModel(DATASET, MODEL); assertEquals(new Model(bigquery, new ModelInfo.BuilderImpl(MODEL_INFO_WITH_PROJECT)), model); - verify(bigqueryRpcMock).getModel(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + 
.getModelSkipExceptionTranslation(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS); } @Test - public void testGetModelNotFoundWhenThrowIsEnabled() { + void testGetModelNotFoundWhenThrowIsEnabled() throws IOException { String expected = "Model not found"; - when(bigqueryRpcMock.getModel(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS)) - .thenReturn(null) + when(bigqueryRpcMock.getModelSkipExceptionTranslation( + PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(404, expected)); options.setThrowNotFound(true); bigquery = options.getService(); @@ -872,91 +960,105 @@ public void testGetModelNotFoundWhenThrowIsEnabled() { } catch (BigQueryException ex) { assertEquals(expected, ex.getMessage()); } - verify(bigqueryRpcMock).getModel(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getModelSkipExceptionTranslation(PROJECT, DATASET, MODEL, EMPTY_RPC_OPTIONS); } @Test - public void testListPartition() { - when(bigqueryRpcMock.getTable( + void testListPartition() throws IOException { + when(bigqueryRpcMock.getTableSkipExceptionTranslation( PROJECT, DATASET, "table$__PARTITIONS_SUMMARY__", EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_INFO_PARTITIONS.toPb()); - when(bigqueryRpcMock.listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_DATA_WITH_PARTITIONS); bigquery = options.getService(); List partition = bigquery.listPartitions(TABLE_ID_WITH_PROJECT); assertEquals(3, partition.size()); verify(bigqueryRpcMock) - .getTable(PROJECT, DATASET, "table$__PARTITIONS_SUMMARY__", EMPTY_RPC_OPTIONS); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + .getTableSkipExceptionTranslation( + PROJECT, DATASET, "table$__PARTITIONS_SUMMARY__", EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testGetTableNotFoundWhenThrowIsDisabled() { - when(bigqueryRpcMock.getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + void testGetTableNotFoundWhenThrowIsDisabled() throws IOException { + when(bigqueryRpcMock.getTableSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_INFO_WITH_PROJECT.toPb()); options.setThrowNotFound(false); bigquery = options.getService(); Table table = bigquery.getTable(DATASET, TABLE); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table); - verify(bigqueryRpcMock).getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getTableSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testGetTableNotFoundWhenThrowIsEnabled() { - when(bigqueryRpcMock.getTable(PROJECT, DATASET, "table-not-found", EMPTY_RPC_OPTIONS)) - .thenReturn(null) + void testGetTableNotFoundWhenThrowIsEnabled() throws IOException { + when(bigqueryRpcMock.getTableSkipExceptionTranslation( + PROJECT, DATASET, "table-not-found", EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(404, "Table not found")); options.setThrowNotFound(true); bigquery = options.getService(); try { bigquery.getTable(DATASET, "table-not-found"); - Assert.fail(); + Assertions.fail(); } catch (BigQueryException ex) { - Assert.assertNotNull(ex.getMessage()); + Assertions.assertNotNull(ex.getMessage()); } - verify(bigqueryRpcMock).getTable(PROJECT, DATASET, "table-not-found", EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + 
.getTableSkipExceptionTranslation(PROJECT, DATASET, "table-not-found", EMPTY_RPC_OPTIONS); } @Test - public void testGetTableFromTableId() { - when(bigqueryRpcMock.getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + void testGetTableFromTableId() throws IOException { + when(bigqueryRpcMock.getTableSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Table table = bigquery.getTable(TABLE_ID); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table); - verify(bigqueryRpcMock).getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getTableSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testGetTableFromTableIdWithProject() { + void testGetTableFromTableIdWithProject() throws IOException { TableInfo tableInfo = TABLE_INFO.setProjectId(OTHER_PROJECT); TableId tableId = TABLE_ID.setProjectId(OTHER_PROJECT); - when(bigqueryRpcMock.getTable(OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getTableSkipExceptionTranslation( + OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(tableInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Table table = bigquery.getTable(tableId); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(tableInfo)), table); - verify(bigqueryRpcMock).getTable(OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getTableSkipExceptionTranslation(OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testGetTableFromTableIdWithoutProject() { + void testGetTableFromTableIdWithoutProject() throws IOException { TableInfo tableInfo = TABLE_INFO.setProjectId(PROJECT); TableId tableId = TableId.of("", TABLE_ID.getDataset(), TABLE_ID.getTable()); - when(bigqueryRpcMock.getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getTableSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(tableInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Table table = bigquery.getTable(tableId); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(tableInfo)), table); - verify(bigqueryRpcMock).getTable(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getTableSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testGetTableWithSelectedFields() { - when(bigqueryRpcMock.getTable(eq(PROJECT), eq(DATASET), eq(TABLE), capturedOptions.capture())) + void testGetTableWithSelectedFields() throws IOException { + when(bigqueryRpcMock.getTableSkipExceptionTranslation( + eq(PROJECT), eq(DATASET), eq(TABLE), capturedOptions.capture())) .thenReturn(TABLE_INFO_WITH_PROJECT.toPb()); bigquery = options.getService(); Table table = bigquery.getTable(TABLE_ID, TABLE_OPTION_FIELDS); @@ -967,11 +1069,12 @@ public void testGetTableWithSelectedFields() { assertEquals(31, selector.length()); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PROJECT)), table); verify(bigqueryRpcMock) - .getTable(eq(PROJECT), eq(DATASET), eq(TABLE), capturedOptions.capture()); + .getTableSkipExceptionTranslation( + eq(PROJECT), eq(DATASET), eq(TABLE), capturedOptions.capture()); } @Test - public void testListTables() { 
+ void testListTables() throws IOException { bigquery = options.getService(); ImmutableList<Table> tableList = ImmutableList.of( @@ -980,60 +1083,67 @@ public void testListTables() { new Table(bigquery, new TableInfo.BuilderImpl(MODEL_TABLE_INFO_WITH_PROJECT))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Table>> result = Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listTables(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page<Table> page = bigquery.listTables(DATASET); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); - verify(bigqueryRpcMock).listTables(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listTablesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testListTablesReturnedParameters() { + void testListTablesReturnedParameters() throws IOException { bigquery = options.getService(); ImmutableList<Table> tableList = ImmutableList.of( new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PARTITIONS))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Table>> result = Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS)) + .thenReturn(result); Page<Table> page = bigquery.listTables(DATASET, TABLE_LIST_PAGE_SIZE, TABLE_LIST_PAGE_TOKEN); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); - verify(bigqueryRpcMock).listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS); + verify(bigqueryRpcMock) + .listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS); } @Test - public void testListTablesReturnedParametersNullType() { + void testListTablesReturnedParametersNullType() throws IOException { bigquery = options.getService(); ImmutableList<Table> tableList = ImmutableList.of( new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_WITH_PARTITIONS_NULL_TYPE))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Table>> result = Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS)) + .thenReturn(result); Page<Table> page = bigquery.listTables(DATASET, TABLE_LIST_PAGE_SIZE, TABLE_LIST_PAGE_TOKEN); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); - verify(bigqueryRpcMock).listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS); + verify(bigqueryRpcMock) + .listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS); } @Test - public void testListTablesWithRangePartitioning() { + void testListTablesWithRangePartitioning() throws IOException { bigquery = options.getService(); ImmutableList<Table> tableList = ImmutableList.of( new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO_RANGE_PARTITIONING))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Table>> result = Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS)) + .thenReturn(result); Page<Table> page = bigquery.listTables(DATASET, TABLE_LIST_PAGE_SIZE, TABLE_LIST_PAGE_TOKEN); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); - verify(bigqueryRpcMock).listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS); + verify(bigqueryRpcMock) + .listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS); } @Test - public void testListTablesFromDatasetId() { + void testListTablesFromDatasetId() throws IOException { bigquery = options.getService(); ImmutableList<Table> tableList = ImmutableList.of( @@ -1041,30 +1151,51 @@ public void testListTablesFromDatasetId() { new Table(bigquery, new TableInfo.BuilderImpl(OTHER_TABLE_INFO))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Table>> result = Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listTables(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page<Table> page = bigquery.listTables(DatasetId.of(DATASET)); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); - verify(bigqueryRpcMock).listTables(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listTablesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testListTablesFromDatasetIdWithProject() { + void testListTablesFromDatasetIdWithProject() throws IOException { bigquery = options.getService(); ImmutableList<Table> tableList = ImmutableList.of( new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO.setProjectId(OTHER_PROJECT)))); Tuple<String, Iterable<com.google.api.services.bigquery.model.Table>> result = Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listTables(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation( + OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page<Table> page = bigquery.listTables(DatasetId.of(OTHER_PROJECT, DATASET)); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); - verify(bigqueryRpcMock).listTables(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .listTablesSkipExceptionTranslation(OTHER_PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testListTablesWithOptions() { + void testListTablesWithLabels() throws IOException { + bigquery = options.getService(); + ImmutableList
    tableList = + ImmutableList.of( + new Table(bigquery, new TableInfo.BuilderImpl(OTHER_TABLE_WITH_LABELS_INFO))); + Tuple> result = + Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); + Page
    page = bigquery.listTables(DATASET); + assertEquals(CURSOR, page.getNextPageToken()); + assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); + verify(bigqueryRpcMock).listTablesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + assertEquals(LABELS, page.getValues().iterator().next().getLabels()); + } + + @Test + void testListTablesWithOptions() throws IOException { bigquery = options.getService(); ImmutableList
    tableList = ImmutableList.of( @@ -1072,15 +1203,17 @@ public void testListTablesWithOptions() { new Table(bigquery, new TableInfo.BuilderImpl(OTHER_TABLE_INFO))); Tuple> result = Tuple.of(CURSOR, Iterables.transform(tableList, TableInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS)) + .thenReturn(result); Page
    page = bigquery.listTables(DATASET, TABLE_LIST_PAGE_SIZE, TABLE_LIST_PAGE_TOKEN); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(tableList.toArray(), Iterables.toArray(page.getValues(), Table.class)); - verify(bigqueryRpcMock).listTables(PROJECT, DATASET, TABLE_LIST_OPTIONS); + verify(bigqueryRpcMock) + .listTablesSkipExceptionTranslation(PROJECT, DATASET, TABLE_LIST_OPTIONS); } @Test - public void testListModels() { + void testListModels() throws IOException { bigquery = options.getService(); ImmutableList modelList = ImmutableList.of( @@ -1088,15 +1221,16 @@ public void testListModels() { new Model(bigquery, new ModelInfo.BuilderImpl(OTHER_MODEL_INFO))); Tuple> result = Tuple.of(CURSOR, Iterables.transform(modelList, ModelInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listModels(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listModelsSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page page = bigquery.listModels(DATASET); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(modelList.toArray(), Iterables.toArray(page.getValues(), Model.class)); - verify(bigqueryRpcMock).listModels(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listModelsSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testListModelsWithModelId() { + void testListModelsWithModelId() throws IOException { bigquery = options.getService(); ImmutableList modelList = ImmutableList.of( @@ -1104,109 +1238,136 @@ public void testListModelsWithModelId() { new Model(bigquery, new ModelInfo.BuilderImpl(OTHER_MODEL_INFO))); Tuple> result = Tuple.of(CURSOR, Iterables.transform(modelList, ModelInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listModels(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listModelsSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page page = bigquery.listModels(DatasetId.of(DATASET)); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(modelList.toArray(), Iterables.toArray(page.getValues(), Model.class)); - verify(bigqueryRpcMock).listModels(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listModelsSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testDeleteTable() { - when(bigqueryRpcMock.deleteTable(PROJECT, DATASET, TABLE)).thenReturn(true); + void testDeleteTable() throws IOException { + when(bigqueryRpcMock.deleteTableSkipExceptionTranslation(PROJECT, DATASET, TABLE)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(TABLE_ID)); - verify(bigqueryRpcMock).deleteTable(PROJECT, DATASET, TABLE); + verify(bigqueryRpcMock).deleteTableSkipExceptionTranslation(PROJECT, DATASET, TABLE); } @Test - public void testDeleteTableFromTableId() { - when(bigqueryRpcMock.deleteTable(PROJECT, DATASET, TABLE)).thenReturn(true); + void testDeleteTableFromTableId() throws IOException { + when(bigqueryRpcMock.deleteTableSkipExceptionTranslation(PROJECT, DATASET, TABLE)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(TABLE_ID)); - verify(bigqueryRpcMock).deleteTable(PROJECT, DATASET, TABLE); + verify(bigqueryRpcMock).deleteTableSkipExceptionTranslation(PROJECT, DATASET, TABLE); } @Test - public void testDeleteTableFromTableIdWithProject() { + void testDeleteTableFromTableIdWithProject() throws IOException { TableId tableId = TABLE_ID.setProjectId(OTHER_PROJECT); - 
when(bigqueryRpcMock.deleteTable(OTHER_PROJECT, DATASET, TABLE)).thenReturn(true); + when(bigqueryRpcMock.deleteTableSkipExceptionTranslation(OTHER_PROJECT, DATASET, TABLE)) + .thenReturn(true); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); assertTrue(bigquery.delete(tableId)); - verify(bigqueryRpcMock).deleteTable(OTHER_PROJECT, DATASET, TABLE); + verify(bigqueryRpcMock).deleteTableSkipExceptionTranslation(OTHER_PROJECT, DATASET, TABLE); } @Test - public void testDeleteTableFromTableIdWithoutProject() { + void testDeleteTableFromTableIdWithoutProject() throws IOException { TableId tableId = TableId.of("", TABLE_ID.getDataset(), TABLE_ID.getTable()); - when(bigqueryRpcMock.deleteTable(PROJECT, DATASET, TABLE)).thenReturn(true); + when(bigqueryRpcMock.deleteTableSkipExceptionTranslation(PROJECT, DATASET, TABLE)) + .thenReturn(true); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); assertTrue(bigquery.delete(tableId)); - verify(bigqueryRpcMock).deleteTable(PROJECT, DATASET, TABLE); + verify(bigqueryRpcMock).deleteTableSkipExceptionTranslation(PROJECT, DATASET, TABLE); } @Test - public void testDeleteModel() { - when(bigqueryRpcMock.deleteModel(PROJECT, DATASET, MODEL)).thenReturn(true); + void testDeleteModel() throws IOException { + when(bigqueryRpcMock.deleteModelSkipExceptionTranslation(PROJECT, DATASET, MODEL)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(ModelId.of(DATASET, MODEL))); - verify(bigqueryRpcMock).deleteModel(PROJECT, DATASET, MODEL); + verify(bigqueryRpcMock).deleteModelSkipExceptionTranslation(PROJECT, DATASET, MODEL); } @Test - public void testUpdateModel() { + void testUpdateModel() throws IOException { ModelInfo updateModelInfo = - MODEL_INFO_WITH_PROJECT - .setProjectId(OTHER_PROJECT) - .toBuilder() + MODEL_INFO_WITH_PROJECT.setProjectId(OTHER_PROJECT).toBuilder() .setDescription("newDescription") .build(); - when(bigqueryRpcMock.patch(updateModelInfo.toPb(), EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.patchSkipExceptionTranslation(updateModelInfo.toPb(), EMPTY_RPC_OPTIONS)) .thenReturn(updateModelInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Model actualModel = bigquery.update(updateModelInfo); assertEquals(new Model(bigquery, new ModelInfo.BuilderImpl(updateModelInfo)), actualModel); - verify(bigqueryRpcMock).patch(updateModelInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .patchSkipExceptionTranslation(updateModelInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testUpdateTable() { + void testUpdateTable() throws IOException { TableInfo updatedTableInfo = TABLE_INFO.setProjectId(OTHER_PROJECT).toBuilder().setDescription("newDescription").build(); - when(bigqueryRpcMock.patch(updatedTableInfo.toPb(), EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.patchSkipExceptionTranslation(updatedTableInfo.toPb(), EMPTY_RPC_OPTIONS)) .thenReturn(updatedTableInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Table table = bigquery.update(updatedTableInfo); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(updatedTableInfo)), table); - verify(bigqueryRpcMock).patch(updatedTableInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + 
.patchSkipExceptionTranslation(updatedTableInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testUpdateTableWithoutProject() { + void testUpdateExternalTableWithNewSchema() throws IOException { + TableInfo updatedTableInfo = + TableInfo.of(TABLE_ID, ExternalTableDefinition.newBuilder().setSchema(TABLE_SCHEMA).build()) + .setProjectId(OTHER_PROJECT); + + com.google.api.services.bigquery.model.Table expectedPatchInput = + updatedTableInfo.toPb().setSchema(TABLE_SCHEMA.toPb()); + expectedPatchInput.getExternalDataConfiguration().setSchema(null); + when(bigqueryRpcMock.patchSkipExceptionTranslation(expectedPatchInput, EMPTY_RPC_OPTIONS)) + .thenReturn(updatedTableInfo.toPb()); + BigQueryOptions bigQueryOptions = + createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); + bigquery = bigQueryOptions.getService(); + Table table = bigquery.update(updatedTableInfo); + assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(updatedTableInfo)), table); + verify(bigqueryRpcMock).patchSkipExceptionTranslation(expectedPatchInput, EMPTY_RPC_OPTIONS); + } + + @Test + void testUpdateTableWithoutProject() throws IOException { TableInfo tableInfo = TABLE_INFO.setProjectId(PROJECT); TableId tableId = TableId.of("", TABLE_ID.getDataset(), TABLE_ID.getTable()); tableInfo.toBuilder().setTableId(tableId); - when(bigqueryRpcMock.patch(tableInfo.toPb(), EMPTY_RPC_OPTIONS)).thenReturn(tableInfo.toPb()); + when(bigqueryRpcMock.patchSkipExceptionTranslation(tableInfo.toPb(), EMPTY_RPC_OPTIONS)) + .thenReturn(tableInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Table table = bigquery.update(tableInfo); assertEquals(new Table(bigquery, new TableInfo.BuilderImpl(tableInfo)), table); - verify(bigqueryRpcMock).patch(tableInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).patchSkipExceptionTranslation(tableInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testUpdateTableWithSelectedFields() { + void testUpdateTableWithSelectedFields() throws IOException { TableInfo updatedTableInfo = TABLE_INFO.toBuilder().setDescription("newDescription").build(); TableInfo updatedTableInfoWithProject = TABLE_INFO_WITH_PROJECT.toBuilder().setDescription("newDescription").build(); - when(bigqueryRpcMock.patch(eq(updatedTableInfoWithProject.toPb()), capturedOptions.capture())) + when(bigqueryRpcMock.patchSkipExceptionTranslation( + eq(updatedTableInfoWithProject.toPb()), capturedOptions.capture())) .thenReturn(updatedTableInfoWithProject.toPb()); bigquery = options.getService(); Table table = bigquery.update(updatedTableInfo, TABLE_OPTION_FIELDS); @@ -1218,11 +1379,32 @@ public void testUpdateTableWithSelectedFields() { assertEquals( new Table(bigquery, new TableInfo.BuilderImpl(updatedTableInfoWithProject)), table); verify(bigqueryRpcMock) - .patch(eq(updatedTableInfoWithProject.toPb()), capturedOptions.capture()); + .patchSkipExceptionTranslation( + eq(updatedTableInfoWithProject.toPb()), capturedOptions.capture()); + } + + @Test + void testUpdateTableWithAutoDetectSchema() throws IOException { + TableInfo updatedTableInfo = TABLE_INFO.toBuilder().setDescription("newDescription").build(); + TableInfo updatedTableInfoWithProject = + TABLE_INFO_WITH_PROJECT.toBuilder().setDescription("newDescription").build(); + when(bigqueryRpcMock.patchSkipExceptionTranslation( + eq(updatedTableInfoWithProject.toPb()), capturedOptions.capture())) + .thenReturn(updatedTableInfoWithProject.toPb()); + bigquery = options.getService(); 
+ Table table = bigquery.update(updatedTableInfo, BigQuery.TableOption.autodetectSchema(true)); + Boolean selector = + (Boolean) capturedOptions.getValue().get(BigQueryRpc.Option.AUTODETECT_SCHEMA); + assertTrue(selector); + assertEquals( + new Table(bigquery, new TableInfo.BuilderImpl(updatedTableInfoWithProject)), table); + verify(bigqueryRpcMock) + .patchSkipExceptionTranslation( + eq(updatedTableInfoWithProject.toPb()), capturedOptions.capture()); } @Test - public void testInsertAllWithRowIdShouldRetry() { + void testInsertAllWithRowIdShouldRetry() throws IOException { Map row1 = ImmutableMap.of("field", "value1"); Map row2 = ImmutableMap.of("field", "value2"); List rows = @@ -1257,12 +1439,11 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { new TableDataInsertAllResponse.InsertErrors() .setIndex(0L) .setErrors(ImmutableList.of(new ErrorProto().setMessage("ErrorMessage"))))); - when(bigqueryRpcMock.insertAll(PROJECT, DATASET, TABLE, requestPb)) + when(bigqueryRpcMock.insertAllSkipExceptionTranslation(PROJECT, DATASET, TABLE, requestPb)) .thenThrow(new BigQueryException(500, "InternalError")) .thenReturn(responsePb); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); @@ -1271,11 +1452,12 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { assertNull(response.getErrorsFor(1L)); assertEquals(1, response.getErrorsFor(0L).size()); assertEquals("ErrorMessage", response.getErrorsFor(0L).get(0).getMessage()); - verify(bigqueryRpcMock, times(2)).insertAll(PROJECT, DATASET, TABLE, requestPb); + verify(bigqueryRpcMock, times(2)) + .insertAllSkipExceptionTranslation(PROJECT, DATASET, TABLE, requestPb); } @Test - public void testInsertAllWithoutRowIdShouldNotRetry() { + void testInsertAllWithoutRowIdShouldNotRetry() { Map row1 = ImmutableMap.of("field", "value1"); Map row2 = ImmutableMap.of("field", "value2"); List rows = ImmutableList.of(RowToInsert.of(row1), RowToInsert.of(row2)); @@ -1305,22 +1487,21 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { when(bigqueryRpcMock.insertAll(PROJECT, DATASET, TABLE, requestPb)) .thenThrow(new BigQueryException(500, "InternalError")); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); try { bigquery.insertAll(request); - Assert.fail(); + Assertions.fail(); } catch (BigQueryException ex) { - Assert.assertNotNull(ex.getMessage()); + Assertions.assertNotNull(ex.getMessage()); } verify(bigqueryRpcMock).insertAll(PROJECT, DATASET, TABLE, requestPb); } @Test - public void testInsertAllWithProject() { + void testInsertAllWithProject() throws IOException { Map row1 = ImmutableMap.of("field", "value1"); Map row2 = ImmutableMap.of("field", "value2"); List rows = @@ -1356,7 +1537,8 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { new TableDataInsertAllResponse.InsertErrors() .setIndex(0L) .setErrors(ImmutableList.of(new ErrorProto().setMessage("ErrorMessage"))))); - when(bigqueryRpcMock.insertAll(OTHER_PROJECT, DATASET, TABLE, requestPb)) + when(bigqueryRpcMock.insertAllSkipExceptionTranslation( + OTHER_PROJECT, DATASET, TABLE, requestPb)) .thenReturn(responsePb); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); @@ -1366,11 +1548,12 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { assertNull(response.getErrorsFor(1L)); 
assertEquals(1, response.getErrorsFor(0L).size()); assertEquals("ErrorMessage", response.getErrorsFor(0L).get(0).getMessage()); - verify(bigqueryRpcMock).insertAll(OTHER_PROJECT, DATASET, TABLE, requestPb); + verify(bigqueryRpcMock) + .insertAllSkipExceptionTranslation(OTHER_PROJECT, DATASET, TABLE, requestPb); } @Test - public void testInsertAllWithProjectInTable() { + void testInsertAllWithProjectInTable() throws IOException { Map row1 = ImmutableMap.of("field", "value1"); Map row2 = ImmutableMap.of("field", "value2"); List rows = @@ -1406,7 +1589,8 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { new TableDataInsertAllResponse.InsertErrors() .setIndex(0L) .setErrors(ImmutableList.of(new ErrorProto().setMessage("ErrorMessage"))))); - when(bigqueryRpcMock.insertAll("project-different-from-option", DATASET, TABLE, requestPb)) + when(bigqueryRpcMock.insertAllSkipExceptionTranslation( + "project-different-from-option", DATASET, TABLE, requestPb)) .thenReturn(responsePb); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); @@ -1416,47 +1600,56 @@ public TableDataInsertAllRequest.Rows apply(RowToInsert rowToInsert) { assertNull(response.getErrorsFor(1L)); assertEquals(1, response.getErrorsFor(0L).size()); assertEquals("ErrorMessage", response.getErrorsFor(0L).get(0).getMessage()); - verify(bigqueryRpcMock).insertAll("project-different-from-option", DATASET, TABLE, requestPb); + verify(bigqueryRpcMock) + .insertAllSkipExceptionTranslation( + "project-different-from-option", DATASET, TABLE, requestPb); } @Test - public void testListTableData() { - when(bigqueryRpcMock.listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + void testListTableData() throws IOException { + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_DATA_PB); bigquery = options.getService(); Page page = bigquery.listTableData(DATASET, TABLE); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(TABLE_DATA.toArray(), Iterables.toArray(page.getValues(), List.class)); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testListTableDataFromTableId() { - when(bigqueryRpcMock.listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + void testListTableDataFromTableId() throws IOException { + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_DATA_PB); bigquery = options.getService(); Page page = bigquery.listTableData(TableId.of(DATASET, TABLE)); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(TABLE_DATA.toArray(), Iterables.toArray(page.getValues(), List.class)); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testListTableDataFromTableIdWithProject() { + void testListTableDataFromTableIdWithProject() throws IOException { TableId tableId = TABLE_ID.setProjectId(OTHER_PROJECT); - when(bigqueryRpcMock.listTableData(OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( + OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(TABLE_DATA_PB); BigQueryOptions bigQueryOptions = 
createBigQueryOptionsForProject(PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Page page = bigquery.listTableData(tableId); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(TABLE_DATA.toArray(), Iterables.toArray(page.getValues(), List.class)); - verify(bigqueryRpcMock).listTableData(OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation(OTHER_PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testListTableDataWithOptions() { - when(bigqueryRpcMock.listTableData(PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS)) + void testListTableDataWithOptions() throws IOException { + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS)) .thenReturn(TABLE_DATA_PB); bigquery = options.getService(); Page page = @@ -1468,14 +1661,15 @@ public void testListTableDataWithOptions() { TABLE_DATA_LIST_START_INDEX); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(TABLE_DATA.toArray(), Iterables.toArray(page.getValues(), List.class)); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS); } @Test - public void testListTableDataWithNextPage() { + void testListTableDataWithNextPage() throws IOException { doReturn(TABLE_DATA_PB) .when(bigqueryRpcMock) - .listTableData(PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS); + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS); bigquery = options.getService(); TableResult page = bigquery.listTableData( @@ -1485,7 +1679,8 @@ public void testListTableDataWithNextPage() { TABLE_DATA_LIST_PAGE_TOKEN, TABLE_DATA_LIST_START_INDEX); assertEquals(CURSOR, page.getNextPageToken()); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, TABLE_DATA_LIST_OPTIONS); assertArrayEquals(TABLE_DATA.toArray(), Iterables.toArray(page.getValues(), List.class)); Map SECOND_TABLE_DATA_LIST_OPTIONS = ImmutableMap.of(BigQueryRpc.Option.PAGE_TOKEN, CURSOR, BigQueryRpc.Option.START_INDEX, 0L); @@ -1498,11 +1693,14 @@ public void testListTableDataWithNextPage() { new TableRow().setF(ImmutableList.of(new TableCell().setV("Value3"))), new TableRow().setF(ImmutableList.of(new TableCell().setV("Value4")))))) .when(bigqueryRpcMock) - .listTableData(PROJECT, DATASET, TABLE, SECOND_TABLE_DATA_LIST_OPTIONS); + .listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, SECOND_TABLE_DATA_LIST_OPTIONS); assertTrue(page.hasNextPage()); page = page.getNextPage(); assertNull(page.getNextPageToken()); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, SECOND_TABLE_DATA_LIST_OPTIONS); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, SECOND_TABLE_DATA_LIST_OPTIONS); } // The "minimally initialized" Job that lets Job.fromPb run without throwing. 
@@ -1514,27 +1712,205 @@ private static com.google.api.services.bigquery.model.Job newJobPb() {
   }

   @Test
-  public void testCreateJobSuccess() {
+  void testCreateJobSuccess() throws IOException {
     String id = "testCreateJobSuccess-id";
     JobId jobId = JobId.of(id);
     String query = "SELECT * in FOO";
-    when(bigqueryRpcMock.create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)))
+    when(bigqueryRpcMock.createSkipExceptionTranslation(
+            jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)))
         .thenReturn(newJobPb());
     bigquery = options.getService();
     assertThat(bigquery.create(JobInfo.of(jobId, QueryJobConfiguration.of(query)))).isNotNull();
     assertThat(jobCapture.getValue().getJobReference().getJobId()).isEqualTo(id);
-    verify(bigqueryRpcMock).create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS));
+    verify(bigqueryRpcMock)
+        .createSkipExceptionTranslation(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS));
+  }
+
+  @Test
+  void testCreateJobFailureShouldRetryExceptionHandlerExceptions() throws IOException {
+    when(bigqueryRpcMock.createSkipExceptionTranslation(
+            jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)))
+        .thenThrow(new UnknownHostException())
+        .thenThrow(new ConnectException())
+        .thenReturn(newJobPb());
+
+    bigquery = options.getService();
+    bigquery =
+        options.toBuilder()
+            .setRetrySettings(ServiceOptions.getDefaultRetrySettings())
+            .build()
+            .getService();
+
+    ((BigQueryImpl) bigquery).create(JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY));
+    verify(bigqueryRpcMock, times(3))
+        .createSkipExceptionTranslation(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS));
+  }
+
+  @Test
+  void testCreateJobFailureShouldRetry() throws IOException {
+    when(bigqueryRpcMock.createSkipExceptionTranslation(
+            jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)))
+        .thenThrow(new BigQueryException(500, "InternalError"))
+        .thenThrow(new BigQueryException(502, "Bad Gateway"))
+        .thenThrow(new BigQueryException(503, "Service Unavailable"))
+        .thenThrow(
+            new BigQueryException(
+                400, RATE_LIMIT_ERROR_MSG)) // retried based on RATE_LIMIT_EXCEEDED_MSG
+        .thenThrow(new BigQueryException(200, RATE_LIMIT_ERROR_MSG))
+        .thenReturn(newJobPb());
+
+    bigquery = options.getService();
+    bigquery =
+        options.toBuilder()
+            .setRetrySettings(ServiceOptions.getDefaultRetrySettings())
+            .build()
+            .getService();
+
+    ((BigQueryImpl) bigquery).create(JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY));
+    verify(bigqueryRpcMock, times(6))
+        .createSkipExceptionTranslation(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS));
+  }
+
+  @Test
+  void testCreateJobWithBigQueryRetryConfigFailureShouldRetry() throws IOException {
+    // Validate create job with a BigQueryRetryConfig that retries on rate limit error messages.
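+    // The config retries on matching error messages (retryOnMessage/retryOnRegEx) rather than on
+    // status codes, which is why the 400 and 200 responses stubbed below with
+    // RATE_LIMIT_ERROR_MSG are retried even though those codes are not retryable by default.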
+    JobOption bigQueryRetryConfigOption =
+        JobOption.bigQueryRetryConfig(
+            BigQueryRetryConfig.newBuilder()
+                .retryOnMessage(BigQueryErrorMessages.RATE_LIMIT_EXCEEDED_MSG)
+                .retryOnMessage(BigQueryErrorMessages.JOB_RATE_LIMIT_EXCEEDED_MSG)
+                .retryOnRegEx(BigQueryErrorMessages.RetryRegExPatterns.RATE_LIMIT_EXCEEDED_REGEX)
+                .build());
+
+    Map bigQueryRpcOptions = optionMap(bigQueryRetryConfigOption);
+    when(bigqueryRpcMock.createSkipExceptionTranslation(
+            jobCapture.capture(), eq(bigQueryRpcOptions)))
+        .thenThrow(
+            new BigQueryException(
+                400, RATE_LIMIT_ERROR_MSG)) // retried based on RATE_LIMIT_EXCEEDED_MSG
+        .thenThrow(new BigQueryException(200, RATE_LIMIT_ERROR_MSG))
+        .thenReturn(newJobPb());
+
+    bigquery = options.getService();
+    bigquery =
+        options.toBuilder()
+            .setRetrySettings(ServiceOptions.getDefaultRetrySettings())
+            .build()
+            .getService();
+
+    ((BigQueryImpl) bigquery)
+        .create(JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY), bigQueryRetryConfigOption);
+    verify(bigqueryRpcMock, times(3))
+        .createSkipExceptionTranslation(jobCapture.capture(), eq(bigQueryRpcOptions));
   }

   @Test
-  public void testCreateJobWithSelectedFields() {
-    when(bigqueryRpcMock.create(
+  void testCreateJobWithBigQueryRetryConfigFailureShouldNotRetry() throws IOException {
+    // Validate create job with a BigQueryRetryConfig that does not retry on rate limit error
+    // messages.
+    JobOption bigQueryRetryConfigOption =
+        JobOption.bigQueryRetryConfig(BigQueryRetryConfig.newBuilder().build());
+
+    Map bigQueryRpcOptions = optionMap(bigQueryRetryConfigOption);
+    when(bigqueryRpcMock.createSkipExceptionTranslation(
+            jobCapture.capture(), eq(bigQueryRpcOptions)))
+        .thenThrow(new BigQueryException(400, RATE_LIMIT_ERROR_MSG));
+
+    // Job create will still attempt to retrieve the job even when the create call returns a
+    // failure, in case the job was actually created.
+    when(bigqueryRpcMock.getJobSkipExceptionTranslation(
+            nullable(String.class), nullable(String.class), nullable(String.class), any()))
+        .thenThrow(new BigQueryException(500, "InternalError"));
+
+    bigquery = options.getService();
+    bigquery =
+        options.toBuilder()
+            .setRetrySettings(ServiceOptions.getDefaultRetrySettings())
+            .build()
+            .getService();
+
+    BigQueryException e =
+        Assertions.assertThrows(
+            BigQueryException.class,
+            () ->
+                ((BigQueryImpl) bigquery)
+                    .create(
+                        JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY),
+                        bigQueryRetryConfigOption));
+    assertNotNull(e.getMessage());
+    // Verify that job creation is attempted only once and not retried since the error message
+    // does not match.
+    verify(bigqueryRpcMock, times(1))
+        .createSkipExceptionTranslation(jobCapture.capture(), eq(bigQueryRpcOptions));
+  }
+
+  @Test
+  void testCreateJobWithRetryOptionsFailureShouldRetry() throws IOException {
+    // Validate create job with RetryOptions.
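+    // RetryOption.maxAttempts(4) caps the call at four total attempts (the initial call plus
+    // three retries), which exactly absorbs the three stubbed failures below.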
+    JobOption retryOptions = JobOption.retryOptions(RetryOption.maxAttempts(4));
+    Map bigQueryRpcOptions = optionMap(retryOptions);
+    when(bigqueryRpcMock.createSkipExceptionTranslation(
+            jobCapture.capture(), eq(bigQueryRpcOptions)))
+        .thenThrow(new BigQueryException(500, "InternalError"))
+        .thenThrow(new BigQueryException(502, "Bad Gateway"))
+        .thenThrow(new BigQueryException(503, "Service Unavailable"))
+        .thenReturn(newJobPb());
+
+    bigquery = options.getService();
+    bigquery =
+        options.toBuilder()
+            .setRetrySettings(ServiceOptions.getDefaultRetrySettings())
+            .build()
+            .getService();
+
+    ((BigQueryImpl) bigquery)
+        .create(JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY), retryOptions);
+    verify(bigqueryRpcMock, times(4))
+        .createSkipExceptionTranslation(jobCapture.capture(), eq(bigQueryRpcOptions));
+  }
+
+  @Test
+  void testCreateJobWithRetryOptionsFailureShouldNotRetry() throws IOException {
+    // Validate create job with RetryOptions that only attempts once (no retry).
+    JobOption retryOptions = JobOption.retryOptions(RetryOption.maxAttempts(1));
+    Map bigQueryRpcOptions = optionMap(retryOptions);
+    when(bigqueryRpcMock.createSkipExceptionTranslation(
+            jobCapture.capture(), eq(bigQueryRpcOptions)))
+        .thenThrow(new BigQueryException(500, "InternalError"))
+        .thenReturn(newJobPb());
+
+    // Job create will still attempt to retrieve the job even when the create call returns a
+    // failure, in case the job was actually created.
+    when(bigqueryRpcMock.getJobSkipExceptionTranslation(
+            nullable(String.class), nullable(String.class), nullable(String.class), any()))
+        .thenThrow(new BigQueryException(500, "InternalError"));
+
+    bigquery = options.getService();
+    bigquery =
+        options.toBuilder()
+            .setRetrySettings(ServiceOptions.getDefaultRetrySettings())
+            .build()
+            .getService();
+
+    BigQueryException e =
+        Assertions.assertThrows(
+            BigQueryException.class,
+            () ->
+                ((BigQueryImpl) bigquery)
+                    .create(JobInfo.of(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY), retryOptions));
+    assertNotNull(e.getMessage());
+    verify(bigqueryRpcMock, times(1))
+        .createSkipExceptionTranslation(jobCapture.capture(), eq(bigQueryRpcOptions));
+  }
+
+  @Test
+  void testCreateJobWithSelectedFields() throws IOException {
+    when(bigqueryRpcMock.createSkipExceptionTranslation(
             any(com.google.api.services.bigquery.model.Job.class), capturedOptions.capture()))
         .thenReturn(newJobPb());
-    BigQuery.JobOption jobOptions = BigQuery.JobOption.fields(BigQuery.JobField.USER_EMAIL);
+    JobOption jobOptions = JobOption.fields(USER_EMAIL);
     bigquery = options.getService();
     bigquery.create(JobInfo.of(QueryJobConfiguration.of("SOME QUERY")), jobOptions);
@@ -1545,30 +1921,32 @@ public void testCreateJobWithSelectedFields() {
         .asList()
         .containsExactly("jobReference", "configuration", "user_email");
     verify(bigqueryRpcMock)
-        .create(any(com.google.api.services.bigquery.model.Job.class), capturedOptions.capture());
+        .createSkipExceptionTranslation(
+            any(com.google.api.services.bigquery.model.Job.class), capturedOptions.capture());
   }

   @Test
-  public void testCreateJobNoGet() {
+  void testCreateJobNoGet() throws IOException {
     String id = "testCreateJobNoGet-id";
     JobId jobId = JobId.of(id);
     String query = "SELECT * in FOO";
-    when(bigqueryRpcMock.create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)))
+    when(bigqueryRpcMock.createSkipExceptionTranslation(
+            jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)))
         .thenThrow(new BigQueryException(409, "already exists, for some reason"));
     bigquery = options.getService();
-    try {
-      bigquery.create(JobInfo.of(jobId,
QueryJobConfiguration.of(query))); - fail("should throw"); - } catch (BigQueryException e) { - assertThat(jobCapture.getValue().getJobReference().getJobId()).isEqualTo(id); - } - verify(bigqueryRpcMock).create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); + BigQueryException e = + Assertions.assertThrows( + BigQueryException.class, + () -> bigquery.create(JobInfo.of(jobId, QueryJobConfiguration.of(query)))); + assertThat(jobCapture.getValue().getJobReference().getJobId()).isEqualTo(id); + verify(bigqueryRpcMock) + .createSkipExceptionTranslation(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); } @Test - public void testCreateJobTryGet() { + void testCreateJobTryGet() throws IOException { final String id = "testCreateJobTryGet-id"; String query = "SELECT * in FOO"; Supplier idProvider = @@ -1579,27 +1957,63 @@ public JobId get() { } }; - when(bigqueryRpcMock.create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) + when(bigqueryRpcMock.createSkipExceptionTranslation( + jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) .thenThrow(new BigQueryException(409, "already exists, for some reason")); - when(bigqueryRpcMock.getJob( + when(bigqueryRpcMock.getJobSkipExceptionTranslation( any(String.class), eq(id), eq((String) null), eq(EMPTY_RPC_OPTIONS))) .thenReturn(newJobPb()); bigquery = options.getService(); ((BigQueryImpl) bigquery).create(JobInfo.of(QueryJobConfiguration.of(query)), idProvider); assertThat(jobCapture.getValue().getJobReference().getJobId()).isEqualTo(id); - verify(bigqueryRpcMock).create(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); verify(bigqueryRpcMock) - .getJob(any(String.class), eq(id), eq((String) null), eq(EMPTY_RPC_OPTIONS)); + .createSkipExceptionTranslation(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); + verify(bigqueryRpcMock) + .getJobSkipExceptionTranslation( + any(String.class), eq(id), eq((String) null), eq(EMPTY_RPC_OPTIONS)); } @Test - public void testCreateJobWithProjectId() { + void testCreateJobTryGetNotRandom() throws IOException { + Map withStatisticOption = optionMap(JobOption.fields(STATISTICS)); + final String id = "testCreateJobTryGet-id"; + String query = "SELECT * in FOO"; + + when(bigqueryRpcMock.createSkipExceptionTranslation( + jobCapture.capture(), eq(EMPTY_RPC_OPTIONS))) + .thenThrow( + new BigQueryException( + 409, + "already exists, for some reason", + new RuntimeException("Already Exists: Job"))); + when(bigqueryRpcMock.getJobSkipExceptionTranslation( + any(String.class), eq(id), eq((String) null), eq(withStatisticOption))) + .thenReturn( + newJobPb() + .setId(id) + .setStatistics(new JobStatistics().setCreationTime(System.currentTimeMillis()))); + + bigquery = options.getService(); + Job job = + ((BigQueryImpl) bigquery).create(JobInfo.of(JobId.of(id), QueryJobConfiguration.of(query))); + assertThat(job).isNotNull(); + assertThat(jobCapture.getValue().getJobReference().getJobId()).isEqualTo(id); + verify(bigqueryRpcMock) + .createSkipExceptionTranslation(jobCapture.capture(), eq(EMPTY_RPC_OPTIONS)); + verify(bigqueryRpcMock) + .getJobSkipExceptionTranslation( + any(String.class), eq(id), eq((String) null), eq(withStatisticOption)); + } + + @Test + void testCreateJobWithProjectId() throws IOException { JobInfo jobInfo = JobInfo.newBuilder(QUERY_JOB_CONFIGURATION.setProjectId(OTHER_PROJECT)) .setJobId(JobId.of(OTHER_PROJECT, JOB)) .build(); - when(bigqueryRpcMock.create(eq(jobInfo.toPb()), capturedOptions.capture())) + when(bigqueryRpcMock.createSkipExceptionTranslation( + eq(jobInfo.toPb()), capturedOptions.capture())) .thenReturn(jobInfo.toPb()); 
BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); @@ -1611,105 +2025,120 @@ public void testCreateJobWithProjectId() { assertTrue(selector.contains("configuration")); assertTrue(selector.contains("user_email")); assertEquals(37, selector.length()); - verify(bigqueryRpcMock).create(eq(jobInfo.toPb()), capturedOptions.capture()); + verify(bigqueryRpcMock) + .createSkipExceptionTranslation(eq(jobInfo.toPb()), capturedOptions.capture()); + } + + @Test + void testDeleteJob() throws IOException { + JobId jobId = JobId.newBuilder().setJob(JOB).setProject(PROJECT).setLocation(LOCATION).build(); + when(bigqueryRpcMock.deleteJobSkipExceptionTranslation(PROJECT, JOB, LOCATION)) + .thenReturn(true); + bigquery = options.getService(); + assertTrue(bigquery.delete(jobId)); + verify(bigqueryRpcMock).deleteJobSkipExceptionTranslation(PROJECT, JOB, LOCATION); } @Test - public void testGetJob() { - when(bigqueryRpcMock.getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + void testGetJob() throws IOException { + when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenReturn(COMPLETE_COPY_JOB.toPb()); bigquery = options.getService(); Job job = bigquery.getJob(JOB); assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); - verify(bigqueryRpcMock).getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); } @Test - public void testGetJobWithLocation() { - when(bigqueryRpcMock.getJob(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS)) + void testGetJobWithLocation() throws IOException { + when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS)) .thenReturn(COMPLETE_COPY_JOB.toPb()); BigQueryOptions options = createBigQueryOptionsForProjectWithLocation(PROJECT, rpcFactoryMock); bigquery = options.getService(); Job job = bigquery.getJob(JOB); assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); - verify(bigqueryRpcMock).getJob(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getJobSkipExceptionTranslation(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS); } @Test - public void testGetJobNotFoundWhenThrowIsDisabled() { - when(bigqueryRpcMock.getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + void testGetJobNotFoundWhenThrowIsDisabled() throws IOException { + when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenReturn(COMPLETE_COPY_JOB.toPb()); options.setThrowNotFound(false); bigquery = options.getService(); Job job = bigquery.getJob(JOB); assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); - verify(bigqueryRpcMock).getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); } @Test - public void testGetJobNotFoundWhenThrowIsEnabled() { - when(bigqueryRpcMock.getJob(PROJECT, "job-not-found", null, EMPTY_RPC_OPTIONS)) - .thenReturn(null) - .thenThrow(new BigQueryException(404, "Job not found")); + void testGetJobNotFoundWhenThrowIsEnabled() throws IOException { + when(bigqueryRpcMock.getJobSkipExceptionTranslation( + PROJECT, "job-not-found", null, EMPTY_RPC_OPTIONS)) + .thenThrow(new IOException("Job not found")); options.setThrowNotFound(true); bigquery = options.getService(); - try { - bigquery.getJob("job-not-found"); - Assert.fail(); - } catch (BigQueryException ex) { - 
Assert.assertNotNull(ex.getMessage()); - } - verify(bigqueryRpcMock).getJob(PROJECT, "job-not-found", null, EMPTY_RPC_OPTIONS); + BigQueryException ex = + Assertions.assertThrows(BigQueryException.class, () -> bigquery.getJob("job-not-found")); + Assertions.assertNotNull(ex.getMessage()); + verify(bigqueryRpcMock) + .getJobSkipExceptionTranslation(PROJECT, "job-not-found", null, EMPTY_RPC_OPTIONS); } @Test - public void testGetJobFromJobId() { - when(bigqueryRpcMock.getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + void testGetJobFromJobId() throws IOException { + when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenReturn(COMPLETE_COPY_JOB.toPb()); bigquery = options.getService(); Job job = bigquery.getJob(JobId.of(JOB)); assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); - verify(bigqueryRpcMock).getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); } @Test - public void testGetJobFromJobIdWithLocation() { - when(bigqueryRpcMock.getJob(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS)) + void testGetJobFromJobIdWithLocation() throws IOException { + when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS)) .thenReturn(COMPLETE_COPY_JOB.toPb()); BigQueryOptions options = createBigQueryOptionsForProjectWithLocation(PROJECT, rpcFactoryMock); bigquery = options.getService(); Job job = bigquery.getJob(JobId.of(JOB)); assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(COMPLETE_COPY_JOB)), job); - verify(bigqueryRpcMock).getJob(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getJobSkipExceptionTranslation(PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS); } @Test - public void testGetJobFromJobIdWithProject() { + void testGetJobFromJobIdWithProject() throws IOException { JobId jobId = JobId.of(OTHER_PROJECT, JOB); JobInfo jobInfo = COPY_JOB.setProjectId(OTHER_PROJECT); - when(bigqueryRpcMock.getJob(OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getJobSkipExceptionTranslation( + OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenReturn(jobInfo.toPb()); bigquery = options.getService(); Job job = bigquery.getJob(jobId); assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(jobInfo)), job); - verify(bigqueryRpcMock).getJob(OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getJobSkipExceptionTranslation(OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS); } @Test - public void testGetJobFromJobIdWithProjectWithLocation() { + void testGetJobFromJobIdWithProjectWithLocation() throws IOException { JobId jobId = JobId.of(OTHER_PROJECT, JOB); JobInfo jobInfo = COPY_JOB.setProjectId(OTHER_PROJECT); - when(bigqueryRpcMock.getJob(OTHER_PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getJobSkipExceptionTranslation( + OTHER_PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS)) .thenReturn(jobInfo.toPb()); BigQueryOptions options = createBigQueryOptionsForProjectWithLocation(PROJECT, rpcFactoryMock); bigquery = options.getService(); Job job = bigquery.getJob(jobId); assertEquals(new Job(bigquery, new JobInfo.BuilderImpl(jobInfo)), job); - verify(bigqueryRpcMock).getJob(OTHER_PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getJobSkipExceptionTranslation(OTHER_PROJECT, JOB, LOCATION, EMPTY_RPC_OPTIONS); } @Test - public void testListJobs() { + void testListJobs() throws IOException { bigquery = options.getService(); 
ImmutableList jobList = ImmutableList.of( @@ -1726,15 +2155,16 @@ public com.google.api.services.bigquery.model.Job apply(Job job) { return job.toPb(); } })); - when(bigqueryRpcMock.listJobs(PROJECT, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listJobsSkipExceptionTranslation(PROJECT, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page page = bigquery.listJobs(); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(jobList.toArray(), Iterables.toArray(page.getValues(), Job.class)); - verify(bigqueryRpcMock).listJobs(PROJECT, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).listJobsSkipExceptionTranslation(PROJECT, EMPTY_RPC_OPTIONS); } @Test - public void testListJobsWithOptions() { + void testListJobsWithOptions() throws IOException { bigquery = options.getService(); ImmutableList jobList = ImmutableList.of( @@ -1751,17 +2181,18 @@ public com.google.api.services.bigquery.model.Job apply(Job job) { return job.toPb(); } })); - when(bigqueryRpcMock.listJobs(PROJECT, JOB_LIST_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listJobsSkipExceptionTranslation(PROJECT, JOB_LIST_OPTIONS)) + .thenReturn(result); Page page = bigquery.listJobs( JOB_LIST_ALL_USERS, JOB_LIST_STATE_FILTER, JOB_LIST_PAGE_TOKEN, JOB_LIST_PAGE_SIZE); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(jobList.toArray(), Iterables.toArray(page.getValues(), Job.class)); - verify(bigqueryRpcMock).listJobs(PROJECT, JOB_LIST_OPTIONS); + verify(bigqueryRpcMock).listJobsSkipExceptionTranslation(PROJECT, JOB_LIST_OPTIONS); } @Test - public void testListJobsWithSelectedFields() { + void testListJobsWithSelectedFields() throws IOException { bigquery = options.getService(); ImmutableList jobList = ImmutableList.of( @@ -1778,7 +2209,8 @@ public com.google.api.services.bigquery.model.Job apply(Job job) { return job.toPb(); } })); - when(bigqueryRpcMock.listJobs(eq(PROJECT), capturedOptions.capture())).thenReturn(result); + when(bigqueryRpcMock.listJobsSkipExceptionTranslation(eq(PROJECT), capturedOptions.capture())) + .thenReturn(result); Page page = bigquery.listJobs(JOB_LIST_OPTION_FIELD); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(jobList.toArray(), Iterables.toArray(page.getValues(), Job.class)); @@ -1791,36 +2223,37 @@ public com.google.api.services.bigquery.model.Job apply(Job job) { assertTrue(selector.contains("errorResult")); assertTrue(selector.contains(")")); assertEquals(75, selector.length()); - verify(bigqueryRpcMock).listJobs(eq(PROJECT), capturedOptions.capture()); + verify(bigqueryRpcMock) + .listJobsSkipExceptionTranslation(eq(PROJECT), capturedOptions.capture()); } @Test - public void testCancelJob() { - when(bigqueryRpcMock.cancel(PROJECT, JOB, null)).thenReturn(true); + void testCancelJob() throws IOException { + when(bigqueryRpcMock.cancelSkipExceptionTranslation(PROJECT, JOB, null)).thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.cancel(JOB)); - verify(bigqueryRpcMock).cancel(PROJECT, JOB, null); + verify(bigqueryRpcMock).cancelSkipExceptionTranslation(PROJECT, JOB, null); } @Test - public void testCancelJobFromJobId() { - when(bigqueryRpcMock.cancel(PROJECT, JOB, null)).thenReturn(true); + void testCancelJobFromJobId() throws IOException { + when(bigqueryRpcMock.cancelSkipExceptionTranslation(PROJECT, JOB, null)).thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.cancel(JobId.of(PROJECT, JOB))); - verify(bigqueryRpcMock).cancel(PROJECT, JOB, null); + 
verify(bigqueryRpcMock).cancelSkipExceptionTranslation(PROJECT, JOB, null); } @Test - public void testCancelJobFromJobIdWithProject() { + void testCancelJobFromJobIdWithProject() throws IOException { JobId jobId = JobId.of(OTHER_PROJECT, JOB); - when(bigqueryRpcMock.cancel(OTHER_PROJECT, JOB, null)).thenReturn(true); + when(bigqueryRpcMock.cancelSkipExceptionTranslation(OTHER_PROJECT, JOB, null)).thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.cancel(jobId)); - verify(bigqueryRpcMock).cancel(OTHER_PROJECT, JOB, null); + verify(bigqueryRpcMock).cancelSkipExceptionTranslation(OTHER_PROJECT, JOB, null); } @Test - public void testQueryRequestCompleted() throws InterruptedException { + void testQueryRequestCompleted() throws InterruptedException, IOException { JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.Job jobResponsePb = new com.google.api.services.bigquery.model.Job() @@ -1840,13 +2273,13 @@ public void testQueryRequestCompleted() throws InterruptedException { .setTotalRows(BigInteger.valueOf(1L)) .setSchema(TABLE_SCHEMA.toPb()); - when(bigqueryRpcMock.create( + when(bigqueryRpcMock.createSkipExceptionTranslation( JOB_INFO.toPb(), Collections.emptyMap())) .thenReturn(jobResponsePb); - when(bigqueryRpcMock.getQueryResults( - PROJECT, JOB, null, BigQueryImpl.optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) .thenReturn(responsePb); - when(bigqueryRpcMock.listTableData( + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( PROJECT, DATASET, TABLE, Collections.emptyMap())) .thenReturn( new TableDataList() @@ -1863,17 +2296,19 @@ public void testQueryRequestCompleted() throws InterruptedException { assertThat(row.get(1).getLongValue()).isEqualTo(1); } verify(bigqueryRpcMock) - .create(JOB_INFO.toPb(), Collections.emptyMap()); + .createSkipExceptionTranslation( + JOB_INFO.toPb(), Collections.emptyMap()); verify(bigqueryRpcMock) - .getQueryResults( - PROJECT, JOB, null, BigQueryImpl.optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); + .getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); verify(bigqueryRpcMock) - .listTableData(PROJECT, DATASET, TABLE, Collections.emptyMap()); + .listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, Collections.emptyMap()); } @Test - public void testFastQueryRequestCompleted() throws InterruptedException { + void testFastQueryRequestCompleted() throws InterruptedException, IOException { com.google.api.services.bigquery.model.QueryResponse queryResponsePb = new com.google.api.services.bigquery.model.QueryResponse() .setCacheHit(false) @@ -1885,7 +2320,7 @@ public void testFastQueryRequestCompleted() throws InterruptedException { .setTotalBytesProcessed(42L) .setTotalRows(BigInteger.valueOf(1L)); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), requestPbCapture.capture())) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) .thenReturn(queryResponsePb); bigquery = options.getService(); @@ -1906,12 +2341,55 @@ public void testFastQueryRequestCompleted() throws InterruptedException { QUERY_JOB_CONFIGURATION_FOR_QUERY.getDefaultDataset().getDataset(), requestPb.getDefaultDataset().getDatasetId()); assertEquals(QUERY_JOB_CONFIGURATION_FOR_QUERY.useQueryCache(), requestPb.getUseQueryCache()); + assertNull(requestPb.getLocation()); - verify(bigqueryRpcMock).queryRpc(eq(PROJECT), 
requestPbCapture.capture()); + verify(bigqueryRpcMock) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); } @Test - public void testFastQueryMultiplePages() throws InterruptedException { + void testFastQueryRequestCompletedWithLocation() throws InterruptedException, IOException { + com.google.api.services.bigquery.model.QueryResponse queryResponsePb = + new com.google.api.services.bigquery.model.QueryResponse() + .setCacheHit(false) + .setJobComplete(true) + .setKind("bigquery#queryResponse") + .setPageToken(null) + .setRows(ImmutableList.of(TABLE_ROW)) + .setSchema(TABLE_SCHEMA.toPb()) + .setTotalBytesProcessed(42L) + .setTotalRows(BigInteger.valueOf(1L)); + + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) + .thenReturn(queryResponsePb); + + BigQueryOptions options = createBigQueryOptionsForProjectWithLocation(PROJECT, rpcFactoryMock); + bigquery = options.getService(); + TableResult result = bigquery.query(QUERY_JOB_CONFIGURATION_FOR_QUERY); + assertNull(result.getNextPage()); + assertNull(result.getNextPageToken()); + assertFalse(result.hasNextPage()); + assertThat(result.getSchema()).isEqualTo(TABLE_SCHEMA); + assertThat(result.getTotalRows()).isEqualTo(1); + for (FieldValueList row : result.getValues()) { + assertThat(row.get(0).getBooleanValue()).isFalse(); + assertThat(row.get(1).getLongValue()).isEqualTo(1); + } + + QueryRequest requestPb = requestPbCapture.getValue(); + assertEquals(QUERY_JOB_CONFIGURATION_FOR_QUERY.getQuery(), requestPb.getQuery()); + assertEquals( + QUERY_JOB_CONFIGURATION_FOR_QUERY.getDefaultDataset().getDataset(), + requestPb.getDefaultDataset().getDatasetId()); + assertEquals(QUERY_JOB_CONFIGURATION_FOR_QUERY.useQueryCache(), requestPb.getUseQueryCache()); + assertEquals(LOCATION, requestPb.getLocation()); + + verify(bigqueryRpcMock) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); + } + + @Test + void testFastQueryMultiplePages() throws InterruptedException, IOException { JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.Job responseJob = new com.google.api.services.bigquery.model.Job() @@ -1920,12 +2398,10 @@ public void testFastQueryMultiplePages() throws InterruptedException { .setId(JOB) .setStatus(new com.google.api.services.bigquery.model.JobStatus().setState("DONE")); responseJob.getConfiguration().getQuery().setDestinationTable(TABLE_ID.toPb()); - when(bigqueryRpcMock.getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)).thenReturn(responseJob); - when(bigqueryRpcMock.listTableData( - PROJECT, - DATASET, - TABLE, - BigQueryImpl.optionMap(BigQuery.TableDataListOption.pageToken(CURSOR)))) + when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + .thenReturn(responseJob); + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, optionMap(BigQuery.TableDataListOption.pageToken(CURSOR)))) .thenReturn( new TableDataList() .setPageToken(CURSOR) @@ -1944,7 +2420,7 @@ public void testFastQueryMultiplePages() throws InterruptedException { .setTotalBytesProcessed(42L) .setTotalRows(BigInteger.valueOf(1L)); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), requestPbCapture.capture())) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) .thenReturn(queryResponsePb); bigquery = options.getService(); @@ -1960,18 +2436,16 @@ public void testFastQueryMultiplePages() throws InterruptedException { 
requestPb.getDefaultDataset().getDatasetId()); assertEquals(QUERY_JOB_CONFIGURATION_FOR_QUERY.useQueryCache(), requestPb.getUseQueryCache()); - verify(bigqueryRpcMock).getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); verify(bigqueryRpcMock) - .listTableData( - PROJECT, - DATASET, - TABLE, - BigQueryImpl.optionMap(BigQuery.TableDataListOption.pageToken(CURSOR))); - verify(bigqueryRpcMock).queryRpc(eq(PROJECT), requestPbCapture.capture()); + .listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, optionMap(BigQuery.TableDataListOption.pageToken(CURSOR))); + verify(bigqueryRpcMock) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); } @Test - public void testFastQuerySlowDdl() throws InterruptedException { + void testFastQuerySlowDdl() throws InterruptedException, IOException { // mock new fast query path response when running a query that takes more than 10s JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.QueryResponse queryResponsePb = @@ -1998,14 +2472,16 @@ public void testFastQuerySlowDdl() throws InterruptedException { .setTotalRows(BigInteger.valueOf(1L)) .setSchema(TABLE_SCHEMA.toPb()); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), requestPbCapture.capture())) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) .thenReturn(queryResponsePb); responseJob.getConfiguration().getQuery().setDestinationTable(TABLE_ID.toPb()); - when(bigqueryRpcMock.getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)).thenReturn(responseJob); - when(bigqueryRpcMock.getQueryResults( - PROJECT, JOB, null, BigQueryImpl.optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) + when(bigqueryRpcMock.getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + .thenReturn(responseJob); + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) .thenReturn(queryResultsResponsePb); - when(bigqueryRpcMock.listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS)) .thenReturn(new TableDataList().setRows(ImmutableList.of(TABLE_ROW)).setTotalRows(1L)); bigquery = options.getService(); @@ -2024,16 +2500,18 @@ public void testFastQuerySlowDdl() throws InterruptedException { requestPb.getDefaultDataset().getDatasetId()); assertEquals(QUERY_JOB_CONFIGURATION_FOR_QUERY.useQueryCache(), requestPb.getUseQueryCache()); - verify(bigqueryRpcMock).queryRpc(eq(PROJECT), requestPbCapture.capture()); - verify(bigqueryRpcMock).getJob(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); verify(bigqueryRpcMock) - .getQueryResults( - PROJECT, JOB, null, BigQueryImpl.optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); + verify(bigqueryRpcMock).getJobSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, EMPTY_RPC_OPTIONS); } @Test - public void testQueryRequestCompletedOptions() throws InterruptedException { + void testQueryRequestCompletedOptions() throws InterruptedException, IOException { 
JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.Job jobResponsePb = new com.google.api.services.bigquery.model.Job() @@ -2053,7 +2531,7 @@ public void testQueryRequestCompletedOptions() throws InterruptedException { .setTotalRows(BigInteger.valueOf(1L)) .setSchema(TABLE_SCHEMA.toPb()); - when(bigqueryRpcMock.create( + when(bigqueryRpcMock.createSkipExceptionTranslation( JOB_INFO.toPb(), Collections.emptyMap())) .thenReturn(jobResponsePb); @@ -2061,10 +2539,10 @@ public void testQueryRequestCompletedOptions() throws InterruptedException { QueryResultsOption pageSizeOption = QueryResultsOption.pageSize(42L); optionMap.put(pageSizeOption.getRpcOption(), pageSizeOption.getValue()); - when(bigqueryRpcMock.getQueryResults( - PROJECT, JOB, null, BigQueryImpl.optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) .thenReturn(responsePb); - when(bigqueryRpcMock.listTableData(PROJECT, DATASET, TABLE, optionMap)) + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, optionMap)) .thenReturn( new TableDataList() .setPageToken("") @@ -2081,15 +2559,17 @@ public void testQueryRequestCompletedOptions() throws InterruptedException { assertThat(row.get(1).getLongValue()).isEqualTo(1); } verify(bigqueryRpcMock) - .create(JOB_INFO.toPb(), Collections.emptyMap()); + .createSkipExceptionTranslation( + JOB_INFO.toPb(), Collections.emptyMap()); verify(bigqueryRpcMock) - .getQueryResults( - PROJECT, JOB, null, BigQueryImpl.optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); - verify(bigqueryRpcMock).listTableData(PROJECT, DATASET, TABLE, optionMap); + .getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); + verify(bigqueryRpcMock) + .listTableDataSkipExceptionTranslation(PROJECT, DATASET, TABLE, optionMap); } @Test - public void testQueryRequestCompletedOnSecondAttempt() throws InterruptedException { + void testQueryRequestCompletedOnSecondAttempt() throws InterruptedException, IOException { JobId queryJob = JobId.of(PROJECT, JOB); com.google.api.services.bigquery.model.Job jobResponsePb1 = new com.google.api.services.bigquery.model.Job() @@ -2114,16 +2594,16 @@ public void testQueryRequestCompletedOnSecondAttempt() throws InterruptedExcepti .setTotalRows(BigInteger.valueOf(1L)) .setSchema(TABLE_SCHEMA.toPb()); - when(bigqueryRpcMock.create( + when(bigqueryRpcMock.createSkipExceptionTranslation( JOB_INFO.toPb(), Collections.emptyMap())) .thenReturn(jobResponsePb1); - when(bigqueryRpcMock.getQueryResults( - PROJECT, JOB, null, BigQueryImpl.optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) .thenReturn(responsePb1); - when(bigqueryRpcMock.getQueryResults( - PROJECT, JOB, null, BigQueryImpl.optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS))) .thenReturn(responsePb2); - when(bigqueryRpcMock.listTableData( + when(bigqueryRpcMock.listTableDataSkipExceptionTranslation( PROJECT, DATASET, TABLE, Collections.emptyMap())) .thenReturn( new TableDataList() @@ -2140,19 +2620,44 @@ public void testQueryRequestCompletedOnSecondAttempt() throws InterruptedExcepti assertThat(row.get(1).getLongValue()).isEqualTo(1); } verify(bigqueryRpcMock) - .create(JOB_INFO.toPb(), 
Collections.emptyMap()); + .createSkipExceptionTranslation( + JOB_INFO.toPb(), Collections.emptyMap()); verify(bigqueryRpcMock) - .getQueryResults( - PROJECT, JOB, null, BigQueryImpl.optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); + .getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); verify(bigqueryRpcMock) - .getQueryResults( - PROJECT, JOB, null, BigQueryImpl.optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); + .getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, optionMap(Job.DEFAULT_QUERY_WAIT_OPTIONS)); verify(bigqueryRpcMock) - .listTableData(PROJECT, DATASET, TABLE, Collections.emptyMap()); + .listTableDataSkipExceptionTranslation( + PROJECT, DATASET, TABLE, Collections.emptyMap()); } @Test - public void testGetQueryResults() { + void testQueryWithTimeoutSetsTimeout() throws InterruptedException, IOException { + com.google.api.services.bigquery.model.QueryResponse queryResponsePb = + new com.google.api.services.bigquery.model.QueryResponse() + .setCacheHit(false) + .setJobComplete(true) + .setKind("bigquery#queryResponse") + .setPageToken(null) + .setRows(ImmutableList.of(TABLE_ROW)) + .setSchema(TABLE_SCHEMA.toPb()) + .setTotalBytesProcessed(42L) + .setTotalRows(BigInteger.valueOf(1L)); + + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) + .thenReturn(queryResponsePb); + + bigquery = options.getService(); + Object result = bigquery.queryWithTimeout(QUERY_JOB_CONFIGURATION_FOR_QUERY, null, 1000L); + assertTrue(result instanceof TableResult); + QueryRequest requestPb = requestPbCapture.getValue(); + assertEquals((Long) 1000L, requestPb.getTimeoutMs()); + } + + @Test + void testGetQueryResults() throws IOException { JobId queryJob = JobId.of(JOB); GetQueryResultsResponse responsePb = new GetQueryResultsResponse() @@ -2164,17 +2669,63 @@ public void testGetQueryResults() { .setPageToken(CURSOR) .setTotalBytesProcessed(42L) .setTotalRows(BigInteger.valueOf(1L)); - when(bigqueryRpcMock.getQueryResults(PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenReturn(responsePb); bigquery = options.getService(); QueryResponse response = bigquery.getQueryResults(queryJob); assertEquals(true, response.getCompleted()); assertEquals(null, response.getSchema()); - verify(bigqueryRpcMock).getQueryResults(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getQueryResultsSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); } @Test - public void testGetQueryResultsWithProject() { + void testGetQueryResultsRetry() throws IOException { + JobId queryJob = JobId.of(JOB); + GetQueryResultsResponse responsePb = + new GetQueryResultsResponse() + .setEtag("etag") + .setJobReference(queryJob.toPb()) + .setRows(ImmutableList.of(TABLE_ROW)) + .setJobComplete(true) + .setCacheHit(false) + .setPageToken(CURSOR) + .setTotalBytesProcessed(42L) + .setTotalRows(BigInteger.valueOf(1L)); + + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + .thenThrow(new BigQueryException(500, "InternalError")) + .thenThrow(new BigQueryException(502, "Bad Gateway")) + .thenThrow(new BigQueryException(503, "Service Unavailable")) + .thenThrow(new BigQueryException(504, "Gateway Timeout")) + .thenThrow( + new BigQueryException( + 400, + BigQueryErrorMessages + .RATE_LIMIT_EXCEEDED_MSG)) // retried based on RATE_LIMIT_EXCEEDED_MSG +
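// after four retryable status codes and one retryable rate-limit message, the sixth call succeeds (matching times(6) in the verification below) +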
.thenReturn(responsePb); + + bigquery = + options.toBuilder() + .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) + .build() + .getService(); + + QueryResponse response = bigquery.getQueryResults(queryJob); + assertEquals(true, response.getCompleted()); + assertEquals(null, response.getSchema()); + // IMP: Unable to test for idempotency of the requests using getQueryResults(PROJECT, JOB, null, + // EMPTY_RPC_OPTIONS) as there is no + // identifier in this method that could potentially differ between attempts and be used to + // establish idempotency + verify(bigqueryRpcMock, times(6)) + .getQueryResultsSkipExceptionTranslation(PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + } + + @Test + void testGetQueryResultsWithProject() throws IOException { JobId queryJob = JobId.of(OTHER_PROJECT, JOB); GetQueryResultsResponse responsePb = new GetQueryResultsResponse() @@ -2186,17 +2737,19 @@ public void testGetQueryResultsWithProject() { .setPageToken(CURSOR) .setTotalBytesProcessed(42L) .setTotalRows(BigInteger.valueOf(1L)); - when(bigqueryRpcMock.getQueryResults(OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( + OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS)) .thenReturn(responsePb); bigquery = options.getService(); QueryResponse response = bigquery.getQueryResults(queryJob); assertTrue(response.getCompleted()); assertEquals(null, response.getSchema()); - verify(bigqueryRpcMock).getQueryResults(OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getQueryResultsSkipExceptionTranslation(OTHER_PROJECT, JOB, null, EMPTY_RPC_OPTIONS); } @Test - public void testGetQueryResultsWithOptions() { + void testGetQueryResultsWithOptions() throws IOException { JobId queryJob = JobId.of(PROJECT, JOB); GetQueryResultsResponse responsePb = new GetQueryResultsResponse() @@ -2207,7 +2760,8 @@ public void testGetQueryResultsWithOptions() { .setPageToken(CURSOR) .setTotalBytesProcessed(42L) .setTotalRows(BigInteger.valueOf(1L)); - when(bigqueryRpcMock.getQueryResults(PROJECT, JOB, null, QUERY_RESULTS_OPTIONS)) + when(bigqueryRpcMock.getQueryResultsSkipExceptionTranslation( + PROJECT, JOB, null, QUERY_RESULTS_OPTIONS)) .thenReturn(responsePb); bigquery = options.getService(); QueryResponse response = @@ -2219,84 +2773,77 @@ public void testGetQueryResultsWithOptions() { QUERY_RESULTS_OPTION_PAGE_TOKEN); assertEquals(true, response.getCompleted()); assertEquals(null, response.getSchema()); - verify(bigqueryRpcMock).getQueryResults(PROJECT, JOB, null, QUERY_RESULTS_OPTIONS); + verify(bigqueryRpcMock) + .getQueryResultsSkipExceptionTranslation(PROJECT, JOB, null, QUERY_RESULTS_OPTIONS); } @Test - public void testGetDatasetRetryableException() { - when(bigqueryRpcMock.getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + void testGetDatasetRetryableException() throws IOException { + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(500, "InternalError")) .thenReturn(DATASET_INFO_WITH_PROJECT.toPb()); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); Dataset dataset = bigquery.getDataset(DATASET); assertEquals( new Dataset(bigquery, new DatasetInfo.BuilderImpl(DATASET_INFO_WITH_PROJECT)), dataset); - verify(bigqueryRpcMock, times(2)).getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock, times(2)) + .getDatasetSkipExceptionTranslation(PROJECT, DATASET,
EMPTY_RPC_OPTIONS); } @Test - public void testNonRetryableException() { + void testNonRetryableException() throws IOException { String exceptionMessage = "Not Implemented"; - when(bigqueryRpcMock.getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(501, exceptionMessage)); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); - try { - bigquery.getDataset(DatasetId.of(DATASET)); - Assert.fail(); - } catch (BigQueryException ex) { - Assert.assertEquals(exceptionMessage, ex.getMessage()); - } - verify(bigqueryRpcMock).getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + BigQueryException ex = + Assertions.assertThrows( + BigQueryException.class, () -> bigquery.getDataset(DatasetId.of(DATASET))); + assertEquals(exceptionMessage, ex.getMessage()); + verify(bigqueryRpcMock).getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testRuntimeException() { + void testRuntimeException() throws IOException { String exceptionMessage = "Artificial runtime exception"; - when(bigqueryRpcMock.getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) .thenThrow(new RuntimeException(exceptionMessage)); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); - try { - bigquery.getDataset(DATASET); - Assert.fail(); - } catch (BigQueryException ex) { - Assert.assertTrue(ex.getMessage().endsWith(exceptionMessage)); - } - verify(bigqueryRpcMock).getDataset(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + BigQueryException ex = + Assertions.assertThrows(BigQueryException.class, () -> bigquery.getDataset(DATASET)); + assertTrue(ex.getMessage().endsWith(exceptionMessage)); + verify(bigqueryRpcMock).getDatasetSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testQueryDryRun() throws Exception { + void testQueryDryRun() throws Exception { // https://github.com/googleapis/google-cloud-java/issues/2479 - try { - options - .toBuilder() - .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) - .build() - .getService() - .query(QueryJobConfiguration.newBuilder("foo").setDryRun(true).build()); - Assert.fail(); - } catch (UnsupportedOperationException ex) { - Assert.assertNotNull(ex.getMessage()); - } + UnsupportedOperationException ex = + Assertions.assertThrows( + UnsupportedOperationException.class, + () -> + options.toBuilder() + .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) + .build() + .getService() + .query(QueryJobConfiguration.newBuilder("foo").setDryRun(true).build())); + Assertions.assertNotNull(ex.getMessage()); } @Test - public void testFastQuerySQLShouldRetry() throws Exception { + void testFastQuerySQLShouldRetry() throws Exception { com.google.api.services.bigquery.model.QueryResponse responsePb = new com.google.api.services.bigquery.model.QueryResponse() .setCacheHit(false) @@ -2307,7 +2854,7 @@ public void testFastQuerySQLShouldRetry() throws Exception { .setTotalRows(BigInteger.valueOf(1L)) .setSchema(TABLE_SCHEMA.toPb()); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), requestPbCapture.capture())) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) .thenThrow(new BigQueryException(500,
"InternalError")) .thenThrow(new BigQueryException(502, "Bad Gateway")) .thenThrow(new BigQueryException(503, "Service Unavailable")) @@ -2315,8 +2862,7 @@ public void testFastQuerySQLShouldRetry() throws Exception { .thenReturn(responsePb); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); @@ -2333,11 +2879,12 @@ public void testFastQuerySQLShouldRetry() throws Exception { } assertTrue(idempotent); - verify(bigqueryRpcMock, times(5)).queryRpc(eq(PROJECT), requestPbCapture.capture()); + verify(bigqueryRpcMock, times(5)) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); } @Test - public void testFastQueryDMLShouldRetry() throws Exception { + void testFastQueryDMLShouldRetry() throws Exception { com.google.api.services.bigquery.model.QueryResponse responsePb = new com.google.api.services.bigquery.model.QueryResponse() .setCacheHit(false) @@ -2348,7 +2895,7 @@ public void testFastQueryDMLShouldRetry() throws Exception { .setNumDmlAffectedRows(1L) .setSchema(TABLE_SCHEMA.toPb()); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), requestPbCapture.capture())) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) .thenThrow(new BigQueryException(500, "InternalError")) .thenThrow(new BigQueryException(502, "Bad Gateway")) .thenThrow(new BigQueryException(503, "Service Unavailable")) @@ -2356,8 +2903,7 @@ public void testFastQueryDMLShouldRetry() throws Exception { .thenReturn(responsePb); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); @@ -2374,11 +2920,84 @@ public void testFastQueryDMLShouldRetry() throws Exception { } assertTrue(idempotent); - verify(bigqueryRpcMock, times(5)).queryRpc(eq(PROJECT), requestPbCapture.capture()); + verify(bigqueryRpcMock, times(5)) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); } @Test - public void testFastQueryDDLShouldRetry() throws Exception { + void testFastQueryRateLimitIdempotency() throws Exception { + com.google.api.services.bigquery.model.QueryResponse responsePb = + new com.google.api.services.bigquery.model.QueryResponse() + .setCacheHit(false) + .setJobComplete(true) + .setRows(ImmutableList.of(TABLE_ROW)) + .setPageToken(null) + .setTotalBytesProcessed(42L) + .setNumDmlAffectedRows(1L) + .setSchema(TABLE_SCHEMA.toPb()); + + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) + .thenThrow(new BigQueryException(500, "InternalError")) + .thenThrow(new BigQueryException(502, "Bad Gateway")) + .thenThrow(new BigQueryException(503, "Service Unavailable")) + .thenThrow(new BigQueryException(504, "Gateway Timeout")) + .thenThrow( + new BigQueryException( + 400, RATE_LIMIT_ERROR_MSG)) // retried based on RATE_LIMIT_EXCEEDED_MSG + .thenReturn(responsePb); + + bigquery = + options.toBuilder() + .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) + .build() + .getService(); + + TableResult response = bigquery.query(QUERY_JOB_CONFIGURATION_FOR_DMLQUERY); + assertEquals(TABLE_SCHEMA, response.getSchema()); + assertEquals(1, response.getTotalRows()); + + List<QueryRequest> allRequests = requestPbCapture.getAllValues(); + boolean idempotent = true; + String firstRequestId = allRequests.get(0).getRequestId(); + for (QueryRequest request : allRequests) { + idempotent = + idempotent + && request + .getRequestId() + .equals(firstRequestId); // all the
requestIds should be the same + } + + assertTrue(idempotent); + verify(bigqueryRpcMock, times(6)) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); + } + + @Test + void testRateLimitRegEx() throws Exception { + String msg2 = + "Job eceeded rate limits: Your table exceeded quota for table update operations. For more information, see https://cloud.google.com/bigquery/docs/troubleshoot-quotas"; + String msg3 = "exceeded rate exceeded quota for table update"; + String msg4 = "exceeded rate limits"; + assertTrue( + BigQueryRetryAlgorithm.matchRegEx( + BigQueryErrorMessages.RetryRegExPatterns.RATE_LIMIT_EXCEEDED_REGEX, + RATE_LIMIT_ERROR_MSG)); + assertFalse( + BigQueryRetryAlgorithm.matchRegEx( + BigQueryErrorMessages.RetryRegExPatterns.RATE_LIMIT_EXCEEDED_REGEX, + msg2.toLowerCase())); + assertFalse( + BigQueryRetryAlgorithm.matchRegEx( + BigQueryErrorMessages.RetryRegExPatterns.RATE_LIMIT_EXCEEDED_REGEX, + msg3.toLowerCase())); + assertTrue( + BigQueryRetryAlgorithm.matchRegEx( + BigQueryErrorMessages.RetryRegExPatterns.RATE_LIMIT_EXCEEDED_REGEX, + msg4.toLowerCase())); + } + + @Test + void testFastQueryDDLShouldRetry() throws Exception { com.google.api.services.bigquery.model.QueryResponse responsePb = new com.google.api.services.bigquery.model.QueryResponse() .setCacheHit(false) @@ -2388,7 +3007,7 @@ public void testFastQueryDDLShouldRetry() throws Exception { .setTotalBytesProcessed(42L) .setSchema(TABLE_SCHEMA.toPb()); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), requestPbCapture.capture())) + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) .thenThrow(new BigQueryException(500, "InternalError")) .thenThrow(new BigQueryException(502, "Bad Gateway")) .thenThrow(new BigQueryException(503, "Service Unavailable")) @@ -2396,8 +3015,7 @@ public void testFastQueryDDLShouldRetry() throws Exception { .thenReturn(responsePb); bigquery = - options - .toBuilder() + options.toBuilder() .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) .build() .getService(); @@ -2414,11 +3032,12 @@ public void testFastQueryDDLShouldRetry() throws Exception { } assertTrue(idempotent); - verify(bigqueryRpcMock, times(5)).queryRpc(eq(PROJECT), requestPbCapture.capture()); + verify(bigqueryRpcMock, times(5)) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); } @Test - public void testFastQueryBigQueryException() throws InterruptedException { + void testFastQueryBigQueryException() throws InterruptedException, IOException { List errorProtoList = ImmutableList.of( new ErrorProto() @@ -2435,15 +3054,14 @@ public void testFastQueryBigQueryException() throws InterruptedException { .setPageToken(null) .setErrors(errorProtoList); - when(bigqueryRpcMock.queryRpc(eq(PROJECT), requestPbCapture.capture())).thenReturn(responsePb); + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture())) + .thenReturn(responsePb); bigquery = options.getService(); - try { - bigquery.query(QUERY_JOB_CONFIGURATION_FOR_QUERY); - fail("BigQueryException expected"); - } catch (BigQueryException ex) { - assertEquals(Lists.transform(errorProtoList, BigQueryError.FROM_PB_FUNCTION), ex.getErrors()); - } + BigQueryException ex = + Assertions.assertThrows( + BigQueryException.class, () -> bigquery.query(QUERY_JOB_CONFIGURATION_FOR_QUERY)); + assertEquals(Lists.transform(errorProtoList, BigQueryError.FROM_PB_FUNCTION), ex.getErrors()); QueryRequest requestPb = requestPbCapture.getValue(); 
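+ // even though the query failed, the captured request should mirror QUERY_JOB_CONFIGURATION_FOR_QUERY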
assertEquals(QUERY_JOB_CONFIGURATION_FOR_QUERY.getQuery(), requestPb.getQuery()); @@ -2451,122 +3069,130 @@ public void testFastQueryBigQueryException() throws InterruptedException { QUERY_JOB_CONFIGURATION_FOR_QUERY.getDefaultDataset().getDataset(), requestPb.getDefaultDataset().getDatasetId()); assertEquals(QUERY_JOB_CONFIGURATION_FOR_QUERY.useQueryCache(), requestPb.getUseQueryCache()); - verify(bigqueryRpcMock).queryRpc(eq(PROJECT), requestPbCapture.capture()); + verify(bigqueryRpcMock) + .queryRpcSkipExceptionTranslation(eq(PROJECT), requestPbCapture.capture()); } @Test - public void testCreateRoutine() { + void testCreateRoutine() throws IOException { RoutineInfo routineInfo = ROUTINE_INFO.setProjectId(OTHER_PROJECT); - when(bigqueryRpcMock.create(routineInfo.toPb(), EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.createSkipExceptionTranslation(routineInfo.toPb(), EMPTY_RPC_OPTIONS)) .thenReturn(routineInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Routine actualRoutine = bigquery.create(routineInfo); assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(routineInfo)), actualRoutine); - verify(bigqueryRpcMock).create(routineInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).createSkipExceptionTranslation(routineInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testGetRoutine() { - when(bigqueryRpcMock.getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) + void testGetRoutine() throws IOException { + when(bigqueryRpcMock.getRoutineSkipExceptionTranslation( + PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) .thenReturn(ROUTINE_INFO.toPb()); bigquery = options.getService(); Routine routine = bigquery.getRoutine(DATASET, ROUTINE); assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO)), routine); - verify(bigqueryRpcMock).getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getRoutineSkipExceptionTranslation(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); } @Test - public void testGetRoutineWithRountineId() { - when(bigqueryRpcMock.getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) + void testGetRoutineWithRountineId() throws IOException { + when(bigqueryRpcMock.getRoutineSkipExceptionTranslation( + PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) .thenReturn(ROUTINE_INFO.toPb()); bigquery = options.getService(); Routine routine = bigquery.getRoutine(ROUTINE_ID); assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO)), routine); - verify(bigqueryRpcMock).getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .getRoutineSkipExceptionTranslation(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); } @Test - public void testGetRoutineWithEnabledThrowNotFoundException() { - when(bigqueryRpcMock.getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) - .thenReturn(null) + void testGetRoutineWithEnabledThrowNotFoundException() throws IOException { + when(bigqueryRpcMock.getRoutineSkipExceptionTranslation( + PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS)) .thenThrow(new BigQueryException(404, "Routine not found")); options.setThrowNotFound(true); bigquery = options.getService(); - try { - Routine routine = bigquery.getRoutine(ROUTINE_ID); - fail(); - } catch (BigQueryException ex) { - assertEquals("Routine not found", ex.getMessage()); - } - verify(bigqueryRpcMock).getRoutine(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); + BigQueryException ex = + 
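+ // with setThrowNotFound(true), the 404 surfaces as a thrown BigQueryException rather than a null Routine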
Assertions.assertThrows(BigQueryException.class, () -> bigquery.getRoutine(ROUTINE_ID)); + assertEquals("Routine not found", ex.getMessage()); + verify(bigqueryRpcMock) + .getRoutineSkipExceptionTranslation(PROJECT, DATASET, ROUTINE, EMPTY_RPC_OPTIONS); } @Test - public void testUpdateRoutine() { + void testUpdateRoutine() throws IOException { RoutineInfo updatedRoutineInfo = - ROUTINE_INFO - .setProjectId(OTHER_PROJECT) - .toBuilder() + ROUTINE_INFO.setProjectId(OTHER_PROJECT).toBuilder() .setDescription("newDescription") .build(); - when(bigqueryRpcMock.update(updatedRoutineInfo.toPb(), EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.updateSkipExceptionTranslation( + updatedRoutineInfo.toPb(), EMPTY_RPC_OPTIONS)) .thenReturn(updatedRoutineInfo.toPb()); BigQueryOptions bigQueryOptions = createBigQueryOptionsForProject(OTHER_PROJECT, rpcFactoryMock); bigquery = bigQueryOptions.getService(); Routine routine = bigquery.update(updatedRoutineInfo); assertEquals(new Routine(bigquery, new RoutineInfo.BuilderImpl(updatedRoutineInfo)), routine); - verify(bigqueryRpcMock).update(updatedRoutineInfo.toPb(), EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .updateSkipExceptionTranslation(updatedRoutineInfo.toPb(), EMPTY_RPC_OPTIONS); } @Test - public void testListRoutines() { + void testListRoutines() throws IOException { bigquery = options.getService(); ImmutableList routineList = ImmutableList.of(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO))); Tuple> result = Tuple.of(CURSOR, Iterables.transform(routineList, RoutineInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listRoutinesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page page = bigquery.listRoutines(DATASET); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(routineList.toArray(), Iterables.toArray(page.getValues(), Routine.class)); - verify(bigqueryRpcMock).listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .listRoutinesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testListRoutinesWithDatasetId() { + void testListRoutinesWithDatasetId() throws IOException { bigquery = options.getService(); ImmutableList routineList = ImmutableList.of(new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO))); Tuple> result = Tuple.of(CURSOR, Iterables.transform(routineList, RoutineInfo.TO_PB_FUNCTION)); - when(bigqueryRpcMock.listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS)).thenReturn(result); + when(bigqueryRpcMock.listRoutinesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS)) + .thenReturn(result); Page page = bigquery.listRoutines(DatasetId.of(PROJECT, DATASET)); assertEquals(CURSOR, page.getNextPageToken()); assertArrayEquals(routineList.toArray(), Iterables.toArray(page.getValues(), Routine.class)); - verify(bigqueryRpcMock).listRoutines(PROJECT, DATASET, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .listRoutinesSkipExceptionTranslation(PROJECT, DATASET, EMPTY_RPC_OPTIONS); } @Test - public void testDeleteRoutine() { - when(bigqueryRpcMock.deleteRoutine(PROJECT, DATASET, ROUTINE)).thenReturn(true); + void testDeleteRoutine() throws IOException { + when(bigqueryRpcMock.deleteRoutineSkipExceptionTranslation(PROJECT, DATASET, ROUTINE)) + .thenReturn(true); bigquery = options.getService(); assertTrue(bigquery.delete(ROUTINE_ID)); - verify(bigqueryRpcMock).deleteRoutine(PROJECT, DATASET, ROUTINE); + 
verify(bigqueryRpcMock).deleteRoutineSkipExceptionTranslation(PROJECT, DATASET, ROUTINE); } @Test - public void testWriteWithJob() throws IOException { + void testWriteWithJob() throws IOException { bigquery = options.getService(); Job job = new Job(bigquery, new JobInfo.BuilderImpl(JOB_INFO)); - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true))) .thenReturn(job.toPb()); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); @@ -2574,24 +3200,25 @@ public void testWriteWithJob() throws IOException { assertEquals(job, writer.getJob()); bigquery.writer(JOB_INFO.getJobId(), LOAD_CONFIGURATION); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock) - .write(eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); + .writeSkipExceptionTranslation( + eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); } @Test - public void testWriteChannel() throws IOException { + void testWriteChannel() throws IOException { bigquery = options.getService(); Job job = new Job(bigquery, new JobInfo.BuilderImpl(JOB_INFO)); - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true))) .thenReturn(job.toPb()); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); @@ -2599,43 +3226,47 @@ public void testWriteChannel() throws IOException { assertEquals(job, writer.getJob()); bigquery.writer(LOAD_CONFIGURATION); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock) - .write(eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); + .writeSkipExceptionTranslation( + eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); } @Test - public void testGetIamPolicy() { + void testGetIamPolicy() throws IOException { final String resourceId = String.format("projects/%s/datasets/%s/tables/%s", PROJECT, DATASET, TABLE); final com.google.api.services.bigquery.model.Policy apiPolicy = PolicyHelper.convertToApiPolicy(SAMPLE_IAM_POLICY); - when(bigqueryRpcMock.getIamPolicy(resourceId, EMPTY_RPC_OPTIONS)).thenReturn(apiPolicy); + when(bigqueryRpcMock.getIamPolicySkipExceptionTranslation(resourceId, EMPTY_RPC_OPTIONS)) + .thenReturn(apiPolicy); bigquery = options.getService(); Policy policy = bigquery.getIamPolicy(TABLE_ID); assertEquals(policy, SAMPLE_IAM_POLICY); - verify(bigqueryRpcMock).getIamPolicy(resourceId, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock).getIamPolicySkipExceptionTranslation(resourceId, EMPTY_RPC_OPTIONS); } @Test - public void testSetIamPolicy() { + 
void testSetIamPolicy() throws IOException { final String resourceId = String.format("projects/%s/datasets/%s/tables/%s", PROJECT, DATASET, TABLE); final com.google.api.services.bigquery.model.Policy apiPolicy = PolicyHelper.convertToApiPolicy(SAMPLE_IAM_POLICY); - when(bigqueryRpcMock.setIamPolicy(resourceId, apiPolicy, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.setIamPolicySkipExceptionTranslation( + resourceId, apiPolicy, EMPTY_RPC_OPTIONS)) .thenReturn(apiPolicy); bigquery = options.getService(); Policy returnedPolicy = bigquery.setIamPolicy(TABLE_ID, SAMPLE_IAM_POLICY); assertEquals(returnedPolicy, SAMPLE_IAM_POLICY); - verify(bigqueryRpcMock).setIamPolicy(resourceId, apiPolicy, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .setIamPolicySkipExceptionTranslation(resourceId, apiPolicy, EMPTY_RPC_OPTIONS); } @Test - public void testTestIamPermissions() { + void testTestIamPermissions() throws IOException { final String resourceId = String.format("projects/%s/datasets/%s/tables/%s", PROJECT, DATASET, TABLE); final List checkedPermissions = ImmutableList.of("foo", "bar", "baz"); @@ -2643,11 +3274,34 @@ public void testTestIamPermissions() { final com.google.api.services.bigquery.model.TestIamPermissionsResponse response = new com.google.api.services.bigquery.model.TestIamPermissionsResponse() .setPermissions(grantedPermissions); - when(bigqueryRpcMock.testIamPermissions(resourceId, checkedPermissions, EMPTY_RPC_OPTIONS)) + when(bigqueryRpcMock.testIamPermissionsSkipExceptionTranslation( + resourceId, checkedPermissions, EMPTY_RPC_OPTIONS)) .thenReturn(response); bigquery = options.getService(); List perms = bigquery.testIamPermissions(TABLE_ID, checkedPermissions); assertEquals(perms, grantedPermissions); - verify(bigqueryRpcMock).testIamPermissions(resourceId, checkedPermissions, EMPTY_RPC_OPTIONS); + verify(bigqueryRpcMock) + .testIamPermissionsSkipExceptionTranslation( + resourceId, checkedPermissions, EMPTY_RPC_OPTIONS); + } + + @Test + void testTestIamPermissionsWhenNoPermissionsGranted() throws IOException { + final String resourceId = + String.format("projects/%s/datasets/%s/tables/%s", PROJECT, DATASET, TABLE); + final List checkedPermissions = ImmutableList.of("foo", "bar", "baz"); + // If caller has no permissions, TestIamPermissionsResponse.permissions will be null + final com.google.api.services.bigquery.model.TestIamPermissionsResponse response = + new com.google.api.services.bigquery.model.TestIamPermissionsResponse() + .setPermissions(null); + when(bigqueryRpcMock.testIamPermissionsSkipExceptionTranslation( + resourceId, checkedPermissions, EMPTY_RPC_OPTIONS)) + .thenReturn(response); + bigquery = options.getService(); + List perms = bigquery.testIamPermissions(TABLE_ID, checkedPermissions); + assertEquals(perms, ImmutableList.of()); + verify(bigqueryRpcMock) + .testIamPermissionsSkipExceptionTranslation( + resourceId, checkedPermissions, EMPTY_RPC_OPTIONS); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java index 4176ec24dc..050deba4af 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java @@ -16,23 +16,80 @@ package com.google.cloud.bigquery; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static 
org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + import com.google.cloud.TransportOptions; -import org.junit.Assert; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) public class BigQueryOptionsTest { @Test - public void testInvalidTransport() { - try { - BigQueryOptions.newBuilder().setTransportOptions(Mockito.mock(TransportOptions.class)); - Assert.fail(); - } catch (IllegalArgumentException expected) { - Assert.assertNotNull(expected.getMessage()); - } + void testInvalidTransport() { + IllegalArgumentException expected = + assertThrows( + IllegalArgumentException.class, + () -> + BigQueryOptions.newBuilder() + .setTransportOptions(Mockito.mock(TransportOptions.class))); + assertNotNull(expected.getMessage()); + } + + @Test + void dataFormatOptions_createdByDefault() { + BigQueryOptions options = BigQueryOptions.newBuilder().setProjectId("project-id").build(); + + assertNotNull(options.getDataFormatOptions()); + assertFalse(options.getDataFormatOptions().useInt64Timestamp()); + assertEquals( + DataFormatOptions.TimestampFormatOptions.TIMESTAMP_OUTPUT_FORMAT_UNSPECIFIED, + options.getDataFormatOptions().timestampFormatOptions()); + } + + @Test + void nonBuilderSetUseInt64Timestamp_capturedInDataFormatOptions() { + BigQueryOptions options = + BigQueryOptions.newBuilder() + .setDataFormatOptions(DataFormatOptions.newBuilder().useInt64Timestamp(false).build()) + .setProjectId("project-id") + .build(); + options.setUseInt64Timestamps(true); + + assertTrue(options.getDataFormatOptions().useInt64Timestamp()); + } + + @Test + void nonBuilderSetUseInt64Timestamp_overridesEverything() { + BigQueryOptions options = BigQueryOptions.newBuilder().setProjectId("project-id").build(); + options.setUseInt64Timestamps(true); + + assertTrue(options.getDataFormatOptions().useInt64Timestamp()); + } + + @Test + void noDataFormatOptions_capturesUseInt64TimestampSetInBuilder() { + BigQueryOptions options = + BigQueryOptions.newBuilder().setUseInt64Timestamps(true).setProjectId("project-id").build(); + + assertTrue(options.getDataFormatOptions().useInt64Timestamp()); + } + + @Test + void dataFormatOptionsSetterHasPrecedence() { + BigQueryOptions options = + BigQueryOptions.newBuilder() + .setProjectId("project-id") + .setDataFormatOptions(DataFormatOptions.newBuilder().useInt64Timestamp(true).build()) + .setUseInt64Timestamps(false) + .build(); + + assertTrue(options.getDataFormatOptions().useInt64Timestamp()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryResultImplTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryResultImplTest.java new file mode 100644 index 0000000000..54d0b8e4e4 --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryResultImplTest.java @@ -0,0 +1,296 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import static com.google.common.truth.Truth.assertThat; + +import com.google.cloud.bigquery.ConnectionImpl.EndOfFieldValueList; +import com.google.cloud.bigquery.FieldValue.Attribute; +import com.google.common.collect.ImmutableList; +import com.google.common.io.BaseEncoding; +import java.math.BigDecimal; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Time; +import java.sql.Timestamp; +import java.time.LocalTime; +import java.util.AbstractList; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.LinkedBlockingDeque; +import org.apache.arrow.vector.util.Text; +import org.junit.jupiter.api.Test; + +class BigQueryResultImplTest { + + private static final Schema SCHEMA = + Schema.of( + Field.newBuilder("boolean", StandardSQLTypeName.BOOL) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("long", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("double", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("string", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("bytes", StandardSQLTypeName.BYTES).setMode(Field.Mode.NULLABLE).build(), + Field.newBuilder("timestamp", StandardSQLTypeName.TIMESTAMP) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("time", StandardSQLTypeName.TIME).setMode(Field.Mode.NULLABLE).build(), + Field.newBuilder("date", StandardSQLTypeName.DATE).setMode(Field.Mode.NULLABLE).build(), + Field.newBuilder("intArray", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REPEATED) + .build(), + Field.newBuilder("stringArray", StandardSQLTypeName.STRING) + .setMode(Field.Mode.REPEATED) + .build()); + + private static final FieldList FIELD_LIST_SCHEMA = + FieldList.of( + Field.of("boolean", LegacySQLTypeName.BOOLEAN), + Field.of("long", LegacySQLTypeName.INTEGER), + Field.of("double", LegacySQLTypeName.FLOAT), + Field.of("string", LegacySQLTypeName.STRING), + Field.of("bytes", LegacySQLTypeName.BYTES), + Field.of("timestamp", LegacySQLTypeName.TIMESTAMP), + Field.of("time", LegacySQLTypeName.TIME), + Field.of("date", LegacySQLTypeName.DATE), + Field.of("intArray", LegacySQLTypeName.INTEGER), + Field.of("stringArray", LegacySQLTypeName.STRING)); + + private static final byte[] BYTES = {0xD, 0xE, 0xA, 0xD}; + private static final String BYTES_BASE64 = BaseEncoding.base64().encode(BYTES); + private static final Timestamp EXPECTED_TIMESTAMP = Timestamp.valueOf("2025-01-02 03:04:05.0"); + private static final String TIME = "20:21:22"; + private static final Time EXPECTED_TIME = Time.valueOf(LocalTime.of(20, 21, 22)); + private static final String DATE = "2020-01-21"; + private static final int DATE_INT = 0; + private static final Date EXPECTED_DATE = java.sql.Date.valueOf(DATE); + private static final ArrayList EXPECTED_INT_ARRAY = + new ArrayList<>(Arrays.asList(0, 1, 2, 3, 4)); + private static final 
String[] STRING_ARRAY = {"str1", "str2", "str3"}; + private static final ArrayList EXPECTED_STRING_ARRAY = + new ArrayList<>(Arrays.asList(STRING_ARRAY)); + private static final int BUFFER_SIZE = 10; + + @Test + void testResultSetFieldValueList() throws InterruptedException, SQLException { + BlockingQueue> buffer = new LinkedBlockingDeque<>(BUFFER_SIZE); + FieldValueList fieldValues = + FieldValueList.of( + ImmutableList.of( + FieldValue.of(Attribute.PRIMITIVE, "false"), + FieldValue.of(Attribute.PRIMITIVE, "1"), + FieldValue.of(Attribute.PRIMITIVE, "1.5"), + FieldValue.of(Attribute.PRIMITIVE, "string_value"), + FieldValue.of(Attribute.PRIMITIVE, BYTES_BASE64), + FieldValue.of( + Attribute.PRIMITIVE, + Long.toString(EXPECTED_TIMESTAMP.getTime() / 1000), + false), // getTime is in milliseconds. + FieldValue.of(Attribute.PRIMITIVE, TIME), + FieldValue.of(Attribute.PRIMITIVE, DATE), + FieldValue.of(Attribute.REPEATED, EXPECTED_INT_ARRAY), + FieldValue.of(Attribute.REPEATED, STRING_ARRAY)), + FIELD_LIST_SCHEMA); + buffer.put(fieldValues); + + FieldValueList nullValues = + FieldValueList.of( + ImmutableList.of( + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.PRIMITIVE, null), + FieldValue.of(Attribute.REPEATED, null), + FieldValue.of(Attribute.REPEATED, null)), + FIELD_LIST_SCHEMA); + buffer.put(nullValues); + + buffer.put(new EndOfFieldValueList()); // End of buffer marker. + + BigQueryResultImpl> bigQueryResult = + new BigQueryResultImpl<>(SCHEMA, 1, buffer, null); + ResultSet resultSet = bigQueryResult.getResultSet(); + assertThat(resultSet.next()).isTrue(); + assertThat(resultSet.getObject("string")).isEqualTo("string_value"); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getString("string")).isEqualTo("string_value"); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getInt("long")).isEqualTo(1); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getLong("long")).isEqualTo(1); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getDouble("double")).isEqualTo(1.5); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getBigDecimal("double")).isEqualTo(BigDecimal.valueOf(1.5)); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getBoolean("boolean")).isFalse(); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getBytes("bytes")).isEqualTo(BYTES); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getTimestamp("timestamp")).isEqualTo(EXPECTED_TIMESTAMP); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getTime("time").getTime()).isEqualTo(EXPECTED_TIME.getTime()); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getDate("date").getTime()).isEqualTo(EXPECTED_DATE.getTime()); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getArray("intArray").getArray()).isEqualTo(EXPECTED_INT_ARRAY); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getArray("stringArray").getArray()).isEqualTo(EXPECTED_STRING_ARRAY); + assertThat(resultSet.wasNull()).isFalse(); + + assertThat(resultSet.next()).isTrue(); + assertThat(resultSet.getObject("string")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + 
assertThat(resultSet.getString("string")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getInt("long")).isEqualTo(0); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getLong("long")).isEqualTo(0); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getDouble("double")).isEqualTo(0.0); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getBigDecimal("double")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getBoolean("boolean")).isFalse(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getBytes("bytes")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getTimestamp("timestamp")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getTime("time")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getDate("date")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getArray("intArray")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getArray("stringArray")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + + assertThat(resultSet.next()).isFalse(); + } + + @Test + void testResultSetReadApi() throws InterruptedException, SQLException { + BlockingQueue buffer = new LinkedBlockingDeque<>(BUFFER_SIZE); + + Map rowValues = new HashMap<>(); + rowValues.put("boolean", false); + rowValues.put("long", 1L); + rowValues.put("double", 1.5); + rowValues.put("string", new Text("string_value")); + rowValues.put("bytes", BYTES); + rowValues.put("timestamp", EXPECTED_TIMESTAMP.getTime() * 1000); + rowValues.put("time", EXPECTED_TIME.getTime() * 1000); + rowValues.put("date", DATE_INT); + rowValues.put("intArray", EXPECTED_INT_ARRAY); + rowValues.put("stringArray", STRING_ARRAY); + buffer.put(new BigQueryResultImpl.Row(rowValues)); + + Map nullValues = new HashMap<>(); + nullValues.put("boolean", null); + nullValues.put("long", null); + nullValues.put("double", null); + nullValues.put("string", null); + nullValues.put("bytes", null); + nullValues.put("timestamp", null); + nullValues.put("time", null); + nullValues.put("date", null); + nullValues.put("intArray", null); + nullValues.put("stringArray", null); + buffer.put(new BigQueryResultImpl.Row(nullValues)); + + buffer.put(new BigQueryResultImpl.Row(null, true)); // End of buffer marker. 
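+ // a Read API producer thread would normally feed this queue; here the rows and the terminal sentinel are enqueued up front so the ResultSet can drain them synchronously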
+ + BigQueryResultImpl<BigQueryResultImpl.Row> bigQueryResult = + new BigQueryResultImpl<>(SCHEMA, 1, buffer, null); + ResultSet resultSet = bigQueryResult.getResultSet(); + assertThat(resultSet.next()).isTrue(); + assertThat(resultSet.getObject("string")).isEqualTo(new Text("string_value")); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getString("string")).isEqualTo("string_value"); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getInt("long")).isEqualTo(1); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getLong("long")).isEqualTo(1); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getDouble("double")).isEqualTo(1.5); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getBigDecimal("double")).isEqualTo(BigDecimal.valueOf(1.5)); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getBoolean("boolean")).isFalse(); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getBytes("bytes")).isEqualTo(BYTES); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getTimestamp("timestamp")).isEqualTo(EXPECTED_TIMESTAMP); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getTime("time").getTime()).isEqualTo(EXPECTED_TIME.getTime()); + assertThat(resultSet.wasNull()).isFalse(); + // Do not check the date value: Date objects do not carry a timezone, but toString() applies the + // JVM default timezone, which causes flakes in non-UTC zones. + assertThat(resultSet.getDate("date")).isNotNull(); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getArray("intArray")).isNotNull(); + assertThat(resultSet.wasNull()).isFalse(); + assertThat(resultSet.getArray("stringArray")).isNotNull(); + assertThat(resultSet.wasNull()).isFalse(); + + assertThat(resultSet.next()).isTrue(); + assertThat(resultSet.getObject("string")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getString("string")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getInt("long")).isEqualTo(0); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getLong("long")).isEqualTo(0); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getDouble("double")).isEqualTo(0.0); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getBigDecimal("double")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getBoolean("boolean")).isFalse(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getBytes("bytes")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getTimestamp("timestamp")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getTime("time")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getDate("date")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getArray("intArray")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + assertThat(resultSet.getArray("stringArray")).isNull(); + assertThat(resultSet.wasNull()).isTrue(); + + assertThat(resultSet.next()).isFalse(); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java index 88fa1595e3..a11d9b923b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java +++
b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigtableOptionsTest.java @@ -18,8 +18,8 @@ import static com.google.common.truth.Truth.assertThat; import com.google.common.collect.ImmutableList; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; public class BigtableOptionsTest { @@ -55,7 +55,7 @@ public class BigtableOptionsTest { .build(); @Test - public void testConstructors() { + void testConstructors() { // column assertThat(COL1.getQualifierEncoded()).isEqualTo("aaa"); assertThat(COL1.getFieldName()).isEqualTo("field1"); @@ -80,41 +80,36 @@ public void testConstructors() { } @Test - public void testNullPointerException() { - try { - BigtableColumnFamily.newBuilder().setFamilyID(null).build(); - Assert.fail(); - } catch (NullPointerException ex) { - assertThat(ex.getMessage()).isNotNull(); - } - try { - BigtableColumnFamily.newBuilder().setColumns(null).build(); - Assert.fail(); - } catch (NullPointerException ex) { - assertThat(ex.getMessage()).isNotNull(); - } - try { - BigtableColumnFamily.newBuilder().setEncoding(null).build(); - Assert.fail(); - } catch (NullPointerException ex) { - assertThat(ex.getMessage()).isNotNull(); - } - try { - BigtableColumnFamily.newBuilder().setOnlyReadLatest(null).build(); - Assert.fail(); - } catch (NullPointerException ex) { - assertThat(ex.getMessage()).isNotNull(); - } - try { - BigtableColumnFamily.newBuilder().setType(null).build(); - Assert.fail(); - } catch (NullPointerException ex) { - assertThat(ex.getMessage()).isNotNull(); - } + void testNullPointerException() { + NullPointerException ex = + Assertions.assertThrows( + NullPointerException.class, + () -> BigtableColumnFamily.newBuilder().setFamilyID(null).build()); + assertThat(ex.getMessage()).isNotNull(); + ex = + Assertions.assertThrows( + NullPointerException.class, + () -> BigtableColumnFamily.newBuilder().setColumns(null).build()); + assertThat(ex.getMessage()).isNotNull(); + ex = + Assertions.assertThrows( + NullPointerException.class, + () -> BigtableColumnFamily.newBuilder().setEncoding(null).build()); + assertThat(ex.getMessage()).isNotNull(); + ex = + Assertions.assertThrows( + NullPointerException.class, + () -> BigtableColumnFamily.newBuilder().setOnlyReadLatest(null).build()); + assertThat(ex.getMessage()).isNotNull(); + ex = + Assertions.assertThrows( + NullPointerException.class, + () -> BigtableColumnFamily.newBuilder().setType(null).build()); + assertThat(ex.getMessage()).isNotNull(); } @Test - public void testIllegalStateException() { + void testIllegalStateException() { try { BigtableColumnFamily.newBuilder().build(); } catch (IllegalStateException ex) { @@ -123,14 +118,14 @@ public void testIllegalStateException() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareBigtableColumn(COL1, BigtableColumn.fromPb(COL1.toPb())); compareBigtableColumnFamily(TESTFAMILY, BigtableColumnFamily.fromPb(TESTFAMILY.toPb())); compareBigtableOptions(OPTIONS, BigtableOptions.fromPb(OPTIONS.toPb())); } @Test - public void testEquals() { + void testEquals() { compareBigtableColumn(COL1, COL1); compareBigtableColumnFamily(TESTFAMILY, TESTFAMILY); assertThat(TESTFAMILY.equals(TESTFAMILY)).isTrue(); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CloneDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CloneDefinitionTest.java new file mode 100644 index 0000000000..1a319c947c --- /dev/null +++ 
b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CloneDefinitionTest.java @@ -0,0 +1,58 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; + +public class CloneDefinitionTest { + private static final TableId BASE_TABLE_ID = TableId.of("DATASET_NAME", "BASE_TABLE_NAME"); + private static final String CLONE_TIME = "2021-05-19T11:32:26.553Z"; + private static final CloneDefinition CLONETABLE_DEFINITION = + CloneDefinition.newBuilder().setBaseTableId(BASE_TABLE_ID).setCloneTime(CLONE_TIME).build(); + + @Test + void testToBuilder() { + compareCloneTableDefinition(CLONETABLE_DEFINITION, CLONETABLE_DEFINITION.toBuilder().build()); + CloneDefinition cloneTableDefinition = + CLONETABLE_DEFINITION.toBuilder().setCloneTime("2021-05-20T11:32:26.553Z").build(); + assertEquals("2021-05-20T11:32:26.553Z", cloneTableDefinition.getCloneTime()); + } + + @Test + void testBuilder() { + assertEquals(BASE_TABLE_ID, CLONETABLE_DEFINITION.getBaseTableId()); + assertEquals(CLONE_TIME, CLONETABLE_DEFINITION.getCloneTime()); + CloneDefinition cloneDefinition = + CloneDefinition.newBuilder().setBaseTableId(BASE_TABLE_ID).setCloneTime(CLONE_TIME).build(); + assertEquals(CLONETABLE_DEFINITION, cloneDefinition); + } + + @Test + void testToAndFromPb() { + CloneDefinition cloneDefinition = CLONETABLE_DEFINITION.toBuilder().build(); + assertTrue(CloneDefinition.fromPb(cloneDefinition.toPb()) instanceof CloneDefinition); + compareCloneTableDefinition(cloneDefinition, CloneDefinition.fromPb(cloneDefinition.toPb())); + } + + private void compareCloneTableDefinition(CloneDefinition expected, CloneDefinition value) { + assertEquals(expected.getBaseTableId(), value.getBaseTableId()); + assertEquals(expected.getCloneTime(), value.getCloneTime()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ColumnReferenceTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ColumnReferenceTest.java new file mode 100644 index 0000000000..0c7c75306c --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ColumnReferenceTest.java @@ -0,0 +1,68 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; + +public class ColumnReferenceTest { + private static final ColumnReference COLUMN_REFERENCE = + ColumnReference.newBuilder() + .setReferencingColumn("column1") + .setReferencedColumn("column2") + .build(); + + @Test + void testToBuilder() { + compareColumnReferenceDefinition(COLUMN_REFERENCE, COLUMN_REFERENCE.toBuilder().build()); + ColumnReference columnReference = + COLUMN_REFERENCE.toBuilder() + .setReferencingColumn("col1") + .setReferencedColumn("col2") + .build(); + assertEquals("col1", columnReference.getReferencingColumn()); + assertEquals("col2", columnReference.getReferencedColumn()); + } + + @Test + void testBuilder() { + assertEquals("column1", COLUMN_REFERENCE.getReferencingColumn()); + assertEquals("column2", COLUMN_REFERENCE.getReferencedColumn()); + ColumnReference columnReference = + COLUMN_REFERENCE + .newBuilder() + .setReferencingColumn("column1") + .setReferencedColumn("column2") + .build(); + assertEquals(COLUMN_REFERENCE, columnReference); + } + + @Test + void testToAndFromPb() { + ColumnReference columnReference = COLUMN_REFERENCE.toBuilder().build(); + assertTrue(ColumnReference.fromPb(columnReference.toPb()) instanceof ColumnReference); + compareColumnReferenceDefinition( + columnReference, ColumnReference.fromPb(columnReference.toPb())); + } + + private void compareColumnReferenceDefinition(ColumnReference expected, ColumnReference value) { + assertEquals(expected.getReferencingColumn(), value.getReferencingColumn()); + assertEquals(expected.getReferencedColumn(), value.getReferencedColumn()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionImplTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionImplTest.java new file mode 100644 index 0000000000..54f9b7a33a --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionImplTest.java @@ -0,0 +1,797 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.doReturn; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import com.google.api.services.bigquery.model.GetQueryResultsResponse; +import com.google.api.services.bigquery.model.QueryParameter; +import com.google.api.services.bigquery.model.QueryParameterType; +import com.google.api.services.bigquery.model.QueryRequest; +import com.google.api.services.bigquery.model.QueryResponse; +import com.google.api.services.bigquery.model.TableCell; +import com.google.api.services.bigquery.model.TableDataList; +import com.google.api.services.bigquery.model.TableRow; +import com.google.api.services.bigquery.model.TableSchema; +import com.google.cloud.ServiceOptions; +import com.google.cloud.Tuple; +import com.google.cloud.bigquery.spi.BigQueryRpcFactory; +import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc; +import com.google.common.collect.ImmutableList; +import com.google.common.util.concurrent.ListenableFuture; +import java.io.IOException; +import java.math.BigInteger; +import java.sql.SQLException; +import java.util.AbstractList; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.LinkedBlockingDeque; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class ConnectionImplTest { + private BigQueryOptions options; + private BigQueryRpcFactory rpcFactoryMock; + private HttpBigQueryRpc bigqueryRpcMock; + private Connection connectionMock; + private BigQuery bigquery; + private ConnectionImpl connection; + private static final String PROJECT = "project"; + private static final String JOB = "job"; + private static final String LOCATION = "US"; + private static final String DEFAULT_TEST_DATASET = "bigquery_test_dataset"; + private static final String PAGE_TOKEN = "ABCD123"; + private static final TableId TABLE_NAME = TableId.of(DEFAULT_TEST_DATASET, PROJECT); + private static final TableCell STRING_CELL = new TableCell().setV("Value"); + private static final TableRow TABLE_ROW = new TableRow().setF(ImmutableList.of(STRING_CELL)); + private static final String SQL_QUERY = + "SELECT county, state_name FROM bigquery_test_dataset.large_data_testing_table limit 2"; + private static final String DRY_RUN_SQL = + "SELECT county, state_name FROM bigquery_test_dataset.large_data_testing_table where country = ?"; + private static final int DEFAULT_PAGE_SIZE = 10000; + private ConnectionSettings connectionSettings; + private static final Schema QUERY_SCHEMA = + Schema.of( + Field.newBuilder("country", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("state_name", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + + private static final Schema QUERY_SCHEMA_WITH_INTERVAL_FIELD = + Schema.of( + 
Field.newBuilder("interval", StandardSQLTypeName.INTERVAL) + .setMode(Field.Mode.NULLABLE) + .build()); + private static final TableSchema FAST_QUERY_TABLESCHEMA = QUERY_SCHEMA.toPb(); + private static final BigQueryResult BQ_RS_MOCK_RES = + new BigQueryResultImpl(QUERY_SCHEMA, 2, null, null); + + private static final BigQueryResult BQ_RS_MOCK_RES_MULTI_PAGE = + new BigQueryResultImpl(QUERY_SCHEMA, 4, null, null); + + private static final JobId QUERY_JOB = JobId.of(PROJECT, JOB).setLocation(LOCATION); + private static final GetQueryResultsResponse GET_QUERY_RESULTS_RESPONSE = + new GetQueryResultsResponse() + .setJobReference(QUERY_JOB.toPb()) + .setRows(ImmutableList.of(TABLE_ROW)) + .setJobComplete(true) + .setCacheHit(false) + .setPageToken(PAGE_TOKEN) + .setTotalBytesProcessed(42L) + .setTotalRows(BigInteger.valueOf(1L)) + .setSchema(FAST_QUERY_TABLESCHEMA); + private static final GetQueryResultsResponse GET_QUERY_RESULTS_RESPONSE_EMPTY = + new GetQueryResultsResponse() + .setJobReference(QUERY_JOB.toPb()) + .setJobComplete(true) + .setCacheHit(false) + .setPageToken(PAGE_TOKEN) + .setTotalBytesProcessed(0L) + .setTotalRows(BigInteger.valueOf(0L)) + .setSchema(FAST_QUERY_TABLESCHEMA); + + private static final GetQueryResultsResponse GET_QUERY_RESULTS_RESPONSE_NULL_SCHEMA = + new GetQueryResultsResponse() + .setJobReference(QUERY_JOB.toPb()) + .setRows(ImmutableList.of(TABLE_ROW)) + .setJobComplete(false) + .setPageToken(PAGE_TOKEN) + .setTotalBytesProcessed(42L) + .setTotalRows(BigInteger.valueOf(1L)) + .setSchema(null); + + private static List TABLE_ROWS = + ImmutableList.of( + new TableRow() + .setF( + ImmutableList.of(new TableCell().setV("Value1"), new TableCell().setV("Value2"))), + new TableRow() + .setF( + ImmutableList.of( + new TableCell().setV("Value3"), new TableCell().setV("Value4")))); + + private BigQueryOptions createBigQueryOptionsForProject( + String project, BigQueryRpcFactory rpcFactory) { + return BigQueryOptions.newBuilder() + .setProjectId(project) + .setServiceRpcFactory(rpcFactory) + .setRetrySettings(ServiceOptions.getNoRetrySettings()) + .build(); + } + + @BeforeEach + void setUp() { + rpcFactoryMock = mock(BigQueryRpcFactory.class); + bigqueryRpcMock = mock(HttpBigQueryRpc.class); + connectionMock = mock(Connection.class); + when(rpcFactoryMock.create(any(BigQueryOptions.class))).thenReturn(bigqueryRpcMock); + options = createBigQueryOptionsForProject(PROJECT, rpcFactoryMock); + bigquery = options.getService(); + + connectionSettings = + ConnectionSettings.newBuilder() + .setDefaultDataset(DatasetId.of(DEFAULT_TEST_DATASET)) + .setNumBufferedRows(DEFAULT_PAGE_SIZE) + .build(); + bigquery = + options.toBuilder() + .setRetrySettings(ServiceOptions.getDefaultRetrySettings()) + .build() + .getService(); + connection = (ConnectionImpl) bigquery.createConnection(connectionSettings); + assertNotNull(connection); + } + + @Test + void testFastQuerySinglePage() throws BigQuerySQLException, IOException { + com.google.api.services.bigquery.model.QueryResponse mockQueryRes = + new QueryResponse().setSchema(FAST_QUERY_TABLESCHEMA).setJobComplete(true); + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation( + any(String.class), any(QueryRequest.class))) + .thenReturn(mockQueryRes); + ConnectionImpl connectionSpy = Mockito.spy(connection); + doReturn(BQ_RS_MOCK_RES) + .when(connectionSpy) + .processQueryResponseResults(any(QueryResponse.class)); + + BigQueryResult res = connectionSpy.executeSelect(SQL_QUERY); + assertEquals(res.getTotalRows(), 2); + 
assertEquals(QUERY_SCHEMA, res.getSchema()); + verify(connectionSpy, times(1)) + .processQueryResponseResults( + any(com.google.api.services.bigquery.model.QueryResponse.class)); + } + + @Test + // NOTE: This doesn't truly paginate. It returns a single response while mocking + // processQueryResponseResults. + void testFastQueryMultiplePages() throws BigQuerySQLException, IOException { + com.google.api.services.bigquery.model.QueryResponse mockQueryRes = + new QueryResponse() + .setSchema(FAST_QUERY_TABLESCHEMA) + .setJobComplete(true) + .setPageToken(PAGE_TOKEN); + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation( + any(String.class), any(QueryRequest.class))) + .thenReturn(mockQueryRes); + ConnectionImpl connectionSpy = Mockito.spy(connection); + + doReturn(BQ_RS_MOCK_RES_MULTI_PAGE) + .when(connectionSpy) + .processQueryResponseResults( + any(com.google.api.services.bigquery.model.QueryResponse.class)); + + BigQueryResult res = connectionSpy.executeSelect(SQL_QUERY); + assertEquals(res.getTotalRows(), 4); + assertEquals(QUERY_SCHEMA, res.getSchema()); + verify(connectionSpy, times(1)) + .processQueryResponseResults( + any(com.google.api.services.bigquery.model.QueryResponse.class)); + } + + @Test + void testClose() throws BigQuerySQLException { + boolean cancelled = connection.close(); + assertTrue(cancelled); + } + + @Test + void testQueryDryRun() throws BigQuerySQLException, IOException { + List<QueryParameter> queryParametersMock = + ImmutableList.of( + new QueryParameter().setParameterType(new QueryParameterType().setType("STRING"))); + com.google.api.services.bigquery.model.JobStatistics2 queryMock = + new com.google.api.services.bigquery.model.JobStatistics2() + .setSchema(FAST_QUERY_TABLESCHEMA) + .setUndeclaredQueryParameters(queryParametersMock); + com.google.api.services.bigquery.model.JobStatistics jobStatsMock = + new com.google.api.services.bigquery.model.JobStatistics() + .setCreationTime(1234L) + .setStartTime(5678L) + .setQuery(queryMock); + com.google.api.services.bigquery.model.JobConfigurationQuery jobConfigurationQuery = + new com.google.api.services.bigquery.model.JobConfigurationQuery(); + com.google.api.services.bigquery.model.JobConfiguration jobConfig = + new com.google.api.services.bigquery.model.JobConfiguration() + .setQuery(jobConfigurationQuery); + com.google.api.services.bigquery.model.Job mockDryRunJob = + new com.google.api.services.bigquery.model.Job() + .setStatistics(jobStatsMock) + .setConfiguration(jobConfig); + when(bigqueryRpcMock.createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class))) + .thenReturn(mockDryRunJob); + BigQueryDryRunResult dryRunResult = connection.dryRun(DRY_RUN_SQL); + assertEquals(1, dryRunResult.getQueryParameters().size()); + assertEquals(QUERY_SCHEMA, dryRunResult.getSchema()); + verify(bigqueryRpcMock, times(1)) + .createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class)); + } + + @Test + void testQueryDryRunNoQueryParameters() throws BigQuerySQLException, IOException { + com.google.api.services.bigquery.model.JobStatistics2 queryMock = + new com.google.api.services.bigquery.model.JobStatistics2() + .setSchema(FAST_QUERY_TABLESCHEMA); + com.google.api.services.bigquery.model.JobStatistics jobStatsMock = + new com.google.api.services.bigquery.model.JobStatistics() + .setCreationTime(1234L) + .setStartTime(5678L) + .setQuery(queryMock); + com.google.api.services.bigquery.model.JobConfigurationQuery jobConfigurationQuery = + new com.google.api.services.bigquery.model.JobConfigurationQuery();
+ com.google.api.services.bigquery.model.JobConfiguration jobConfig = + new com.google.api.services.bigquery.model.JobConfiguration() + .setQuery(jobConfigurationQuery); + com.google.api.services.bigquery.model.Job mockDryRunJob = + new com.google.api.services.bigquery.model.Job() + .setStatistics(jobStatsMock) + .setConfiguration(jobConfig); + when(bigqueryRpcMock.createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class))) + .thenReturn(mockDryRunJob); + BigQueryDryRunResult dryRunResult = connection.dryRun(DRY_RUN_SQL); + assertEquals(0, dryRunResult.getQueryParameters().size()); + assertEquals(QUERY_SCHEMA, dryRunResult.getSchema()); + verify(bigqueryRpcMock, times(1)) + .createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class)); + } + + @Test + void testParseDataTask() throws InterruptedException { + BlockingQueue<Tuple<Iterable<FieldValueList>, Boolean>> pageCache = + new LinkedBlockingDeque<>(2); + BlockingQueue<Tuple<TableDataList, Boolean>> rpcResponseQueue = new LinkedBlockingDeque<>(2); + rpcResponseQueue.offer(Tuple.of(null, false)); + // This call should populate page cache + ConnectionImpl connectionSpy = Mockito.spy(connection); + connectionSpy.parseRpcDataAsync(TABLE_ROWS, QUERY_SCHEMA, pageCache, rpcResponseQueue); + Tuple<Iterable<FieldValueList>, Boolean> fvlTupple = + pageCache.take(); // wait for the parser thread to parse the data + assertNotNull(fvlTupple); + Iterable<FieldValueList> iterableFvl = fvlTupple.x(); + int rowCnt = 0; + for (FieldValueList fvl : iterableFvl) { + assertEquals(2, fvl.size()); // both rows should have 2 fields each + rowCnt++; + } + assertEquals(2, rowCnt); // two rows read + + verify(connectionSpy, times(1)) + .parseRpcDataAsync( + any(List.class), any(Schema.class), any(BlockingQueue.class), any(BlockingQueue.class)); + } + + @Test + void testPopulateBuffer() throws InterruptedException { + + BlockingQueue<Tuple<Iterable<FieldValueList>, Boolean>> pageCache = + new LinkedBlockingDeque<>(2); + BlockingQueue<Tuple<TableDataList, Boolean>> rpcResponseQueue = new LinkedBlockingDeque<>(2); + BlockingQueue<AbstractList<FieldValue>> buffer = new LinkedBlockingDeque<>(5); + rpcResponseQueue.offer(Tuple.of(null, false)); + // This call should populate page cache + ConnectionImpl connectionSpy = Mockito.spy(connection); + + connectionSpy.parseRpcDataAsync(TABLE_ROWS, QUERY_SCHEMA, pageCache, rpcResponseQueue); + + verify(connectionSpy, times(1)) + .parseRpcDataAsync( + any(List.class), any(Schema.class), any(BlockingQueue.class), any(BlockingQueue.class)); + + // now pass the pageCache to the populateBuffer method + connectionSpy.populateBufferAsync(rpcResponseQueue, pageCache, buffer); + // check that the buffer was populated with two rows asynchronously, using the blocking take method + AbstractList<FieldValue> fvl1 = buffer.take(); + assertNotNull(fvl1); + assertEquals(2, fvl1.size()); + assertEquals("Value1", fvl1.get(0).getValue().toString()); + assertEquals("Value2", fvl1.get(1).getValue().toString()); + AbstractList<FieldValue> fvl2 = buffer.take(); + assertNotNull(fvl2); + assertEquals(2, fvl2.size()); + assertEquals("Value3", fvl2.get(0).getValue().toString()); + assertEquals("Value4", fvl2.get(1).getValue().toString()); + verify(connectionSpy, times(1)) + .populateBufferAsync( + any(BlockingQueue.class), any(BlockingQueue.class), any(BlockingQueue.class)); + } + + @Test + void testNextPageTask() throws InterruptedException { + BlockingQueue<Tuple<TableDataList, Boolean>> rpcResponseQueue = new LinkedBlockingDeque<>(2); + TableDataList mockTabledataList = + new TableDataList() + .setPageToken(PAGE_TOKEN) + .setRows(ImmutableList.of(TABLE_ROW)) +
.setTotalRows(1L); + ConnectionImpl connectionSpy = Mockito.spy(connection); + doReturn(mockTabledataList) + .when(connectionSpy) + .tableDataListRpc(any(TableId.class), any(String.class)); + connectionSpy.runNextPageTaskAsync(PAGE_TOKEN, TABLE_NAME, rpcResponseQueue); + Tuple<TableDataList, Boolean> tableDataListTuple = rpcResponseQueue.take(); + assertNotNull(tableDataListTuple); + TableDataList tableDataList = tableDataListTuple.x(); + assertNotNull(tableDataList); + assertEquals("ABCD123", tableDataList.getPageToken()); + assertEquals(Long.valueOf(1), tableDataList.getTotalRows()); + verify(connectionSpy, times(1)) + .runNextPageTaskAsync(any(String.class), any(TableId.class), any(BlockingQueue.class)); + } + + @Test + void testGetQueryResultsFirstPage() throws IOException { + when(bigqueryRpcMock.getQueryResultsWithRowLimitSkipExceptionTranslation( + any(String.class), + any(String.class), + any(String.class), + any(Integer.class), + any(Long.class))) + .thenReturn(GET_QUERY_RESULTS_RESPONSE); + GetQueryResultsResponse response = connection.getQueryResultsFirstPage(QUERY_JOB); + assertNotNull(response); + assertEquals(GET_QUERY_RESULTS_RESPONSE, response); + verify(bigqueryRpcMock, times(1)) + .getQueryResultsWithRowLimitSkipExceptionTranslation( + any(String.class), + any(String.class), + any(String.class), + any(Integer.class), + any(Long.class)); + } + + // calls executeSelect with a non-fast query and exercises createQueryJob + @Test + void testLegacyQuerySinglePage() throws BigQuerySQLException, IOException { + ConnectionImpl connectionSpy = Mockito.spy(connection); + com.google.api.services.bigquery.model.Job jobResponseMock = + new com.google.api.services.bigquery.model.Job() + .setJobReference(QUERY_JOB.toPb()) + .setId(JOB) + .setStatus(new com.google.api.services.bigquery.model.JobStatus().setState("DONE")); + // emulating a legacy query + doReturn(false).when(connectionSpy).isFastQuerySupported(); + doReturn(GET_QUERY_RESULTS_RESPONSE) + .when(connectionSpy) + .getQueryResultsFirstPage(any(JobId.class)); + doReturn(BQ_RS_MOCK_RES) + .when(connectionSpy) + .getSubsequentQueryResultsWithJob( + any(Long.class), + any(Long.class), + any(JobId.class), + any(GetQueryResultsResponse.class), + any(Boolean.class)); + when(bigqueryRpcMock.createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class))) + .thenReturn(jobResponseMock); // RPC call in createQueryJob + BigQueryResult res = connectionSpy.executeSelect(SQL_QUERY); + assertEquals(res.getTotalRows(), 2); + assertEquals(QUERY_SCHEMA, res.getSchema()); + verify(bigqueryRpcMock, times(1)) + .createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class)); + } + + // calls executeSelect with a non-fast query where the query returns an empty result.
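+ // Caller-visible behavior sketched here (illustrative; the WHERE clause is a placeholder
+ // that matches no rows): an empty SELECT still yields a usable ResultSet whose first
+ // next() returns false instead of throwing:
+ //   BigQueryResult res = connection.executeSelect("SELECT * FROM t WHERE 1 = 0");
+ //   boolean hasRows = res.getResultSet().next(); // false for an empty result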
+ @Test + void testLegacyQuerySinglePageEmptyResults() throws SQLException, IOException { + ConnectionImpl connectionSpy = Mockito.spy(connection); + com.google.api.services.bigquery.model.Job jobResponseMock = + new com.google.api.services.bigquery.model.Job() + .setJobReference(QUERY_JOB.toPb()) + .setId(JOB) + .setStatus(new com.google.api.services.bigquery.model.JobStatus().setState("DONE")); + // emulating a legacy query + doReturn(false).when(connectionSpy).isFastQuerySupported(); + doReturn(GET_QUERY_RESULTS_RESPONSE_EMPTY) + .when(connectionSpy) + .getQueryResultsFirstPage(any(JobId.class)); + when(bigqueryRpcMock.createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class))) + .thenReturn(jobResponseMock); // RPC call in createQueryJob + BigQueryResult res = connectionSpy.executeSelect(SQL_QUERY); + assertEquals(res.getTotalRows(), 0); + assertEquals(QUERY_SCHEMA, res.getSchema()); + assertEquals( + false, + res.getResultSet() + .next()); // Validates that NPE does not occur when reading from empty ResultSet. + verify(bigqueryRpcMock, times(1)) + .createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class)); + } + + // exercises getSubsequentQueryResultsWithJob for long-running fast queries + @Test + void testFastQueryLongRunning() throws SQLException, IOException { + ConnectionImpl connectionSpy = Mockito.spy(connection); + // emulating a fast query + doReturn(true).when(connectionSpy).isFastQuerySupported(); + doReturn(GET_QUERY_RESULTS_RESPONSE) + .when(connectionSpy) + .getQueryResultsFirstPage(any(JobId.class)); + + doReturn(TABLE_NAME).when(connectionSpy).getDestinationTable(any(JobId.class)); + doReturn(BQ_RS_MOCK_RES) + .when(connectionSpy) + .tableDataList(any(GetQueryResultsResponse.class), any(JobId.class)); + + com.google.api.services.bigquery.model.QueryResponse mockQueryRes = + new QueryResponse() + .setSchema(FAST_QUERY_TABLESCHEMA) + .setJobComplete(false) + .setTotalRows(new BigInteger(String.valueOf(4L))) + .setJobReference(QUERY_JOB.toPb()) + .setRows(TABLE_ROWS); + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation( + any(String.class), any(QueryRequest.class))) + .thenReturn(mockQueryRes); + BigQueryResult res = connectionSpy.executeSelect(SQL_QUERY); + assertEquals(res.getTotalRows(), 2); + assertEquals(QUERY_SCHEMA, res.getSchema()); + verify(bigqueryRpcMock, times(1)) + .queryRpcSkipExceptionTranslation(any(String.class), any(QueryRequest.class)); + } + + @Test + void testFastQueryLongRunningAsync() + throws SQLException, ExecutionException, InterruptedException, IOException { + ConnectionImpl connectionSpy = Mockito.spy(connection); + // emulating a fast query + doReturn(true).when(connectionSpy).isFastQuerySupported(); + doReturn(GET_QUERY_RESULTS_RESPONSE) + .when(connectionSpy) + .getQueryResultsFirstPage(any(JobId.class)); + + doReturn(TABLE_NAME).when(connectionSpy).getDestinationTable(any(JobId.class)); + doReturn(BQ_RS_MOCK_RES) + .when(connectionSpy) + .tableDataList(any(GetQueryResultsResponse.class), any(JobId.class)); + + com.google.api.services.bigquery.model.QueryResponse mockQueryRes = + new QueryResponse() + .setSchema(FAST_QUERY_TABLESCHEMA) + .setJobComplete(false) + .setTotalRows(new BigInteger(String.valueOf(4L))) + .setJobReference(QUERY_JOB.toPb()) + .setRows(TABLE_ROWS); + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation( + any(String.class), any(QueryRequest.class))) + .thenReturn(mockQueryRes); + ListenableFuture<ExecuteSelectResponse> executeSelectFut = +
connectionSpy.executeSelectAsync(SQL_QUERY); + ExecuteSelectResponse exSelRes = executeSelectFut.get(); + BigQueryResult res = exSelRes.getResultSet(); + assertEquals(res.getTotalRows(), 2); + assertEquals(QUERY_SCHEMA, res.getSchema()); + assertTrue(exSelRes.getIsSuccessful()); + verify(bigqueryRpcMock, times(1)) + .queryRpcSkipExceptionTranslation(any(String.class), any(QueryRequest.class)); + } + + @Test + void testFastQuerySinglePageAsync() + throws BigQuerySQLException, ExecutionException, InterruptedException, IOException { + com.google.api.services.bigquery.model.QueryResponse mockQueryRes = + new QueryResponse().setSchema(FAST_QUERY_TABLESCHEMA).setJobComplete(true); + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation( + any(String.class), any(QueryRequest.class))) + .thenReturn(mockQueryRes); + ConnectionImpl connectionSpy = Mockito.spy(connection); + doReturn(BQ_RS_MOCK_RES) + .when(connectionSpy) + .processQueryResponseResults(any(QueryResponse.class)); + + ListenableFuture<ExecuteSelectResponse> executeSelectFut = + connectionSpy.executeSelectAsync(SQL_QUERY); + ExecuteSelectResponse exSelRes = executeSelectFut.get(); + BigQueryResult res = exSelRes.getResultSet(); + assertEquals(res.getTotalRows(), 2); + assertEquals(QUERY_SCHEMA, res.getSchema()); + assertTrue(exSelRes.getIsSuccessful()); + verify(connectionSpy, times(1)) + .processQueryResponseResults( + any(com.google.api.services.bigquery.model.QueryResponse.class)); + } + + @Test + void testExecuteSelectSlowWithParamsAsync() + throws BigQuerySQLException, ExecutionException, InterruptedException { + ConnectionImpl connectionSpy = Mockito.spy(connection); + List<Parameter> parameters = new ArrayList<>(); + Map<String, String> labels = new HashMap<>(); + doReturn(false).when(connectionSpy).isFastQuerySupported(); + com.google.api.services.bigquery.model.JobStatistics jobStatistics = + new com.google.api.services.bigquery.model.JobStatistics(); + com.google.api.services.bigquery.model.Job jobResponseMock = + new com.google.api.services.bigquery.model.Job() + .setJobReference(QUERY_JOB.toPb()) + .setId(JOB) + .setStatus(new com.google.api.services.bigquery.model.JobStatus().setState("DONE")) + .setStatistics(jobStatistics); + + doReturn(jobResponseMock) + .when(connectionSpy) + .createQueryJob(SQL_QUERY, connectionSettings, parameters, labels); + doReturn(GET_QUERY_RESULTS_RESPONSE) + .when(connectionSpy) + .getQueryResultsFirstPage(any(JobId.class)); + doReturn(BQ_RS_MOCK_RES) + .when(connectionSpy) + .getResultSet( + any(GetQueryResultsResponse.class), + any(JobId.class), + any(String.class), + any(Boolean.class)); + ListenableFuture<ExecuteSelectResponse> executeSelectFut = + connectionSpy.executeSelectAsync(SQL_QUERY, parameters, labels); + ExecuteSelectResponse exSelRes = executeSelectFut.get(); + BigQueryResult res = exSelRes.getResultSet(); + assertTrue(exSelRes.getIsSuccessful()); + assertEquals(res.getTotalRows(), 2); + assertEquals(QUERY_SCHEMA, res.getSchema()); + verify(connectionSpy, times(1)) + .getResultSet( + any(GetQueryResultsResponse.class), + any(JobId.class), + any(String.class), + any(Boolean.class)); + } + + @Test + void testFastQueryMultiplePagesAsync() + throws BigQuerySQLException, ExecutionException, InterruptedException, IOException { + com.google.api.services.bigquery.model.QueryResponse mockQueryRes = + new QueryResponse() + .setSchema(FAST_QUERY_TABLESCHEMA) + .setJobComplete(true) + .setPageToken(PAGE_TOKEN); + when(bigqueryRpcMock.queryRpcSkipExceptionTranslation( + any(String.class), any(QueryRequest.class))) + .thenReturn(mockQueryRes); + ConnectionImpl connectionSpy = Mockito.spy(connection); + + doReturn(BQ_RS_MOCK_RES_MULTI_PAGE) + .when(connectionSpy) + .processQueryResponseResults( + any(com.google.api.services.bigquery.model.QueryResponse.class)); + + ListenableFuture<ExecuteSelectResponse> executeSelectFut = + connectionSpy.executeSelectAsync(SQL_QUERY); + ExecuteSelectResponse exSelRes = executeSelectFut.get(); + BigQueryResult res = exSelRes.getResultSet(); + assertTrue(exSelRes.getIsSuccessful()); + assertEquals(res.getTotalRows(), 4); + assertEquals(QUERY_SCHEMA, res.getSchema()); + verify(connectionSpy, times(1)) + .processQueryResponseResults( + any(com.google.api.services.bigquery.model.QueryResponse.class)); + } + + @Test + // Emulates the first page response using getQueryResultsFirstPage(jobId) and then the + // subsequent pages using getSubsequentQueryResultsWithJob(...) + void testLegacyQueryMultiplePages() throws SQLException, IOException { + ConnectionImpl connectionSpy = Mockito.spy(connection); + com.google.api.services.bigquery.model.JobStatistics jobStatistics = + new com.google.api.services.bigquery.model.JobStatistics(); + // emulating a Legacy query + doReturn(false).when(connectionSpy).isFastQuerySupported(); + doReturn(GET_QUERY_RESULTS_RESPONSE) + .when(connectionSpy) + .getQueryResultsFirstPage(any(JobId.class)); + doReturn(TABLE_NAME).when(connectionSpy).getDestinationTable(any(JobId.class)); + doReturn(BQ_RS_MOCK_RES) + .when(connectionSpy) + .tableDataList(any(GetQueryResultsResponse.class), any(JobId.class)); + com.google.api.services.bigquery.model.Job jobResponseMock = + new com.google.api.services.bigquery.model.Job() + .setJobReference(QUERY_JOB.toPb()) + .setId(JOB) + .setStatus(new com.google.api.services.bigquery.model.JobStatus().setState("DONE")) + .setStatistics(jobStatistics); + when(bigqueryRpcMock.createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class))) + .thenReturn(jobResponseMock); // RPC call in createQueryJob + BigQueryResult res = connectionSpy.executeSelect(SQL_QUERY); + assertEquals(res.getTotalRows(), 2); + assertEquals(QUERY_SCHEMA, res.getSchema()); + verify(bigqueryRpcMock, times(1)) + .createJobForQuerySkipExceptionTranslation( + any(com.google.api.services.bigquery.model.Job.class)); + verify(connectionSpy, times(1)) + .tableDataList(any(GetQueryResultsResponse.class), any(JobId.class)); + } + + @Test + void testExecuteSelectSlow() throws BigQuerySQLException { + ConnectionImpl connectionSpy = Mockito.spy(connection); + doReturn(false).when(connectionSpy).isFastQuerySupported(); + com.google.api.services.bigquery.model.JobStatistics jobStatistics = + new com.google.api.services.bigquery.model.JobStatistics(); + com.google.api.services.bigquery.model.Job jobResponseMock = + new com.google.api.services.bigquery.model.Job() + .setJobReference(QUERY_JOB.toPb()) + .setId(JOB) + .setStatus(new com.google.api.services.bigquery.model.JobStatus().setState("DONE")) + .setStatistics(jobStatistics); + + doReturn(jobResponseMock) + .when(connectionSpy) + .createQueryJob(SQL_QUERY, connectionSettings, null, null); + doReturn(GET_QUERY_RESULTS_RESPONSE) + .when(connectionSpy) + .getQueryResultsFirstPage(any(JobId.class)); + doReturn(BQ_RS_MOCK_RES) + .when(connectionSpy) + .getResultSet( + any(GetQueryResultsResponse.class), + any(JobId.class), + any(String.class), + any(Boolean.class)); + BigQueryResult res = connectionSpy.executeSelect(SQL_QUERY); + assertEquals(res.getTotalRows(), 2); + assertEquals(QUERY_SCHEMA, res.getSchema()); +
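+ // A sketch of the job-based (non-fast) chain stubbed above, in the order the spied
+ // methods fire (signatures as used in this test; argument values are placeholders):
+ //   createQueryJob(SQL_QUERY, connectionSettings, null, null)
+ //     -> getQueryResultsFirstPage(jobId)
+ //     -> getResultSet(firstPageResponse, jobId, SQL_QUERY, false)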
verify(connectionSpy, times(1)) + .getResultSet( + any(GetQueryResultsResponse.class), + any(JobId.class), + any(String.class), + any(Boolean.class)); + } + + @Test + void testExecuteSelectSlowWithParams() throws BigQuerySQLException { + ConnectionImpl connectionSpy = Mockito.spy(connection); + List<Parameter> parameters = new ArrayList<>(); + Map<String, String> labels = new HashMap<>(); + doReturn(false).when(connectionSpy).isFastQuerySupported(); + com.google.api.services.bigquery.model.JobStatistics jobStatistics = + new com.google.api.services.bigquery.model.JobStatistics(); + com.google.api.services.bigquery.model.Job jobResponseMock = + new com.google.api.services.bigquery.model.Job() + .setJobReference(QUERY_JOB.toPb()) + .setId(JOB) + .setStatus(new com.google.api.services.bigquery.model.JobStatus().setState("DONE")) + .setStatistics(jobStatistics); + + doReturn(jobResponseMock) + .when(connectionSpy) + .createQueryJob(SQL_QUERY, connectionSettings, parameters, labels); + doReturn(GET_QUERY_RESULTS_RESPONSE) + .when(connectionSpy) + .getQueryResultsFirstPage(any(JobId.class)); + doReturn(BQ_RS_MOCK_RES) + .when(connectionSpy) + .getResultSet( + any(GetQueryResultsResponse.class), + any(JobId.class), + any(String.class), + any(Boolean.class)); + BigQueryResult res = connectionSpy.executeSelect(SQL_QUERY, parameters, labels); + assertEquals(res.getTotalRows(), 2); + assertEquals(QUERY_SCHEMA, res.getSchema()); +
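+ // Caller-side sketch of the parameterized overload exercised here; the values are
+ // illustrative, and the unnamed (positional) Parameter is assumed to bind the "?" in
+ // DRY_RUN_SQL:
+ //   List<Parameter> params =
+ //       ImmutableList.of(
+ //           Parameter.newBuilder().setValue(QueryParameterValue.string("US")).build());
+ //   Map<String, String> jobLabels = ImmutableMap.of("team", "data-eng");
+ //   BigQueryResult result = connection.executeSelect(DRY_RUN_SQL, params, jobLabels);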
assertFalse(connection.useReadAPI(10000L, 10L, QUERY_SCHEMA, true)); + + doReturn(false).when(connectionSettingsSpy).getUseReadAPI(); + assertFalse(connection.useReadAPI(null, null, QUERY_SCHEMA, false)); + assertFalse(connection.useReadAPI(10000L, 10L, QUERY_SCHEMA, false)); + } + + @Test + void testGetPageCacheSize() { + ConnectionImpl connectionSpy = Mockito.spy(connection); + // number of cached pages should be within a range + assertTrue(connectionSpy.getPageCacheSize(10000, QUERY_SCHEMA) >= 3); + assertTrue(connectionSpy.getPageCacheSize(100000000, QUERY_SCHEMA) <= 20); + verify(connectionSpy, times(2)).getPageCacheSize(any(Integer.class), any(Schema.class)); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionPropertyTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionPropertyTest.java index 9177720e8f..bc5def560b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionPropertyTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionPropertyTest.java @@ -18,7 +18,7 @@ import static com.google.common.truth.Truth.assertThat; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class ConnectionPropertyTest { @@ -28,7 +28,7 @@ public class ConnectionPropertyTest { ConnectionProperty.newBuilder().setKey(KEY).setValue(VALUE).build(); @Test - public void testToBuilder() { + void testToBuilder() { compareConnectionProperty(CONNECTION_PROPERTY, CONNECTION_PROPERTY.toBuilder().build()); ConnectionProperty property = CONNECTION_PROPERTY.toBuilder().setKey("time-zone").build(); assertThat(property.getKey()).isEqualTo("time-zone"); @@ -37,19 +37,19 @@ public void testToBuilder() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { ConnectionProperty connectionProperty = ConnectionProperty.of(KEY, VALUE); compareConnectionProperty(connectionProperty, connectionProperty.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertThat(CONNECTION_PROPERTY.getKey()).isEqualTo(KEY); assertThat(CONNECTION_PROPERTY.getValue()).isEqualTo(VALUE); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareConnectionProperty( CONNECTION_PROPERTY, ConnectionProperty.fromPb(CONNECTION_PROPERTY.toPb())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionSettingsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionSettingsTest.java new file mode 100644 index 0000000000..29c29ed55c --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ConnectionSettingsTest.java @@ -0,0 +1,166 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.google.cloud.bigquery.JobInfo.CreateDisposition; +import com.google.cloud.bigquery.JobInfo.SchemaUpdateOption; +import com.google.cloud.bigquery.JobInfo.WriteDisposition; +import com.google.cloud.bigquery.QueryJobConfiguration.Priority; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import java.util.List; +import java.util.Map; +import org.junit.jupiter.api.Test; + +class ConnectionSettingsTest { + private static final String TEST_PROJECT_ID = "test-project-id"; + private static final DatasetId DATASET_ID = DatasetId.of("dataset"); + private static final TableId TABLE_ID = TableId.of("dataset", "table"); + private static final Long REQUEST_TIMEOUT = 10L; + private static final Integer NUM_BUFFERED_ROWS = 100; + private static final Long MAX_RESULTS = 1000L; + private static final List<String> SOURCE_URIS = ImmutableList.of("uri1", "uri2"); + private static final String KEY = "time_zone"; + private static final String VALUE = "US/Eastern"; + private static final ConnectionProperty CONNECTION_PROPERTY = + ConnectionProperty.newBuilder().setKey(KEY).setValue(VALUE).build(); + private static final List<ConnectionProperty> CONNECTION_PROPERTIES = + ImmutableList.of(CONNECTION_PROPERTY); + private static final Field FIELD_SCHEMA1 = + Field.newBuilder("StringField", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("FieldDescription1") + .build(); + private static final Field FIELD_SCHEMA2 = + Field.newBuilder("IntegerField", StandardSQLTypeName.INT64) + .setMode(Field.Mode.REPEATED) + .setDescription("FieldDescription2") + .build(); + private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2); + private static final Integer MAX_BAD_RECORDS = 42; + private static final Boolean IGNORE_UNKNOWN_VALUES = true; + private static final String COMPRESSION = "GZIP"; + private static final CsvOptions CSV_OPTIONS = CsvOptions.newBuilder().build(); + private static final ExternalTableDefinition TABLE_CONFIGURATION = + ExternalTableDefinition.newBuilder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS) + .setCompression(COMPRESSION) + .setIgnoreUnknownValues(IGNORE_UNKNOWN_VALUES) + .setMaxBadRecords(MAX_BAD_RECORDS) + .build(); + private static final Map<String, ExternalTableDefinition> TABLE_DEFINITIONS = + ImmutableMap.of("tableName", TABLE_CONFIGURATION); + private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED; + private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND; + private static final Priority PRIORITY = Priority.BATCH; + private static final boolean ALLOW_LARGE_RESULTS = true; + private static final boolean USE_QUERY_CACHE = false; + private static final boolean FLATTEN_RESULTS = true; + private static final Integer MAX_BILLING_TIER = 123; + private static final Long MAX_BYTES_BILL = 12345L; + private static final List<SchemaUpdateOption> SCHEMA_UPDATE_OPTIONS = + ImmutableList.of(SchemaUpdateOption.ALLOW_FIELD_RELAXATION); + private static final List<UserDefinedFunction> USER_DEFINED_FUNCTIONS = + ImmutableList.of(UserDefinedFunction.inline("Function"), UserDefinedFunction.fromUri("URI")); + private static final EncryptionConfiguration JOB_ENCRYPTION_CONFIGURATION = + EncryptionConfiguration.newBuilder().setKmsKeyName("KMS_KEY_1").build(); + private static final TimePartitioning TIME_PARTITIONING = + TimePartitioning.of(TimePartitioning.Type.DAY); + private static final Clustering CLUSTERING = +
Clustering.newBuilder().setFields(ImmutableList.of("Foo", "Bar")).build(); + private static final Long TIMEOUT = 10L; + private static final RangePartitioning.Range RANGE = + RangePartitioning.Range.newBuilder().setStart(1L).setInterval(2L).setEnd(10L).build(); + private static final RangePartitioning RANGE_PARTITIONING = + RangePartitioning.newBuilder().setField("IntegerField").setRange(RANGE).build(); + + private static final ConnectionSettings CONNECTION_SETTINGS = + ConnectionSettings.newBuilder() + .setRequestTimeout(REQUEST_TIMEOUT) + .setNumBufferedRows(NUM_BUFFERED_ROWS) + .setMaxResults(MAX_RESULTS) + .setUseQueryCache(USE_QUERY_CACHE) + .setTableDefinitions(TABLE_DEFINITIONS) + .setAllowLargeResults(ALLOW_LARGE_RESULTS) + .setCreateDisposition(CREATE_DISPOSITION) + .setDefaultDataset(DATASET_ID) + .setDestinationTable(TABLE_ID) + .setWriteDisposition(WRITE_DISPOSITION) + .setPriority(PRIORITY) + .setFlattenResults(FLATTEN_RESULTS) + .setUserDefinedFunctions(USER_DEFINED_FUNCTIONS) + .setMaximumBillingTier(MAX_BILLING_TIER) + .setMaximumBytesBilled(MAX_BYTES_BILL) + .setSchemaUpdateOptions(SCHEMA_UPDATE_OPTIONS) + .setDestinationEncryptionConfiguration(JOB_ENCRYPTION_CONFIGURATION) + .setTimePartitioning(TIME_PARTITIONING) + .setClustering(CLUSTERING) + .setJobTimeoutMs(TIMEOUT) + .setRangePartitioning(RANGE_PARTITIONING) + .setConnectionProperties(CONNECTION_PROPERTIES) + .build(); + + @Test + void testToBuilder() { + compareConnectionSettings(CONNECTION_SETTINGS, CONNECTION_SETTINGS.toBuilder().build()); + } + + @Test + void testToBuilderIncomplete() { + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder().setDefaultDataset(DATASET_ID).build(); + compareConnectionSettings(connectionSettings, connectionSettings.toBuilder().build()); + } + + @Test + void testBuilder() { + assertEquals(REQUEST_TIMEOUT, CONNECTION_SETTINGS.getRequestTimeout()); + assertEquals(NUM_BUFFERED_ROWS, CONNECTION_SETTINGS.getNumBufferedRows()); + assertEquals(MAX_RESULTS, CONNECTION_SETTINGS.getMaxResults()); + } + + private void compareConnectionSettings(ConnectionSettings expected, ConnectionSettings value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.getRequestTimeout(), value.getRequestTimeout()); + assertEquals(expected.getNumBufferedRows(), value.getNumBufferedRows()); + assertEquals(expected.getMaxResults(), value.getMaxResults()); + assertEquals(expected.getAllowLargeResults(), value.getAllowLargeResults()); + assertEquals(expected.getCreateDisposition(), value.getCreateDisposition()); + assertEquals(expected.getDefaultDataset(), value.getDefaultDataset()); + assertEquals(expected.getDestinationTable(), value.getDestinationTable()); + assertEquals(expected.getFlattenResults(), value.getFlattenResults()); + assertEquals(expected.getPriority(), value.getPriority()); + assertEquals(expected.getTableDefinitions(), value.getTableDefinitions()); + assertEquals(expected.getUseQueryCache(), value.getUseQueryCache()); + assertEquals(expected.getUserDefinedFunctions(), value.getUserDefinedFunctions()); + assertEquals(expected.getWriteDisposition(), value.getWriteDisposition()); + assertEquals(expected.getMaximumBillingTier(), value.getMaximumBillingTier()); + assertEquals(expected.getMaximumBytesBilled(), value.getMaximumBytesBilled()); + assertEquals(expected.getSchemaUpdateOptions(), value.getSchemaUpdateOptions()); + assertEquals( + 
expected.getDestinationEncryptionConfiguration(), + value.getDestinationEncryptionConfiguration()); + assertEquals(expected.getTimePartitioning(), value.getTimePartitioning()); + assertEquals(expected.getClustering(), value.getClustering()); + assertEquals(expected.getJobTimeoutMs(), value.getJobTimeoutMs()); + assertEquals(expected.getRangePartitioning(), value.getRangePartitioning()); + assertEquals(expected.getConnectionProperties(), value.getConnectionProperties()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CopyJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CopyJobConfigurationTest.java index be62bc1d68..97538f2991 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CopyJobConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CopyJobConfigurationTest.java @@ -16,9 +16,9 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.cloud.bigquery.JobInfo.CreateDisposition; import com.google.cloud.bigquery.JobInfo.WriteDisposition; @@ -28,9 +28,9 @@ import com.google.common.collect.Lists; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class CopyJobConfigurationTest { +class CopyJobConfigurationTest { private static final String TEST_PROJECT_ID = "test-project-id"; private static final TableId SOURCE_TABLE = TableId.of("dataset", "sourceTable"); @@ -44,6 +44,7 @@ public class CopyJobConfigurationTest { EncryptionConfiguration.newBuilder().setKmsKeyName("KMS_KEY_1").build(); private static final Map<String, String> LABELS = ImmutableMap.of("job-name", "copy"); private static final Long TIMEOUT = 10L; + private static final String RESERVATION = "reservation"; private static final CopyJobConfiguration COPY_JOB_CONFIGURATION = CopyJobConfiguration.newBuilder(DESTINATION_TABLE, SOURCE_TABLE) .setCreateDisposition(CREATE_DISPOSITION) @@ -51,6 +52,7 @@ .setDestinationEncryptionConfiguration(COPY_JOB_ENCRYPTION_CONFIGURATION) .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) + .setReservation(RESERVATION) .build(); private static final CopyJobConfiguration COPY_JOB_CONFIGURATION_MULTIPLE_TABLES = CopyJobConfiguration.newBuilder(DESTINATION_TABLE, SOURCE_TABLES) @@ -58,17 +60,17 @@ .setWriteDisposition(WRITE_DISPOSITION) .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) + .setReservation(RESERVATION) .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareCopyJobConfiguration(COPY_JOB_CONFIGURATION, COPY_JOB_CONFIGURATION.toBuilder().build()); compareCopyJobConfiguration( COPY_JOB_CONFIGURATION_MULTIPLE_TABLES, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.toBuilder().build()); CopyJobConfiguration jobConfiguration = - COPY_JOB_CONFIGURATION - .toBuilder() + COPY_JOB_CONFIGURATION.toBuilder() .setDestinationTable(TableId.of("dataset", "newTable")) .build(); assertEquals("newTable", jobConfiguration.getDestinationTable().getTable()); @@ -77,7 +79,7 @@ public void testToBuilder() { } @Test - public void testOf() { + void testOf() { CopyJobConfiguration job = CopyJobConfiguration.of(DESTINATION_TABLE,
SOURCE_TABLES); assertEquals(DESTINATION_TABLE, job.getDestinationTable()); assertEquals(SOURCE_TABLES, job.getSourceTables()); @@ -87,14 +89,14 @@ public void testOf() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { CopyJobConfiguration jobConfiguration = CopyJobConfiguration.of(DESTINATION_TABLE, SOURCE_TABLES); compareCopyJobConfiguration(jobConfiguration, jobConfiguration.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(DESTINATION_TABLE, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.getDestinationTable()); assertEquals(SOURCE_TABLES, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.getSourceTables()); assertEquals(CREATE_DISPOSITION, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.getCreateDisposition()); @@ -108,12 +110,13 @@ public void testBuilder() { } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { assertNotNull(COPY_JOB_CONFIGURATION.toPb().getCopy()); assertNull(COPY_JOB_CONFIGURATION.toPb().getExtract()); assertNull(COPY_JOB_CONFIGURATION.toPb().getLoad()); assertNull(COPY_JOB_CONFIGURATION.toPb().getQuery()); - assertNull(COPY_JOB_CONFIGURATION.toPb().getCopy().getSourceTables()); + assertNull(COPY_JOB_CONFIGURATION.toPb().getCopy().getSourceTable()); + assertNotNull(COPY_JOB_CONFIGURATION.toPb().getCopy().getSourceTables()); assertNull(COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.toPb().getCopy().getSourceTable()); assertNotNull(COPY_JOB_CONFIGURATION.getLabels()); assertNotNull(COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.getLabels()); @@ -130,7 +133,7 @@ public void testToPbAndFromPb() { } @Test - public void testSetProjectId() { + void testSetProjectId() { CopyJobConfiguration configuration = COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.setProjectId(TEST_PROJECT_ID); assertEquals(TEST_PROJECT_ID, configuration.getDestinationTable().getProject()); @@ -140,10 +143,9 @@ public void testSetProjectId() { } @Test - public void testSetProjectIdDoNotOverride() { + void testSetProjectIdDoNotOverride() { CopyJobConfiguration configuration = - COPY_JOB_CONFIGURATION_MULTIPLE_TABLES - .toBuilder() + COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.toBuilder() .setSourceTables( Lists.transform( SOURCE_TABLES, @@ -163,7 +165,7 @@ public TableId apply(TableId tableId) { } @Test - public void testGetType() { + void testGetType() { assertEquals(JobConfiguration.Type.COPY, COPY_JOB_CONFIGURATION.getType()); assertEquals(JobConfiguration.Type.COPY, COPY_JOB_CONFIGURATION_MULTIPLE_TABLES.getType()); } @@ -182,5 +184,6 @@ private void compareCopyJobConfiguration( value.getDestinationEncryptionConfiguration()); assertEquals(expected.getLabels(), value.getLabels()); assertEquals(expected.getJobTimeoutMs(), value.getJobTimeoutMs()); + assertEquals(expected.getReservation(), value.getReservation()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CsvOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CsvOptionsTest.java index 21ca11fdfc..1c31540fc0 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CsvOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CsvOptionsTest.java @@ -16,11 +16,11 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class CsvOptionsTest { @@ -28,20 +28,25 @@ public class 
CsvOptionsTest { private static final Boolean ALLOW_QUOTED_NEWLINE = true; private static final Charset ENCODING = StandardCharsets.UTF_8; private static final String FIELD_DELIMITER = ","; + private static final String NULL_MARKER = "\\N"; private static final String QUOTE = "\""; private static final long SKIP_LEADING_ROWS = 42L; + + private static final boolean PRESERVE_ASCII_CONTROL_CHARACTERS = true; private static final CsvOptions CSV_OPTIONS = CsvOptions.newBuilder() .setAllowJaggedRows(ALLOW_JAGGED_ROWS) .setAllowQuotedNewLines(ALLOW_QUOTED_NEWLINE) .setEncoding(ENCODING) .setFieldDelimiter(FIELD_DELIMITER) + .setNullMarker(NULL_MARKER) .setQuote(QUOTE) .setSkipLeadingRows(SKIP_LEADING_ROWS) + .setPreserveAsciiControlCharacters(PRESERVE_ASCII_CONTROL_CHARACTERS) .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareCsvOptions(CSV_OPTIONS, CSV_OPTIONS.toBuilder().build()); CsvOptions csvOptions = CSV_OPTIONS.toBuilder().setFieldDelimiter(";").build(); assertEquals(";", csvOptions.getFieldDelimiter()); @@ -50,24 +55,27 @@ public void testToBuilder() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { CsvOptions csvOptions = CsvOptions.newBuilder().setFieldDelimiter("|").build(); assertEquals(csvOptions, csvOptions.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(FormatOptions.CSV, CSV_OPTIONS.getType()); assertEquals(ALLOW_JAGGED_ROWS, CSV_OPTIONS.allowJaggedRows()); assertEquals(ALLOW_QUOTED_NEWLINE, CSV_OPTIONS.allowQuotedNewLines()); assertEquals(ENCODING.name(), CSV_OPTIONS.getEncoding()); assertEquals(FIELD_DELIMITER, CSV_OPTIONS.getFieldDelimiter()); + assertEquals(NULL_MARKER, CSV_OPTIONS.getNullMarker()); assertEquals(QUOTE, CSV_OPTIONS.getQuote()); assertEquals(SKIP_LEADING_ROWS, (long) CSV_OPTIONS.getSkipLeadingRows()); + assertEquals( + PRESERVE_ASCII_CONTROL_CHARACTERS, CSV_OPTIONS.getPreserveAsciiControlCharacters()); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareCsvOptions(CSV_OPTIONS, CsvOptions.fromPb(CSV_OPTIONS.toPb())); CsvOptions csvOptions = CsvOptions.newBuilder().setAllowJaggedRows(ALLOW_JAGGED_ROWS).build(); compareCsvOptions(csvOptions, CsvOptions.fromPb(csvOptions.toPb())); @@ -79,6 +87,7 @@ private void compareCsvOptions(CsvOptions expected, CsvOptions value) { assertEquals(expected.allowQuotedNewLines(), value.allowQuotedNewLines()); assertEquals(expected.getEncoding(), value.getEncoding()); assertEquals(expected.getFieldDelimiter(), value.getFieldDelimiter()); + assertEquals(expected.getNullMarker(), value.getNullMarker()); assertEquals(expected.getQuote(), value.getQuote()); assertEquals(expected.getSkipLeadingRows(), value.getSkipLeadingRows()); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetIdTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetIdTest.java index bacf7b2b0e..dc2ba28998 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetIdTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetIdTest.java @@ -16,17 +16,17 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class DatasetIdTest { +class DatasetIdTest { private static final DatasetId DATASET = DatasetId.of("dataset"); private static final DatasetId DATASET_COMPLETE = 
DatasetId.of("project", "dataset"); @Test - public void testOf() { + void testOf() { assertEquals(null, DATASET.getProject()); assertEquals("dataset", DATASET.getDataset()); assertEquals("project", DATASET_COMPLETE.getProject()); @@ -34,19 +34,19 @@ public void testOf() { } @Test - public void testEquals() { + void testEquals() { compareDatasetIds(DATASET, DatasetId.of("dataset")); compareDatasetIds(DATASET_COMPLETE, DatasetId.of("project", "dataset")); } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { compareDatasetIds(DATASET, DatasetId.fromPb(DATASET.toPb())); compareDatasetIds(DATASET_COMPLETE, DatasetId.fromPb(DATASET_COMPLETE.toPb())); } @Test - public void testSetProjectId() { + void testSetProjectId() { assertEquals(DATASET_COMPLETE, DATASET.setProjectId("project")); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java index 453701e3a8..cb9768de4d 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java @@ -16,15 +16,16 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class DatasetInfoTest { @@ -58,6 +59,19 @@ public class DatasetInfoTest { private static final DatasetId DATASET_ID_COMPLETE = DatasetId.of("project", "dataset"); private static final EncryptionConfiguration DATASET_ENCRYPTION_CONFIGURATION = EncryptionConfiguration.newBuilder().setKmsKeyName("KMS_KEY_1").build(); + private static final String STORAGE_BILLING_MODEL = "LOGICAL"; + private static final Long MAX_TIME_TRAVEL_HOURS_5_DAYS = 120L; + private static final Long MAX_TIME_TRAVEL_HOURS_7_DAYS = 168L; + private static final Map RESOURCE_TAGS = + ImmutableMap.of( + "example-key1", "example-value1", + "example-key2", "example-value2"); + + private static final ExternalDatasetReference EXTERNAL_DATASET_REFERENCE = + ExternalDatasetReference.newBuilder() + .setExternalSource("source") + .setConnection("connection") + .build(); private static final DatasetInfo DATASET_INFO = DatasetInfo.newBuilder(DATASET_ID) .setAcl(ACCESS_RULES) @@ -73,25 +87,30 @@ public class DatasetInfoTest { .setLabels(LABELS) .setDefaultEncryptionConfiguration(DATASET_ENCRYPTION_CONFIGURATION) .setDefaultPartitionExpirationMs(DEFAULT_PARTITION__EXPIRATION) + .setStorageBillingModel(STORAGE_BILLING_MODEL) + .setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS_7_DAYS) + .setResourceTags(RESOURCE_TAGS) .build(); private static final DatasetInfo DATASET_INFO_COMPLETE = - DATASET_INFO - .toBuilder() + DATASET_INFO.toBuilder() .setDatasetId(DATASET_ID_COMPLETE) .setAcl(ACCESS_RULES_COMPLETE) .build(); private static final DatasetInfo DATASET_INFO_COMPLETE_WITH_IAM_MEMBER = DATASET_INFO.toBuilder().setAcl(ACCESS_RULES_IAM_MEMBER).build(); + private static final DatasetInfo 
DATASET_INFO_COMPLETE_WITH_EXTERNAL_DATASET_REFERENCE = + DATASET_INFO.toBuilder().setExternalDatasetReference(EXTERNAL_DATASET_REFERENCE).build(); + private static final DatasetInfo DATASET_INFO_WITH_MAX_TIME_TRAVEL_5_DAYS = + DATASET_INFO.toBuilder().setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS_5_DAYS).build(); @Test - public void testToBuilder() { + void testToBuilder() { compareDatasets(DATASET_INFO, DATASET_INFO.toBuilder().build()); compareDatasets( DATASET_INFO_COMPLETE_WITH_IAM_MEMBER, DATASET_INFO_COMPLETE_WITH_IAM_MEMBER.toBuilder().build()); DatasetInfo datasetInfo = - DATASET_INFO - .toBuilder() + DATASET_INFO.toBuilder() .setDatasetId(DatasetId.of("dataset2")) .setDescription("description2") .build(); @@ -103,13 +122,34 @@ public void testToBuilder() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { DatasetInfo datasetInfo = DatasetInfo.newBuilder(DATASET_ID).build(); assertEquals(datasetInfo, datasetInfo.toBuilder().build()); } @Test - public void testBuilder() { + void testToBuilderWithExternalDatasetReference() { + compareDatasets( + DATASET_INFO_COMPLETE_WITH_EXTERNAL_DATASET_REFERENCE, + DATASET_INFO_COMPLETE_WITH_EXTERNAL_DATASET_REFERENCE.toBuilder().build()); + + ExternalDatasetReference externalDatasetReference = + ExternalDatasetReference.newBuilder() + .setExternalSource("source2") + .setConnection("connection2") + .build(); + DatasetInfo datasetInfo = + DATASET_INFO_COMPLETE_WITH_EXTERNAL_DATASET_REFERENCE.toBuilder() + .setExternalDatasetReference(externalDatasetReference) + .build(); + assertEquals(externalDatasetReference, datasetInfo.getExternalDatasetReference()); + datasetInfo = + datasetInfo.toBuilder().setExternalDatasetReference(EXTERNAL_DATASET_REFERENCE).build(); + compareDatasets(DATASET_INFO_COMPLETE_WITH_EXTERNAL_DATASET_REFERENCE, datasetInfo); + } + + @Test + void testBuilder() { assertNull(DATASET_INFO.getDatasetId().getProject()); assertEquals(DATASET_ID, DATASET_INFO.getDatasetId()); assertEquals(ACCESS_RULES, DATASET_INFO.getAcl()); @@ -137,10 +177,19 @@ public void testBuilder() { assertEquals(LOCATION, DATASET_INFO_COMPLETE.getLocation()); assertEquals(SELF_LINK, DATASET_INFO_COMPLETE.getSelfLink()); assertEquals(LABELS, DATASET_INFO_COMPLETE.getLabels()); + assertEquals( + EXTERNAL_DATASET_REFERENCE, + DATASET_INFO_COMPLETE_WITH_EXTERNAL_DATASET_REFERENCE.getExternalDatasetReference()); + assertEquals(STORAGE_BILLING_MODEL, DATASET_INFO_COMPLETE.getStorageBillingModel()); + assertEquals(MAX_TIME_TRAVEL_HOURS_7_DAYS, DATASET_INFO.getMaxTimeTravelHours()); + assertEquals( + MAX_TIME_TRAVEL_HOURS_5_DAYS, + DATASET_INFO_WITH_MAX_TIME_TRAVEL_5_DAYS.getMaxTimeTravelHours()); + assertEquals(RESOURCE_TAGS, DATASET_INFO.getResourceTags()); } @Test - public void testOf() { + void testOf() { DatasetInfo datasetInfo = DatasetInfo.of(DATASET_ID.getDataset()); assertEquals(DATASET_ID, datasetInfo.getDatasetId()); assertNull(datasetInfo.getAcl()); @@ -156,6 +205,9 @@ public void testOf() { assertNull(datasetInfo.getDefaultEncryptionConfiguration()); assertNull(datasetInfo.getDefaultPartitionExpirationMs()); assertTrue(datasetInfo.getLabels().isEmpty()); + assertNull(datasetInfo.getExternalDatasetReference()); + assertNull(datasetInfo.getStorageBillingModel()); + assertNull(datasetInfo.getMaxTimeTravelHours()); datasetInfo = DatasetInfo.of(DATASET_ID); assertEquals(DATASET_ID, datasetInfo.getDatasetId()); @@ -172,20 +224,36 @@ public void testOf() { assertNull(datasetInfo.getDefaultEncryptionConfiguration()); 
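    // Illustrative aside (hedged, not part of the original test): the new
    // dataset-level options exercised in this file can be combined on a single
    // builder; the project and dataset names below are placeholders.
    DatasetInfo sketch =
        DatasetInfo.newBuilder("my-project", "my_dataset")
            .setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS_7_DAYS) // 168 hours = 7 days
            .setStorageBillingModel(STORAGE_BILLING_MODEL) // "LOGICAL" here; "PHYSICAL" is the other model
            .setResourceTags(RESOURCE_TAGS)
            .setExternalDatasetReference(EXTERNAL_DATASET_REFERENCE)
            .build();
    assertEquals(MAX_TIME_TRAVEL_HOURS_7_DAYS, sketch.getMaxTimeTravelHours());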
assertNull(datasetInfo.getDefaultPartitionExpirationMs()); assertTrue(datasetInfo.getLabels().isEmpty()); + assertNull(datasetInfo.getExternalDatasetReference()); + assertNull(datasetInfo.getStorageBillingModel()); + assertNull(datasetInfo.getMaxTimeTravelHours()); } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { compareDatasets(DATASET_INFO_COMPLETE, DatasetInfo.fromPb(DATASET_INFO_COMPLETE.toPb())); + compareDatasets( + DATASET_INFO_COMPLETE_WITH_EXTERNAL_DATASET_REFERENCE, + DatasetInfo.fromPb(DATASET_INFO_COMPLETE_WITH_EXTERNAL_DATASET_REFERENCE.toPb())); DatasetInfo datasetInfo = DatasetInfo.newBuilder("project", "dataset").build(); compareDatasets(datasetInfo, DatasetInfo.fromPb(datasetInfo.toPb())); } @Test - public void testSetProjectId() { + void testSetProjectId() { assertEquals(DATASET_INFO_COMPLETE, DATASET_INFO.setProjectId("project")); } + @Test + void testSetMaxTimeTravelHours() { + assertNotEquals( + DATASET_INFO_WITH_MAX_TIME_TRAVEL_5_DAYS.getMaxTimeTravelHours(), + DATASET_INFO.getMaxTimeTravelHours()); + assertEquals( + DATASET_INFO_WITH_MAX_TIME_TRAVEL_5_DAYS, + DATASET_INFO.toBuilder().setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS_5_DAYS).build()); + } + private void compareDatasets(DatasetInfo expected, DatasetInfo value) { assertEquals(expected, value); assertEquals(expected.getDatasetId(), value.getDatasetId()); @@ -204,5 +272,9 @@ private void compareDatasets(DatasetInfo expected, DatasetInfo value) { expected.getDefaultEncryptionConfiguration(), value.getDefaultEncryptionConfiguration()); assertEquals( expected.getDefaultPartitionExpirationMs(), value.getDefaultPartitionExpirationMs()); + assertEquals(expected.getExternalDatasetReference(), value.getExternalDatasetReference()); + assertEquals(expected.getStorageBillingModel(), value.getStorageBillingModel()); + assertEquals(expected.getMaxTimeTravelHours(), value.getMaxTimeTravelHours()); + assertEquals(expected.getResourceTags(), value.getResourceTags()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java index ae710a9fc9..5e19e8c825 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java @@ -16,13 +16,13 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -35,15 +35,13 @@ import com.google.common.collect.Iterables; import java.util.List; import java.util.Map; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import 
org.mockito.junit.MockitoJUnitRunner; -import org.mockito.junit.MockitoRule; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; -@RunWith(MockitoJUnitRunner.class) -public class DatasetTest { +@ExtendWith(MockitoExtension.class) +class DatasetTest { private static final DatasetId DATASET_ID = DatasetId.of("dataset"); private static final List ACCESS_RULES = @@ -66,6 +64,12 @@ public class DatasetTest { private static final String SELF_LINK = "http://bigquery/p/d"; private static final DatasetInfo DATASET_INFO = DatasetInfo.newBuilder(DATASET_ID).build(); private static final Field FIELD = Field.of("FieldName", LegacySQLTypeName.INTEGER); + private static final String STORAGE_BILLING_MODEL = "LOGICAL"; + private static final Long MAX_TIME_TRAVEL_HOURS = 168L; + private static final Map RESOURCE_TAGS = + ImmutableMap.of( + "example-key1", "example-value1", + "example-key2", "example-value2"); private static final StandardTableDefinition TABLE_DEFINITION = StandardTableDefinition.of(Schema.of(FIELD)); private static final ViewDefinition VIEW_DEFINITION = ViewDefinition.of("QUERY"); @@ -77,16 +81,25 @@ public class DatasetTest { TableInfo.newBuilder(TableId.of("dataset", "table2"), VIEW_DEFINITION).build(); private static final TableInfo TABLE_INFO3 = TableInfo.newBuilder(TableId.of("dataset", "table3"), EXTERNAL_TABLE_DEFINITION).build(); - - @Rule public MockitoRule rule; + private static final String NEW_PROJECT_ID = "projectId2"; + private static final TableId TABLE_ID1 = TableId.of(NEW_PROJECT_ID, "dataset", "table3"); + private static final TableInfo TABLE_INFO4 = + TableInfo.newBuilder( + TableId.of(NEW_PROJECT_ID, "dataset", "table3"), EXTERNAL_TABLE_DEFINITION) + .build(); + private static final ExternalDatasetReference EXTERNAL_DATASET_REFERENCE = + ExternalDatasetReference.newBuilder() + .setExternalSource("source") + .setConnection("connection") + .build(); private BigQuery bigquery; private BigQueryOptions mockOptions; private Dataset expectedDataset; private Dataset dataset; - @Before - public void setUp() { + @BeforeEach + void setUp() { bigquery = mock(BigQuery.class); mockOptions = mock(BigQueryOptions.class); when(bigquery.getOptions()).thenReturn(mockOptions); @@ -95,7 +108,7 @@ public void setUp() { } @Test - public void testBuilder() { + void testBuilder() { Dataset builtDataset = new Dataset.Builder(bigquery, DATASET_ID) .setAcl(ACCESS_RULES) @@ -109,6 +122,9 @@ public void testBuilder() { .setLocation(LOCATION) .setSelfLink(SELF_LINK) .setLabels(LABELS) + .setStorageBillingModel(STORAGE_BILLING_MODEL) + .setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS) + .setResourceTags(RESOURCE_TAGS) .build(); assertEquals(DATASET_ID, builtDataset.getDatasetId()); assertEquals(ACCESS_RULES, builtDataset.getAcl()); @@ -122,15 +138,18 @@ public void testBuilder() { assertEquals(LOCATION, builtDataset.getLocation()); assertEquals(SELF_LINK, builtDataset.getSelfLink()); assertEquals(LABELS, builtDataset.getLabels()); + assertEquals(STORAGE_BILLING_MODEL, builtDataset.getStorageBillingModel()); + assertEquals(MAX_TIME_TRAVEL_HOURS, builtDataset.getMaxTimeTravelHours()); + assertEquals(RESOURCE_TAGS, builtDataset.getResourceTags()); } @Test - public void testToBuilder() { + void testToBuilder() { compareDataset(expectedDataset, expectedDataset.toBuilder().build()); } @Test - public void testExists_True() { + void testExists_True() { BigQuery.DatasetOption[] 
expectedOptions = {BigQuery.DatasetOption.fields()}; when(bigquery.getDataset(DATASET_INFO.getDatasetId(), expectedOptions)) .thenReturn(expectedDataset); @@ -139,7 +158,7 @@ public void testExists_True() { } @Test - public void testExists_False() { + void testExists_False() { BigQuery.DatasetOption[] expectedOptions = {BigQuery.DatasetOption.fields()}; when(bigquery.getDataset(DATASET_INFO.getDatasetId(), expectedOptions)).thenReturn(null); assertFalse(dataset.exists()); @@ -147,7 +166,7 @@ public void testExists_False() { } @Test - public void testReload() { + void testReload() { DatasetInfo updatedInfo = DATASET_INFO.toBuilder().setDescription("Description").build(); Dataset expectedDataset = new Dataset(bigquery, new DatasetInfo.BuilderImpl(updatedInfo)); when(bigquery.getDataset(DATASET_INFO.getDatasetId().getDataset())).thenReturn(expectedDataset); @@ -157,14 +176,14 @@ public void testReload() { } @Test - public void testReloadNull() { + void testReloadNull() { when(bigquery.getDataset(DATASET_INFO.getDatasetId().getDataset())).thenReturn(null); assertNull(dataset.reload()); verify(bigquery).getDataset(DATASET_INFO.getDatasetId().getDataset()); } @Test - public void testReloadWithOptions() { + void testReloadWithOptions() { DatasetInfo updatedInfo = DATASET_INFO.toBuilder().setDescription("Description").build(); Dataset expectedDataset = new Dataset(bigquery, new DatasetInfo.BuilderImpl(updatedInfo)); when(bigquery.getDataset( @@ -177,7 +196,7 @@ public void testReloadWithOptions() { } @Test - public void testUpdate() { + void testUpdate() { Dataset expectedUpdatedDataset = expectedDataset.toBuilder().setDescription("Description").build(); when(bigquery.update(eq(expectedDataset))).thenReturn(expectedUpdatedDataset); @@ -187,7 +206,7 @@ public void testUpdate() { } @Test - public void testUpdateWithOptions() { + void testUpdateWithOptions() { Dataset expectedUpdatedDataset = expectedDataset.toBuilder().setDescription("Description").build(); when(bigquery.update(eq(expectedDataset), eq(BigQuery.DatasetOption.fields()))) @@ -198,21 +217,21 @@ public void testUpdateWithOptions() { } @Test - public void testDeleteTrue() { + void testDeleteTrue() { when(bigquery.delete(DATASET_INFO.getDatasetId())).thenReturn(true); assertTrue(dataset.delete()); verify(bigquery).delete(DATASET_INFO.getDatasetId()); } @Test - public void testDeleteFalse() { + void testDeleteFalse() { when(bigquery.delete(DATASET_INFO.getDatasetId())).thenReturn(false); assertFalse(dataset.delete()); verify(bigquery).delete(DATASET_INFO.getDatasetId()); } @Test - public void testList() { + void testList() { List
<Table> tableResults = ImmutableList.of( new Table(bigquery, new Table.BuilderImpl(TABLE_INFO1)), @@ -228,7 +247,7 @@ public void testList() { } @Test - public void testListWithOptions() { + void testListWithOptions() { List<Table>
    tableResults = ImmutableList.of( new Table(bigquery, new Table.BuilderImpl(TABLE_INFO1)), @@ -246,7 +265,7 @@ public void testListWithOptions() { } @Test - public void testGet() { + void testGet() { Table expectedTable = new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO1)); when(bigquery.getTable(TABLE_INFO1.getTableId())).thenReturn(expectedTable); Table table = dataset.get(TABLE_INFO1.getTableId().getTable()); @@ -256,14 +275,24 @@ public void testGet() { } @Test - public void testGetNull() { + void testGetTableWithNewProjectId() { + Table expectedTable = new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO4)); + when(bigquery.getTable(TABLE_ID1, null)).thenReturn(expectedTable); + Table table = bigquery.getTable(TABLE_ID1, null); + assertNotNull(table); + assertEquals(table.getTableId().getProject(), NEW_PROJECT_ID); + verify(bigquery).getTable(TABLE_ID1, null); + } + + @Test + void testGetNull() { when(bigquery.getTable(TABLE_INFO1.getTableId())).thenReturn(null); assertNull(dataset.get(TABLE_INFO1.getTableId().getTable())); verify(bigquery).getTable(TABLE_INFO1.getTableId()); } @Test - public void testGetWithOptions() { + void testGetWithOptions() { Table expectedTable = new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO1)); when(bigquery.getTable(TABLE_INFO1.getTableId(), BigQuery.TableOption.fields())) .thenReturn(expectedTable); @@ -274,7 +303,7 @@ public void testGetWithOptions() { } @Test - public void testCreateTable() { + void testCreateTable() { Table expectedTable = new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO1)); when(bigquery.create(TABLE_INFO1)).thenReturn(expectedTable); Table table = dataset.create(TABLE_INFO1.getTableId().getTable(), TABLE_DEFINITION); @@ -283,7 +312,7 @@ public void testCreateTable() { } @Test - public void testCreateTableWithOptions() { + void testCreateTableWithOptions() { Table expectedTable = new Table(bigquery, new TableInfo.BuilderImpl(TABLE_INFO1)); when(bigquery.create(TABLE_INFO1, BigQuery.TableOption.fields())).thenReturn(expectedTable); Table table = @@ -294,15 +323,43 @@ public void testCreateTableWithOptions() { } @Test - public void testBigQuery() { + void testBigQuery() { assertSame(bigquery, expectedDataset.getBigQuery()); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareDataset(expectedDataset, Dataset.fromPb(bigquery, expectedDataset.toPb())); } + @Test + void testExternalDatasetReference() { + Dataset datasetWithExternalDatasetReference = + new Dataset.Builder(bigquery, DATASET_ID) + .setAcl(ACCESS_RULES) + .setCreationTime(CREATION_TIME) + .setDefaultTableLifetime(DEFAULT_TABLE_EXPIRATION) + .setDescription(DESCRIPTION) + .setEtag(ETAG) + .setFriendlyName(FRIENDLY_NAME) + .setGeneratedId(GENERATED_ID) + .setLastModified(LAST_MODIFIED) + .setLocation(LOCATION) + .setSelfLink(SELF_LINK) + .setLabels(LABELS) + .setExternalDatasetReference(EXTERNAL_DATASET_REFERENCE) + .setStorageBillingModel(STORAGE_BILLING_MODEL) + .setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS) + .setResourceTags(RESOURCE_TAGS) + .build(); + assertEquals( + EXTERNAL_DATASET_REFERENCE, + datasetWithExternalDatasetReference.getExternalDatasetReference()); + compareDataset( + datasetWithExternalDatasetReference, + datasetWithExternalDatasetReference.toBuilder().build()); + } + private void compareDataset(Dataset expected, Dataset value) { assertEquals(expected, value); compareDatasetInfo(expected, value); @@ -322,5 +379,9 @@ private void compareDatasetInfo(DatasetInfo expected, DatasetInfo value) { 
assertEquals(expected.getCreationTime(), value.getCreationTime()); assertEquals(expected.getDefaultTableLifetime(), value.getDefaultTableLifetime()); assertEquals(expected.getLastModified(), value.getLastModified()); + assertEquals(expected.getExternalDatasetReference(), value.getExternalDatasetReference()); + assertEquals(expected.getStorageBillingModel(), value.getStorageBillingModel()); + assertEquals(expected.getMaxTimeTravelHours(), value.getMaxTimeTravelHours()); + assertEquals(expected.getResourceTags(), value.getResourceTags()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatastoreBackupOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatastoreBackupOptionsTest.java index af14108032..0102781190 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatastoreBackupOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatastoreBackupOptionsTest.java @@ -16,20 +16,20 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class DatastoreBackupOptionsTest { +class DatastoreBackupOptionsTest { private static final List PROJECTION_FIELDS = ImmutableList.of("field1", "field2"); private static final DatastoreBackupOptions BACKUP_OPTIONS = DatastoreBackupOptions.newBuilder().setProjectionFields(PROJECTION_FIELDS).build(); @Test - public void testToBuilder() { + void testToBuilder() { compareDatastoreBackupOptions(BACKUP_OPTIONS, BACKUP_OPTIONS.toBuilder().build()); List fields = ImmutableList.of("field1", "field2"); DatastoreBackupOptions backupOptions = @@ -40,14 +40,14 @@ public void testToBuilder() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { DatastoreBackupOptions backupOptions = DatastoreBackupOptions.newBuilder().setProjectionFields(PROJECTION_FIELDS).build(); assertEquals(backupOptions, backupOptions.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(FormatOptions.DATASTORE_BACKUP, BACKUP_OPTIONS.getType()); assertEquals(PROJECTION_FIELDS, BACKUP_OPTIONS.getProjectionFields()); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DmlStatsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DmlStatsTest.java new file mode 100644 index 0000000000..f165b60e39 --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DmlStatsTest.java @@ -0,0 +1,55 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +public class DmlStatsTest { + + private static final Long DELETED_ROW_COUNT = 10L; + private static final Long INSERTED_ROW_COUNT = 20L; + private static final Long UPDATED_ROW_COUNT = 30L; + private static final DmlStats DML_STATS = + DmlStats.newBuilder() + .setDeletedRowCount(DELETED_ROW_COUNT) + .setInsertedRowCount(INSERTED_ROW_COUNT) + .setUpdatedRowCount(UPDATED_ROW_COUNT) + .build(); + + @Test + void testBuilder() { + assertEquals(DELETED_ROW_COUNT, DML_STATS.getDeletedRowCount()); + assertEquals(UPDATED_ROW_COUNT, DML_STATS.getUpdatedRowCount()); + assertEquals(INSERTED_ROW_COUNT, DML_STATS.getInsertedRowCount()); + } + + @Test + void testToPbAndFromPb() { + compareDmlStats(DML_STATS, DmlStats.fromPb(DML_STATS.toPb())); + } + + private void compareDmlStats(DmlStats expected, DmlStats actual) { + assertEquals(expected, actual); + assertEquals(expected.hashCode(), actual.hashCode()); + assertEquals(expected.toString(), actual.toString()); + assertEquals(expected.getDeletedRowCount(), actual.getDeletedRowCount()); + assertEquals(expected.getInsertedRowCount(), actual.getInsertedRowCount()); + assertEquals(expected.getUpdatedRowCount(), actual.getUpdatedRowCount()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalDatasetReferenceTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalDatasetReferenceTest.java new file mode 100644 index 0000000000..26dfcd5dcc --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalDatasetReferenceTest.java @@ -0,0 +1,70 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; + +public class ExternalDatasetReferenceTest { + private static final String EXTERNAL_SOURCE = "test_source"; + private static final String CONNECTION = "test_connection"; + private static final ExternalDatasetReference EXTERNAL_DATASET_REFERENCE = + ExternalDatasetReference.newBuilder() + .setExternalSource(EXTERNAL_SOURCE) + .setConnection(CONNECTION) + .build(); + + @Test + void testToBuilder() { + compareExternalDatasetReference( + EXTERNAL_DATASET_REFERENCE, EXTERNAL_DATASET_REFERENCE.toBuilder().build()); + ExternalDatasetReference externalDatasetReference = + EXTERNAL_DATASET_REFERENCE.toBuilder().setExternalSource("test_source2").build(); + assertEquals("test_source2", externalDatasetReference.getExternalSource()); + } + + @Test + void testBuilder() { + assertEquals(EXTERNAL_SOURCE, EXTERNAL_DATASET_REFERENCE.getExternalSource()); + assertEquals(CONNECTION, EXTERNAL_DATASET_REFERENCE.getConnection()); + ExternalDatasetReference externalDatasetReference = + ExternalDatasetReference.newBuilder() + .setExternalSource(EXTERNAL_SOURCE) + .setConnection(CONNECTION) + .build(); + assertEquals(EXTERNAL_DATASET_REFERENCE, externalDatasetReference); + } + + @Test + void testToAndFromPb() { + ExternalDatasetReference externalDatasetReference = + EXTERNAL_DATASET_REFERENCE.toBuilder().build(); + assertTrue( + ExternalDatasetReference.fromPb(externalDatasetReference.toPb()) + instanceof ExternalDatasetReference); + compareExternalDatasetReference( + externalDatasetReference, ExternalDatasetReference.fromPb(externalDatasetReference.toPb())); + } + + private void compareExternalDatasetReference( + ExternalDatasetReference expected, ExternalDatasetReference value) { + assertEquals(expected.getExternalSource(), value.getExternalSource()); + assertEquals(expected.getConnection(), value.getConnection()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java index 23a095cb65..480b8a4972 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java @@ -16,17 +16,19 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import com.google.cloud.bigquery.ExternalTableDefinition.SourceColumnMatch; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class ExternalTableDefinitionTest { +class ExternalTableDefinitionTest { private static final List SOURCE_URIS = ImmutableList.of("uri1", "uri2"); + private static final List DECIMAL_TARGET_TYPES = + ImmutableList.of("NUMERIC", "BIGNUMERIC", "STRING"); private static final Field FIELD_SCHEMA1 = Field.newBuilder("StringField", LegacySQLTypeName.STRING) .setMode(Field.Mode.NULLABLE) @@ -48,37 +50,65 @@ public class ExternalTableDefinitionTest { private static final String COMPRESSION = "GZIP"; private static final String 
CONNECTION_ID = "123456789"; private static final Boolean AUTODETECT = true; + private static final AvroOptions AVRO_OPTIONS = AvroOptions.newBuilder().build(); private static final CsvOptions CSV_OPTIONS = CsvOptions.newBuilder().build(); + private static final ParquetOptions PARQUET_OPTIONS = ParquetOptions.newBuilder().build(); private static final HivePartitioningOptions HIVE_PARTITIONING_OPTIONS = HivePartitioningOptions.newBuilder() .setMode("AUTO") .setSourceUriPrefix(SOURCE_URIS.get(0)) .build(); + private static final String OBJECT_METADATA = "SIMPLE"; + private static final String METADATA_CACHE_MODE = "AUTOMATIC"; + private static final String MAX_STALENESS = "INTERVAL 15 MINUTE"; + private static final String TIME_ZONE = "America/Los_Angeles"; + private static final String DATE_FORMAT = "YYYY-MM-DD"; + private static final String DATETIME_FORMAT = "YYYY-MM-DD HH:MI:SS"; + private static final String TIME_FORMAT = "HH:MI:SS"; + private static final String TIMESTAMP_FORMAT = "YYYY-MM-DD HH:MI:SS"; + private static final SourceColumnMatch SOURCE_COLUMN_MATCH = SourceColumnMatch.POSITION; + private static final List NULL_MARKERS = ImmutableList.of("SQL NULL", "TEST_MARKER"); private static final ExternalTableDefinition EXTERNAL_TABLE_DEFINITION = ExternalTableDefinition.newBuilder(SOURCE_URIS, TABLE_SCHEMA, CSV_OPTIONS) + .setFileSetSpecType("FILE_SET_SPEC_TYPE_FILE_SYSTEM_MATCH") + .setDecimalTargetTypes(DECIMAL_TARGET_TYPES) .setCompression(COMPRESSION) .setConnectionId(CONNECTION_ID) .setIgnoreUnknownValues(IGNORE_UNKNOWN_VALUES) .setMaxBadRecords(MAX_BAD_RECORDS) .setAutodetect(AUTODETECT) .setHivePartitioningOptions(HIVE_PARTITIONING_OPTIONS) + .setObjectMetadata(OBJECT_METADATA) + .setMetadataCacheMode(METADATA_CACHE_MODE) + .setMaxStaleness(MAX_STALENESS) + .setTimeZone(TIME_ZONE) + .setDateFormat(DATE_FORMAT) + .setDatetimeFormat(DATETIME_FORMAT) + .setTimeFormat(TIME_FORMAT) + .setTimestampFormat(TIMESTAMP_FORMAT) + .setSourceColumnMatch(SOURCE_COLUMN_MATCH) + .setNullMarkers(NULL_MARKERS) .build(); + private static final ExternalTableDefinition EXTERNAL_TABLE_DEFINITION_AVRO = + ExternalTableDefinition.newBuilder(SOURCE_URIS, TABLE_SCHEMA, AVRO_OPTIONS).build(); + + private static final ExternalTableDefinition EXTERNAL_TABLE_DEFINITION_PARQUET = + ExternalTableDefinition.newBuilder(SOURCE_URIS, TABLE_SCHEMA, PARQUET_OPTIONS).build(); + @Test - public void testToBuilder() { + void testToBuilder() { compareExternalTableDefinition( EXTERNAL_TABLE_DEFINITION, EXTERNAL_TABLE_DEFINITION.toBuilder().build()); ExternalTableDefinition externalTableDefinition = - EXTERNAL_TABLE_DEFINITION - .toBuilder() + EXTERNAL_TABLE_DEFINITION.toBuilder() .setCompression("NONE") .setConnectionId("00000") .build(); assertEquals("NONE", externalTableDefinition.getCompression()); assertEquals("00000", externalTableDefinition.getConnectionId()); externalTableDefinition = - externalTableDefinition - .toBuilder() + externalTableDefinition.toBuilder() .setCompression(COMPRESSION) .setConnectionId(CONNECTION_ID) .build(); @@ -86,38 +116,45 @@ public void testToBuilder() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { ExternalTableDefinition externalTableDefinition = ExternalTableDefinition.of(SOURCE_URIS, TABLE_SCHEMA, FormatOptions.json()); assertEquals(externalTableDefinition, externalTableDefinition.toBuilder().build()); } @Test - public void testTypeNullPointerException() { - try { - EXTERNAL_TABLE_DEFINITION.toBuilder().setType(null).build(); - } catch 
(NullPointerException ex) { - assertNotNull(ex.getMessage()); - } + void testTypeNullPointerException() { + org.junit.jupiter.api.Assertions.assertThrows( + NullPointerException.class, + () -> EXTERNAL_TABLE_DEFINITION.toBuilder().setType(null).build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(TableDefinition.Type.EXTERNAL, EXTERNAL_TABLE_DEFINITION.getType()); assertEquals(COMPRESSION, EXTERNAL_TABLE_DEFINITION.getCompression()); assertEquals(CONNECTION_ID, EXTERNAL_TABLE_DEFINITION.getConnectionId()); + assertEquals(AVRO_OPTIONS, EXTERNAL_TABLE_DEFINITION_AVRO.getFormatOptions()); assertEquals(CSV_OPTIONS, EXTERNAL_TABLE_DEFINITION.getFormatOptions()); assertEquals(IGNORE_UNKNOWN_VALUES, EXTERNAL_TABLE_DEFINITION.ignoreUnknownValues()); assertEquals(MAX_BAD_RECORDS, EXTERNAL_TABLE_DEFINITION.getMaxBadRecords()); assertEquals(TABLE_SCHEMA, EXTERNAL_TABLE_DEFINITION.getSchema()); assertEquals(SOURCE_URIS, EXTERNAL_TABLE_DEFINITION.getSourceUris()); + assertEquals(DECIMAL_TARGET_TYPES, EXTERNAL_TABLE_DEFINITION.getDecimalTargetTypes()); assertEquals(AUTODETECT, EXTERNAL_TABLE_DEFINITION.getAutodetect()); assertEquals(HIVE_PARTITIONING_OPTIONS, EXTERNAL_TABLE_DEFINITION.getHivePartitioningOptions()); + assertEquals(TIME_ZONE, EXTERNAL_TABLE_DEFINITION.getTimeZone()); + assertEquals(DATE_FORMAT, EXTERNAL_TABLE_DEFINITION.getDateFormat()); + assertEquals(DATETIME_FORMAT, EXTERNAL_TABLE_DEFINITION.getDatetimeFormat()); + assertEquals(TIME_FORMAT, EXTERNAL_TABLE_DEFINITION.getTimeFormat()); + assertEquals(TIMESTAMP_FORMAT, EXTERNAL_TABLE_DEFINITION.getTimestampFormat()); + assertEquals(SOURCE_COLUMN_MATCH, EXTERNAL_TABLE_DEFINITION.getSourceColumnMatch()); + assertEquals(NULL_MARKERS, EXTERNAL_TABLE_DEFINITION.getNullMarkers()); assertNotEquals(EXTERNAL_TABLE_DEFINITION, TableDefinition.Type.EXTERNAL); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareExternalTableDefinition( EXTERNAL_TABLE_DEFINITION, ExternalTableDefinition.fromPb(EXTERNAL_TABLE_DEFINITION.toPb())); @@ -127,9 +164,22 @@ public void testToAndFromPb() { externalTableDefinition, ExternalTableDefinition.fromPb(externalTableDefinition.toPb())); } + @Test + void testToAndFromPbParquet() { + compareExternalTableDefinition( + EXTERNAL_TABLE_DEFINITION_PARQUET, + ExternalTableDefinition.fromPb(EXTERNAL_TABLE_DEFINITION_PARQUET.toPb())); + ExternalTableDefinition externalTableDefinition = + ExternalTableDefinition.newBuilder(SOURCE_URIS, TABLE_SCHEMA, PARQUET_OPTIONS).build(); + compareExternalTableDefinition( + externalTableDefinition, ExternalTableDefinition.fromPb(externalTableDefinition.toPb())); + } + private void compareExternalTableDefinition( ExternalTableDefinition expected, ExternalTableDefinition value) { assertEquals(expected, value); + assertEquals(expected.getFileSetSpecType(), value.getFileSetSpecType()); + assertEquals(expected.getDecimalTargetTypes(), value.getDecimalTargetTypes()); assertEquals(expected.getCompression(), value.getCompression()); assertEquals(expected.getConnectionId(), value.getConnectionId()); assertEquals(expected.getFormatOptions(), value.getFormatOptions()); @@ -140,5 +190,15 @@ private void compareExternalTableDefinition( assertEquals(expected.hashCode(), value.hashCode()); assertEquals(expected.getAutodetect(), value.getAutodetect()); assertEquals(expected.getHivePartitioningOptions(), value.getHivePartitioningOptions()); + assertEquals(expected.getObjectMetadata(), value.getObjectMetadata()); + 
assertEquals(expected.getMetadataCacheMode(), value.getMetadataCacheMode()); + assertEquals(expected.getMaxStaleness(), value.getMaxStaleness()); + assertEquals(expected.getTimeZone(), value.getTimeZone()); + assertEquals(expected.getDateFormat(), value.getDateFormat()); + assertEquals(expected.getDatetimeFormat(), value.getDatetimeFormat()); + assertEquals(expected.getTimeFormat(), value.getTimeFormat()); + assertEquals(expected.getTimestampFormat(), value.getTimestampFormat()); + assertEquals(expected.getSourceColumnMatch(), value.getSourceColumnMatch()); + assertEquals(expected.getNullMarkers(), value.getNullMarkers()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExtractJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExtractJobConfigurationTest.java index 95142a0680..d7ce318f1b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExtractJobConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExtractJobConfigurationTest.java @@ -16,15 +16,15 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class ExtractJobConfigurationTest { @@ -43,6 +43,7 @@ public class ExtractJobConfigurationTest { private static final Map LABELS = ImmutableMap.of("test-job-name", "test-extract-job"); private static final Long TIMEOUT = 10L; + private static final String RESERVATION = "reservation"; private static final ExtractJobConfiguration EXTRACT_CONFIGURATION = ExtractJobConfiguration.newBuilder(TABLE_ID, DESTINATION_URIS) .setPrintHeader(PRINT_HEADER) @@ -51,6 +52,7 @@ public class ExtractJobConfigurationTest { .setFormat(FORMAT) .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) + .setReservation(RESERVATION) .build(); private static final ExtractJobConfiguration EXTRACT_CONFIGURATION_ONE_URI = ExtractJobConfiguration.newBuilder(TABLE_ID, DESTINATION_URI) @@ -60,6 +62,7 @@ public class ExtractJobConfigurationTest { .setFormat(FORMAT) .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) + .setReservation(RESERVATION) .build(); private static final ExtractJobConfiguration EXTRACT_CONFIGURATION_AVRO = ExtractJobConfiguration.newBuilder(TABLE_ID, DESTINATION_URI) @@ -70,6 +73,7 @@ public class ExtractJobConfigurationTest { .setUseAvroLogicalTypes(USEAVROLOGICALTYPES) .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) + .setReservation(RESERVATION) .build(); private static final ExtractJobConfiguration EXTRACT_CONFIGURATION_MODEL = ExtractJobConfiguration.newBuilder(MODEL_ID, DESTINATION_URIS) @@ -80,10 +84,11 @@ public class ExtractJobConfigurationTest { .setUseAvroLogicalTypes(USEAVROLOGICALTYPES) .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) + .setReservation(RESERVATION) .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareExtractJobConfiguration( EXTRACT_CONFIGURATION, EXTRACT_CONFIGURATION.toBuilder().build()); ExtractJobConfiguration job = @@ -92,8 +97,7 @@ public void testToBuilder() { compareExtractJobConfiguration( EXTRACT_CONFIGURATION_MODEL, 
EXTRACT_CONFIGURATION_MODEL.toBuilder().build()); ExtractJobConfiguration modelJob = - EXTRACT_CONFIGURATION_MODEL - .toBuilder() + EXTRACT_CONFIGURATION_MODEL.toBuilder() .setSourceModel(ModelId.of("dataset", "newModel")) .build(); assertEquals("newModel", modelJob.getSourceModel().getModel()); @@ -102,8 +106,7 @@ public void testToBuilder() { compareExtractJobConfiguration( EXTRACT_CONFIGURATION_AVRO, EXTRACT_CONFIGURATION_AVRO.toBuilder().build()); ExtractJobConfiguration avroJob = - EXTRACT_CONFIGURATION_AVRO - .toBuilder() + EXTRACT_CONFIGURATION_AVRO.toBuilder() .setSourceTable(TableId.of("dataset", "avroTable")) .build(); assertEquals("avroTable", avroJob.getSourceTable().getTable()); @@ -187,6 +190,7 @@ public void testBuilder() { assertEquals(FORMAT, EXTRACT_CONFIGURATION_MODEL.getFormat()); assertEquals(LABELS, EXTRACT_CONFIGURATION_MODEL.getLabels()); assertEquals(TIMEOUT, EXTRACT_CONFIGURATION_MODEL.getJobTimeoutMs()); + assertEquals(RESERVATION, EXTRACT_CONFIGURATION_MODEL.getReservation()); } @Test @@ -223,15 +227,13 @@ public void testSetProjectId() { @Test public void testSetProjectIdDoNotOverride() { ExtractJobConfiguration configuration = - EXTRACT_CONFIGURATION - .toBuilder() + EXTRACT_CONFIGURATION.toBuilder() .setSourceTable(TABLE_ID.setProjectId(TEST_PROJECT_ID)) .build() .setProjectId("do-not-update"); assertEquals(TEST_PROJECT_ID, configuration.getSourceTable().getProject()); ExtractJobConfiguration modelConfiguration = - EXTRACT_CONFIGURATION_MODEL - .toBuilder() + EXTRACT_CONFIGURATION_MODEL.toBuilder() .setSourceModel(MODEL_ID.setProjectId(TEST_PROJECT_ID)) .build() .setProjectId("do-not-update"); @@ -260,5 +262,6 @@ private void compareExtractJobConfiguration( assertEquals(expected.getFormat(), value.getFormat()); assertEquals(expected.getLabels(), value.getLabels()); assertEquals(expected.getJobTimeoutMs(), value.getJobTimeoutMs()); + assertEquals(expected.getReservation(), value.getReservation()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldElementTypeTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldElementTypeTest.java new file mode 100644 index 0000000000..7821b93219 --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldElementTypeTest.java @@ -0,0 +1,52 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.google.api.services.bigquery.model.QueryParameterType; +import org.junit.jupiter.api.Test; + +public class FieldElementTypeTest { + private static final FieldElementType FIELD_ELEMENT_TYPE = + FieldElementType.newBuilder().setType("DATE").build(); + + @Test + public void testToBuilder() { + compareFieldElementType(FIELD_ELEMENT_TYPE, FIELD_ELEMENT_TYPE.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals("DATE", FIELD_ELEMENT_TYPE.getType()); + } + + @Test + public void testFromAndPb() { + assertEquals(FIELD_ELEMENT_TYPE, FieldElementType.fromPb(FIELD_ELEMENT_TYPE.toPb())); + assertEquals( + FIELD_ELEMENT_TYPE, + FieldElementType.fromPb( + new QueryParameterType() + .setRangeElementType(new QueryParameterType().setType("DATE")))); + } + + private void compareFieldElementType(FieldElementType expected, FieldElementType value) { + assertEquals(expected.getType(), value.getType()); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldListTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldListTest.java index 999bbf1b02..9f63716427 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldListTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldListTest.java @@ -16,13 +16,14 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class FieldListTest { +class FieldListTest { private static final String FIELD_NAME1 = "StringField"; private static final String FIELD_NAME2 = "IntegerField"; private static final String FIELD_NAME3 = "RecordField"; @@ -63,7 +64,7 @@ public class FieldListTest { private final FieldList fieldsSchema = FieldList.of(fieldSchema1, fieldSchema2, fieldSchema3); @Test - public void testGetByName() { + void testGetByName() { assertEquals(fieldSchema1, fieldsSchema.get(FIELD_NAME1)); assertEquals(fieldSchema2, fieldsSchema.get(FIELD_NAME2)); assertEquals(fieldSchema3, fieldsSchema.get(FIELD_NAME3)); @@ -76,34 +77,26 @@ public void testGetByName() { assertEquals(3, fieldsSchema.size()); - IllegalArgumentException exception = null; - try { - fieldsSchema.get(FIELD_NAME4); - } catch (IllegalArgumentException e) { - exception = e; - } + IllegalArgumentException exception = + assertThrows(IllegalArgumentException.class, () -> fieldsSchema.get(FIELD_NAME4)); assertNotNull(exception); } @Test - public void testGetByIndex() { + void testGetByIndex() { assertEquals(fieldSchema1, fieldsSchema.get(0)); assertEquals(fieldSchema2, fieldsSchema.get(1)); assertEquals(fieldSchema3, fieldsSchema.get(2)); assertEquals(3, fieldsSchema.size()); - IndexOutOfBoundsException exception = null; - try { - fieldsSchema.get(4); - } catch (IndexOutOfBoundsException e) { - exception = e; - } + IndexOutOfBoundsException exception = + assertThrows(IndexOutOfBoundsException.class, () -> fieldsSchema.get(4)); 
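    // Illustrative aside (not part of the original test): JUnit 5's assertThrows
    // returns the caught exception, so the try/catch-and-capture pattern removed
    // in this migration collapses into a single expression whose result can
    // still be inspected afterwards.
    IndexOutOfBoundsException outOfRange =
        assertThrows(
            IndexOutOfBoundsException.class, () -> fieldsSchema.get(fieldsSchema.size()));
    assertNotNull(outOfRange);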
assertNotNull(exception); } @Test - public void testGetRecordSchema() { + void testGetRecordSchema() { assertEquals(2, fieldSchema3.getSubFields().size()); assertEquals(fieldSchema1, fieldSchema3.getSubFields().get(FIELD_NAME1)); assertEquals(fieldSchema2, fieldSchema3.getSubFields().get(FIELD_NAME2)); @@ -122,7 +115,7 @@ public void testGetRecordSchema() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { assertEquals(fieldsSchema, FieldList.of(fieldSchema1, fieldSchema2, fieldSchema3)); assertNotEquals(fieldsSchema, FieldList.of(fieldSchema1, fieldSchema3)); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java index cac8ab1b8f..72f8bb3e82 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java @@ -16,20 +16,23 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class FieldTest { private static final String FIELD_NAME1 = "StringField"; private static final String FIELD_NAME2 = "IntegerField"; private static final String FIELD_NAME3 = "RecordField"; + private static final String FIELD_NAME4 = "NullModeField"; + private static final String FIELD_NAME5 = "NullModeField2"; private static final LegacySQLTypeName FIELD_TYPE1 = LegacySQLTypeName.STRING; private static final LegacySQLTypeName FIELD_TYPE2 = LegacySQLTypeName.INTEGER; private static final StandardSQLTypeName FIELD_TYPE1_STANDARD = StandardSQLTypeName.STRING; @@ -39,10 +42,13 @@ public class FieldTest { private static final String FIELD_DESCRIPTION1 = "FieldDescription1"; private static final String FIELD_DESCRIPTION2 = "FieldDescription2"; private static final String FIELD_DESCRIPTION3 = "FieldDescription3"; + private static final String FIELD_DEFAULT_VALUE_EXPRESSION1 = + "This is default value for this field"; private static final Field FIELD_SCHEMA1 = Field.newBuilder(FIELD_NAME1, FIELD_TYPE1) .setMode(FIELD_MODE1) .setDescription(FIELD_DESCRIPTION1) + .setDefaultValueExpression(FIELD_DEFAULT_VALUE_EXPRESSION1) .build(); private static final Field FIELD_SCHEMA2 = Field.newBuilder(FIELD_NAME2, FIELD_TYPE2) @@ -60,6 +66,7 @@ public class FieldTest { Field.newBuilder(FIELD_NAME1, StandardSQLTypeName.STRING) .setMode(FIELD_MODE1) .setDescription(FIELD_DESCRIPTION1) + .setDefaultValueExpression(FIELD_DEFAULT_VALUE_EXPRESSION1) .build(); private static final Field STANDARD_FIELD_SCHEMA2 = Field.newBuilder(FIELD_NAME2, StandardSQLTypeName.INT64) @@ -75,6 +82,10 @@ public class FieldTest { .setMode(FIELD_MODE3) .setDescription(FIELD_DESCRIPTION3) .build(); + private static final Field STANDARD_FIELD_SCHEMA4 = + Field.newBuilder(FIELD_NAME4, StandardSQLTypeName.INT64).setMode(null).build(); + private static final Field STANDARD_FIELD_SCHEMA5 = + Field.newBuilder(FIELD_NAME5, StandardSQLTypeName.STRING).build(); @Test public void testToBuilder() { @@ -92,6 +103,8 @@ public void testToBuilderWithStandardSQLTypeName() { compareFieldSchemas(STANDARD_FIELD_SCHEMA1, 
STANDARD_FIELD_SCHEMA1.toBuilder().build()); compareFieldSchemas(STANDARD_FIELD_SCHEMA2, STANDARD_FIELD_SCHEMA2.toBuilder().build()); compareFieldSchemas(STANDARD_FIELD_SCHEMA3, STANDARD_FIELD_SCHEMA3.toBuilder().build()); + compareFieldSchemas(STANDARD_FIELD_SCHEMA4, STANDARD_FIELD_SCHEMA4.toBuilder().build()); + compareFieldSchemas(STANDARD_FIELD_SCHEMA5, STANDARD_FIELD_SCHEMA5.toBuilder().build()); Field field = STANDARD_FIELD_SCHEMA1.toBuilder().setDescription("New Description").build(); assertEquals("New Description", field.getDescription()); field = field.toBuilder().setDescription(FIELD_DESCRIPTION1).build(); @@ -137,6 +150,7 @@ public void testBuilder() { assertEquals(FIELD_TYPE1, FIELD_SCHEMA1.getType()); assertEquals(FIELD_MODE1, FIELD_SCHEMA1.getMode()); assertEquals(FIELD_DESCRIPTION1, FIELD_SCHEMA1.getDescription()); + assertEquals(FIELD_DEFAULT_VALUE_EXPRESSION1, FIELD_SCHEMA1.getDefaultValueExpression()); assertEquals(null, FIELD_SCHEMA1.getSubFields()); assertEquals(FIELD_NAME3, FIELD_SCHEMA3.getName()); assertEquals(FIELD_TYPE3, FIELD_SCHEMA3.getType()); @@ -151,11 +165,14 @@ public void testBuilderWithStandardSQLTypeName() { assertEquals(FIELD_TYPE1, STANDARD_FIELD_SCHEMA1.getType()); assertEquals(FIELD_MODE1, STANDARD_FIELD_SCHEMA1.getMode()); assertEquals(FIELD_DESCRIPTION1, STANDARD_FIELD_SCHEMA1.getDescription()); + assertEquals(FIELD_DEFAULT_VALUE_EXPRESSION1, STANDARD_FIELD_SCHEMA1.getDefaultValueExpression()); assertEquals(null, STANDARD_FIELD_SCHEMA1.getSubFields()); assertEquals(FIELD_NAME3, STANDARD_FIELD_SCHEMA3.getName()); assertEquals(FIELD_TYPE3, STANDARD_FIELD_SCHEMA3.getType()); assertEquals(FIELD_MODE3, STANDARD_FIELD_SCHEMA3.getMode()); assertEquals(FIELD_DESCRIPTION3, STANDARD_FIELD_SCHEMA3.getDescription()); + assertEquals(null, STANDARD_FIELD_SCHEMA4.getMode()); + assertEquals(null, STANDARD_FIELD_SCHEMA5.getMode()); assertEquals( FieldList.of(STANDARD_FIELD_SCHEMA1, STANDARD_FIELD_SCHEMA2), STANDARD_FIELD_SCHEMA3.getSubFields()); @@ -175,6 +192,8 @@ public void testToAndFromPbWithStandardSQLTypeName() { compareFieldSchemas(STANDARD_FIELD_SCHEMA1, Field.fromPb(STANDARD_FIELD_SCHEMA1.toPb())); compareFieldSchemas(STANDARD_FIELD_SCHEMA2, Field.fromPb(STANDARD_FIELD_SCHEMA2.toPb())); compareFieldSchemas(STANDARD_FIELD_SCHEMA3, Field.fromPb(STANDARD_FIELD_SCHEMA3.toPb())); + compareFieldSchemas(STANDARD_FIELD_SCHEMA4, Field.fromPb(STANDARD_FIELD_SCHEMA4.toPb())); + compareFieldSchemas(STANDARD_FIELD_SCHEMA5, Field.fromPb(STANDARD_FIELD_SCHEMA5.toPb())); Field field = Field.newBuilder(FIELD_NAME1, FIELD_TYPE1).build(); compareFieldSchemas(field, Field.fromPb(field.toPb())); } @@ -195,6 +214,20 @@ public void testSubFieldWithClonedType() throws Exception { Field.of("field", clonedRecord, Field.of("subfield", LegacySQLTypeName.BOOLEAN)); } + @Test + public void setTimestampPrecisionValues() { + Field.Builder builder = Field.newBuilder(FIELD_NAME1, FIELD_TYPE1); + + // Valid values: 6L or 12L + builder.setTimestampPrecision(6L); + builder.setTimestampPrecision(12L); + + assertThrows(IllegalArgumentException.class, () -> builder.setTimestampPrecision(-1L)); + assertThrows(IllegalArgumentException.class, () -> builder.setTimestampPrecision(0L)); + assertThrows(IllegalArgumentException.class, () -> builder.setTimestampPrecision(5L)); + assertThrows(IllegalArgumentException.class, () -> builder.setTimestampPrecision(13L)); + } + private void compareFieldSchemas(Field expected, Field value) { assertEquals(expected, value); assertEquals(expected.getName(), value.getName());
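The setTimestampPrecisionValues test above pins down the validation contract: only 6L and 12L are accepted, and everything else throws IllegalArgumentException. A minimal sketch of how the two new Field builder knobs from this diff combine in application code, assuming only the setters shown above; the field name and default-value SQL expression are placeholders, and the micro- vs. picosecond reading of 6 and 12 is an inference, not stated by the test:

import com.google.cloud.bigquery.Field;
import com.google.cloud.bigquery.StandardSQLTypeName;

public class TimestampFieldSketch {
  public static void main(String[] args) {
    // Builds a nullable TIMESTAMP field that defaults to the insert time.
    Field eventTime =
        Field.newBuilder("event_time", StandardSQLTypeName.TIMESTAMP)
            .setMode(Field.Mode.NULLABLE)
            .setDefaultValueExpression("CURRENT_TIMESTAMP()")
            // Per the test above, only 6L or 12L pass validation
            // (presumably microsecond vs. picosecond digits); 5L, 13L, etc. throw.
            .setTimestampPrecision(6L)
            .build();
    System.out.println(eventTime);
  }
}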
diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueListTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueListTest.java index 7d10a97504..dd5092b1c1 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueListTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueListTest.java @@ -16,8 +16,10 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import com.google.api.client.util.Data; import com.google.api.services.bigquery.model.TableCell; @@ -27,9 +29,9 @@ import com.google.common.io.BaseEncoding; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class FieldValueListTest { +class FieldValueListTest { private static final byte[] BYTES = {0xD, 0xE, 0xA, 0xD}; private static final String BYTES_BASE64 = BaseEncoding.base64().encode(BYTES); private static final TableCell booleanPb = new TableCell().setV("false"); @@ -52,6 +54,12 @@ public class FieldValueListTest { Field.of("tenth", LegacySQLTypeName.NUMERIC), Field.of("eleventh", LegacySQLTypeName.BIGNUMERIC)); + private final FieldList schemaLosslessTimestamp = + FieldList.of( + Field.of("first", LegacySQLTypeName.BOOLEAN), + Field.of("second", LegacySQLTypeName.INTEGER), + Field.of("third", LegacySQLTypeName.TIMESTAMP)); + private final Map integerPb = ImmutableMap.of("v", "1"); private final Map floatPb = ImmutableMap.of("v", "1.5"); private final Map stringPb = ImmutableMap.of("v", "string"); @@ -68,10 +76,15 @@ public class FieldValueListTest { "v", "99999999999999999999999999999999999999.99999999999999999999999999999999999999"); private final FieldValue booleanFv = FieldValue.of(Attribute.PRIMITIVE, "false"); + private final FieldValue booleanLosslessTimestampFv = + FieldValue.of(Attribute.PRIMITIVE, "false", true); private final FieldValue integerFv = FieldValue.of(Attribute.PRIMITIVE, "1"); + private final FieldValue integerLosslessTimestampFv = + FieldValue.of(Attribute.PRIMITIVE, "1", true); private final FieldValue floatFv = FieldValue.of(Attribute.PRIMITIVE, "1.5"); private final FieldValue stringFv = FieldValue.of(Attribute.PRIMITIVE, "string"); private final FieldValue timestampFv = FieldValue.of(Attribute.PRIMITIVE, "42"); + private final FieldValue losslessTimestampFv = FieldValue.of(Attribute.PRIMITIVE, "42", true); private final FieldValue bytesFv = FieldValue.of(Attribute.PRIMITIVE, BYTES_BASE64); private final FieldValue nullFv = FieldValue.of(Attribute.PRIMITIVE, null); private final FieldValue repeatedFv = @@ -117,15 +130,29 @@ public class FieldValueListTest { bigNumericFv), schema); + private final List fieldValuesLosslessTimestampPb = + ImmutableList.of(booleanPb, integerPb, timestampPb); + private final FieldValueList fieldValuesLosslessTimestamp = + FieldValueList.of( + ImmutableList.of( + booleanLosslessTimestampFv, integerLosslessTimestampFv, losslessTimestampFv), + schemaLosslessTimestamp); + @Test - public void testFromPb() { + void testFromPb() { assertEquals(fieldValues, FieldValueList.fromPb(fieldValuesPb, schema)); // Schema does not influence values equality assertEquals(fieldValues, 
FieldValueList.fromPb(fieldValuesPb, null)); + + assertNotEquals(fieldValues, FieldValueList.fromPb(fieldValuesPb, null, true)); + + assertEquals( + fieldValuesLosslessTimestamp, + FieldValueList.fromPb(fieldValuesLosslessTimestampPb, null, true)); } @Test - public void testGetByIndex() { + void testGetByIndex() { assertEquals(11, fieldValues.size()); assertEquals(booleanFv, fieldValues.get(0)); assertEquals(integerFv, fieldValues.get(1)); @@ -147,7 +174,7 @@ public void testGetByIndex() { } @Test - public void testGetByName() { + void testGetByName() { assertEquals(11, fieldValues.size()); assertEquals(booleanFv, fieldValues.get("first")); assertEquals(integerFv, fieldValues.get("second")); @@ -169,7 +196,7 @@ public void testGetByName() { } @Test - public void testNullSchema() { + void testNullSchema() { FieldValueList fieldValuesNoSchema = FieldValueList.of( ImmutableList.of( @@ -187,25 +214,15 @@ public void testNullSchema() { assertEquals(fieldValues, fieldValuesNoSchema); - UnsupportedOperationException exception = null; - try { - fieldValuesNoSchema.get("first"); - } catch (UnsupportedOperationException e) { - exception = e; - } - + UnsupportedOperationException exception = + assertThrows(UnsupportedOperationException.class, () -> fieldValuesNoSchema.get("first")); assertNotNull(exception); } @Test - public void testGetNonExistentField() { - IllegalArgumentException exception = null; - try { - fieldValues.get("nonexistent"); - } catch (IllegalArgumentException e) { - exception = e; - } - + void testGetNonExistentField() { + IllegalArgumentException exception = + assertThrows(IllegalArgumentException.class, () -> fieldValues.get("nonexistent")); assertNotNull(exception); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueTest.java index e4ec47b472..958e206598 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueTest.java @@ -16,10 +16,11 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.api.client.util.Data; import com.google.api.services.bigquery.model.TableCell; @@ -27,8 +28,13 @@ import com.google.common.collect.ImmutableMap; import com.google.common.io.BaseEncoding; import java.math.BigDecimal; +import java.time.Duration; +import java.time.Period; +import java.util.LinkedHashMap; import java.util.Map; -import org.junit.Test; +import java.util.Map.Entry; +import org.junit.jupiter.api.Test; +import org.threeten.extra.PeriodDuration; public class FieldValueTest { @@ -43,9 +49,16 @@ public class FieldValueTest { ImmutableMap.of("v", "123456789.123456789"); private static final Map<String, String> STRING_FIELD = ImmutableMap.of("v", "string"); private static final Map<String, String> TIMESTAMP_FIELD = ImmutableMap.of("v", "42"); + private static final Map<String, String> INTERVAL_FIELD_1 = + ImmutableMap.of("v", "P3Y2M1DT12H34M56.789S"); + private static final Map<String, String> INTERVAL_FIELD_2 = + ImmutableMap.of("v", "3-2 1 12:34:56.789"); private static final Map<String, String> BYTES_FIELD = ImmutableMap.of("v", BYTES_BASE64); private static final Map<String, String> NULL_FIELD = ImmutableMap.of("v", Data.nullOf(String.class)); + + private static final Map<String, String> RANGE_FIELD = ImmutableMap.of("v", "[start, end)"); + private static final Map<String, Object> REPEATED_FIELD = ImmutableMap.of("v", ImmutableList.of(INTEGER_FIELD, INTEGER_FIELD)); private static final Map<String, Object> RECORD_FIELD = @@ -65,20 +78,38 @@ public void testFromPb() { value = FieldValue.fromPb(GEOGRAPHY_FIELD); assertEquals(FieldValue.Attribute.PRIMITIVE, value.getAttribute()); assertEquals("POINT(-122.350220 47.649154)", value.getStringValue()); + assertEquals("POINT(-122.350220 47.649154)", value.getStringValueOrDefault(null)); value = FieldValue.fromPb(NUMERIC_FIELD); assertEquals(FieldValue.Attribute.PRIMITIVE, value.getAttribute()); assertEquals(new BigDecimal("123456789.123456789"), value.getNumericValue()); value = FieldValue.fromPb(STRING_FIELD); assertEquals(FieldValue.Attribute.PRIMITIVE, value.getAttribute()); assertEquals("string", value.getStringValue()); + assertEquals("string", value.getStringValueOrDefault(null)); value = FieldValue.fromPb(TIMESTAMP_FIELD); assertEquals(FieldValue.Attribute.PRIMITIVE, value.getAttribute()); assertEquals(42000000, value.getTimestampValue()); + value = FieldValue.fromPb(INTERVAL_FIELD_1); + assertEquals(FieldValue.Attribute.PRIMITIVE, value.getAttribute()); + PeriodDuration periodDuration = + PeriodDuration.of(Period.of(3, 2, 1), Duration.parse("PT12H34M56.789S")); + assertEquals(periodDuration, value.getPeriodDuration()); + assertEquals("P3Y2M1DT12H34M56.789S", value.getStringValue()); + assertEquals("P3Y2M1DT12H34M56.789S", value.getStringValueOrDefault(null)); + value = FieldValue.fromPb(INTERVAL_FIELD_2); + assertEquals(FieldValue.Attribute.PRIMITIVE, value.getAttribute()); + periodDuration = PeriodDuration.of(Period.of(3, 2, 1), Duration.parse("PT12H34M56.789S")); + assertEquals(periodDuration, value.getPeriodDuration()); + assertEquals("3-2 1 12:34:56.789", value.getStringValue()); + assertEquals("3-2 1 12:34:56.789", value.getStringValueOrDefault(null)); value = FieldValue.fromPb(BYTES_FIELD); assertEquals(FieldValue.Attribute.PRIMITIVE, value.getAttribute()); assertArrayEquals(BYTES, value.getBytesValue()); value = FieldValue.fromPb(NULL_FIELD); assertNull(value.getValue()); + value = FieldValue.fromPb(RANGE_FIELD); + assertEquals(FieldValue.Attribute.PRIMITIVE, value.getAttribute()); + assertEquals(Range.of(RANGE_FIELD.get("v")), value.getRangeValue()); value = FieldValue.fromPb(REPEATED_FIELD); assertEquals(FieldValue.Attribute.REPEATED, value.getAttribute()); assertEquals(FieldValue.fromPb(INTEGER_FIELD), value.getRepeatedValue().get(0)); @@ -87,6 +118,10 @@ public void testFromPb() { assertEquals(FieldValue.Attribute.RECORD, value.getAttribute()); assertEquals(FieldValue.fromPb(FLOAT_FIELD), value.getRepeatedValue().get(0)); assertEquals(FieldValue.fromPb(TIMESTAMP_FIELD), value.getRepeatedValue().get(1)); + value = FieldValue.fromPb(NULL_FIELD); + assertTrue(value.isNull()); + assertNull(value.getStringValueOrDefault(null)); + assertEquals("defaultValue", value.getStringValueOrDefault("defaultValue")); } @Test @@ -97,6 +132,25 @@ public void testTimestamp() { assertEquals(expected, received); } + @Test + public void testInt64Timestamp() { + FieldValue lossyFieldValue = + FieldValue.of(FieldValue.Attribute.PRIMITIVE, "1.9954383398377106E10"); + long lossy = lossyFieldValue.getTimestampValue(); + + FieldValue
losslessFieldValue = + FieldValue.of(FieldValue.Attribute.PRIMITIVE, "19954383398377106", true); + long lossless = losslessFieldValue.getTimestampValue(); + + assertEquals(lossy, lossless); + + FieldValue fieldValue = + FieldValue.of(FieldValue.Attribute.PRIMITIVE, "19954383398377106", true); + long received = fieldValue.getTimestampValue(); + long expected = 19954383398377106L; + assertEquals(expected, received); + } + @Test public void testEquals() { FieldValue booleanValue = FieldValue.of(FieldValue.Attribute.PRIMITIVE, "false"); @@ -136,6 +190,10 @@ public void testEquals() { assertEquals(nullValue, FieldValue.fromPb(NULL_FIELD)); assertEquals(nullValue.hashCode(), FieldValue.fromPb(NULL_FIELD).hashCode()); + FieldValue rangeValue = FieldValue.of(FieldValue.Attribute.PRIMITIVE, "[start, end)"); + assertEquals(rangeValue, FieldValue.fromPb(RANGE_FIELD)); + assertEquals(rangeValue.hashCode(), FieldValue.fromPb(RANGE_FIELD).hashCode()); + FieldValue repeatedValue = FieldValue.of(FieldValue.Attribute.REPEATED, ImmutableList.of(integerValue, integerValue)); assertEquals(repeatedValue, FieldValue.fromPb(REPEATED_FIELD)); @@ -146,4 +204,21 @@ public void testEquals() { assertEquals(recordValue, FieldValue.fromPb(RECORD_FIELD)); assertEquals(recordValue.hashCode(), FieldValue.fromPb(RECORD_FIELD).hashCode()); } + + @Test + public void testParseCanonicalInterval() { + Map<String, PeriodDuration> intervalToPeriodDuration = new LinkedHashMap<>(); + intervalToPeriodDuration.put( + "125-7 -19 -0:24:12.001", PeriodDuration.parse("P125Y7M-19DT0H-24M-12.001S")); + intervalToPeriodDuration.put("-15-6 23 23:14:05", PeriodDuration.parse("P-15Y-6M23DT23H14M5S")); + intervalToPeriodDuration.put( + "06-01 06 01:01:00.123456", PeriodDuration.parse("P6Y1M6DT1H1M0.123456S")); + intervalToPeriodDuration.put("-0-0 -0 -0:0:0", PeriodDuration.parse("P0Y0M0DT0H0M0S")); + intervalToPeriodDuration.put( + "-99999-99999 9999 999:999:999.999999999", + PeriodDuration.parse("P-99999Y-99999M9999DT999H999M999.999999999S")); + for (Entry<String, PeriodDuration> entry : intervalToPeriodDuration.entrySet()) { + assertEquals(entry.getValue(), FieldValue.parseCanonicalInterval(entry.getKey())); + } + } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ForeignKeyTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ForeignKeyTest.java new file mode 100644 index 0000000000..2dfacda542 --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ForeignKeyTest.java @@ -0,0 +1,94 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
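A note on the testInt64Timestamp case above: the trailing boolean in FieldValue.of(attribute, value, useInt64Timestamp) selects the INT64 representation (microseconds as an integer string) instead of the floating-point seconds form. A minimal standalone sketch of why the flag exists; only the numeric value is taken from the test, the class itself is illustrative:

```java
public class TimestampPrecision {
  public static void main(String[] args) {
    // Doubles represent integers exactly only up to 2^53 (~9.0e15). The
    // microsecond timestamp below (~2.0e16) lies beyond that, so any
    // double-based representation snaps it to a neighboring representable
    // value, while the INT64 string parses losslessly.
    long micros = 19954383398377106L; // value used by the test above
    double asDouble = (double) micros;
    System.out.println((long) asDouble == micros); // false: precision lost
    System.out.println(Long.parseLong("19954383398377106") == micros); // true: lossless
  }
}
```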
+ */ + +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.ArrayList; +import java.util.Collections; +import org.junit.jupiter.api.Test; + +class ForeignKeyTest { + private static final TableId TABLE_ID = TableId.of("project", "dataset", "table"); + + private static final ColumnReference COLUMN_REFERENCE = + ColumnReference.newBuilder() + .setReferencingColumn("column1") + .setReferencedColumn("column2") + .build(); + private static final ForeignKey FOREIGN_KEY = + ForeignKey.newBuilder() + .setName("foreign_key") + .setReferencedTable(TABLE_ID) + .setColumnReferences(Collections.singletonList(COLUMN_REFERENCE)) + .build(); + + @Test + void testToBuilder() { + compareForeignKeyDefinition(FOREIGN_KEY, FOREIGN_KEY.toBuilder().build()); + TableId referencedTable = TableId.of("project1", "dataset1", "table1"); + ArrayList<ColumnReference> columnReferences = new ArrayList<>(); + columnReferences.add( + ColumnReference.newBuilder() + .setReferencingColumn("from") + .setReferencedColumn("to") + .build()); + columnReferences.add( + ColumnReference.newBuilder() + .setReferencingColumn("from2") + .setReferencedColumn("to2") + .build()); + ForeignKey foreignKey = + FOREIGN_KEY.toBuilder() + .setName("test") + .setReferencedTable(referencedTable) + .setColumnReferences(columnReferences) + .build(); + assertEquals("test", foreignKey.getName()); + assertEquals(referencedTable, foreignKey.getReferencedTable()); + assertEquals(columnReferences, foreignKey.getColumnReferences()); + } + + @Test + void testBuilder() { + assertEquals("foreign_key", FOREIGN_KEY.getName()); + assertEquals(TABLE_ID, FOREIGN_KEY.getReferencedTable()); + assertEquals(Collections.singletonList(COLUMN_REFERENCE), FOREIGN_KEY.getColumnReferences()); + ForeignKey foreignKey = + FOREIGN_KEY + .newBuilder() + .setName("foreign_key") + .setReferencedTable(TABLE_ID) + .setColumnReferences(Collections.singletonList(COLUMN_REFERENCE)) + .build(); + assertEquals(FOREIGN_KEY, foreignKey); + } + + @Test + void testToAndFromPb() { + ForeignKey foreignKey = FOREIGN_KEY.toBuilder().build(); + assertTrue(ForeignKey.fromPb(foreignKey.toPb()) instanceof ForeignKey); + compareForeignKeyDefinition(foreignKey, ForeignKey.fromPb(foreignKey.toPb())); + } + + private void compareForeignKeyDefinition(ForeignKey expected, ForeignKey value) { + assertEquals(expected.getName(), value.getName()); + assertEquals(expected.getReferencedTable(), value.getReferencedTable()); + assertEquals(expected.getColumnReferences(), value.getColumnReferences()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FormatOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FormatOptionsTest.java index 2427b90abd..e8642e86e2 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FormatOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FormatOptionsTest.java @@ -16,9 +16,9 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class FormatOptionsTest { @@ -41,6 +41,7 @@ public void testFactoryMethods() { assertEquals(FormatOptions.DATASTORE_BACKUP, FormatOptions.datastoreBackup().getType()); assertEquals(FormatOptions.AVRO, FormatOptions.avro().getType()); assertEquals(FormatOptions.GOOGLE_SHEETS,
FormatOptions.googleSheets().getType()); + assertEquals(FormatOptions.ICEBERG, FormatOptions.iceberg().getType()); } @Test @@ -53,5 +54,6 @@ public void testEquals() { assertEquals( FormatOptions.datastoreBackup().hashCode(), FormatOptions.datastoreBackup().hashCode()); assertEquals(FormatOptions.googleSheets(), FormatOptions.googleSheets()); + assertEquals(FormatOptions.iceberg(), FormatOptions.iceberg()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/GoogleSheetsOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/GoogleSheetsOptionsTest.java index d286358ca1..7aae673d3d 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/GoogleSheetsOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/GoogleSheetsOptionsTest.java @@ -18,7 +18,7 @@ import static com.google.common.truth.Truth.assertThat; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class GoogleSheetsOptionsTest { @@ -44,16 +44,14 @@ public void testToBuilder() { compareGoogleSheetsOptions( GOOGLE_SHEETS_OPTIONS_RANGE, GOOGLE_SHEETS_OPTIONS_RANGE.toBuilder().build()); GoogleSheetsOptions googleSheetsOptionsRange = - GOOGLE_SHEETS_OPTIONS_RANGE - .toBuilder() + GOOGLE_SHEETS_OPTIONS_RANGE.toBuilder() .setSkipLeadingRows(123) .setRange("sheet1!A1:A100") .build(); assertThat(googleSheetsOptionsRange.getSkipLeadingRows()).isEqualTo(123); assertThat(googleSheetsOptionsRange.getRange()).isEqualTo("sheet1!A1:A100"); googleSheetsOptionsRange = - googleSheetsOptionsRange - .toBuilder() + googleSheetsOptionsRange.toBuilder() .setSkipLeadingRows(SKIP_LEADING_ROWS) .setRange(RANGE) .build(); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/HivePartitioningOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/HivePartitioningOptionsTest.java index 05bf05b9a1..401ab07b72 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/HivePartitioningOptionsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/HivePartitioningOptionsTest.java @@ -18,22 +18,26 @@ import static com.google.common.truth.Truth.assertThat; -import org.junit.Test; +import java.util.Arrays; +import java.util.List; +import org.junit.jupiter.api.Test; -public class HivePartitioningOptionsTest { +class HivePartitioningOptionsTest { private static final String MODE = "STRING"; private static final String SOURCE_URI_PREFIX = "gs://bucket/path_to_table"; private static final Boolean REQUIRE_PARTITION_FILTER = true; + private static final List<String> FIELDS = Arrays.asList("FIELD1", "FIELD2"); private static final HivePartitioningOptions HIVE_PARTITIONING_OPTIONS = HivePartitioningOptions.newBuilder() .setMode(MODE) .setRequirePartitionFilter(REQUIRE_PARTITION_FILTER) .setSourceUriPrefix(SOURCE_URI_PREFIX) + .setFields(FIELDS) .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareHivePartitioningOptions( HIVE_PARTITIONING_OPTIONS, HIVE_PARTITIONING_OPTIONS.toBuilder().build()); HivePartitioningOptions options = HIVE_PARTITIONING_OPTIONS.toBuilder().setMode("AUTO").build(); @@ -43,13 +47,13 @@ public void testToBuilder() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { HivePartitioningOptions options = HivePartitioningOptions.newBuilder().build(); compareHivePartitioningOptions(options, options.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() {
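The conversions in these files, and in the InsertAllRequestTest hunk below, all follow the same JUnit 4 to JUnit 5 (Jupiter) pattern: assertion imports move from org.junit.Assert to org.junit.jupiter.api.Assertions, test classes and methods may drop public, and @Test(expected = ...) becomes an explicit assertThrows that pinpoints the throwing call. A condensed before/after sketch (the class here is illustrative, not from this patch):

```java
// JUnit 4 style being removed:
//   @Test(expected = UnsupportedOperationException.class)
//   public void testImmutable() { row.getContent().put("zip", "zap"); }
//
// JUnit 5 style being introduced: only the lambda is expected to throw,
// so an earlier setup failure can no longer pass the test by accident.
import static org.junit.jupiter.api.Assertions.assertThrows;

import com.google.common.collect.ImmutableMap;
import java.util.Map;
import org.junit.jupiter.api.Test;

class ImmutabilityExample {
  @Test
  void testImmutable() {
    Map<String, String> map = ImmutableMap.of("a", "b"); // immutable view
    assertThrows(UnsupportedOperationException.class, () -> map.put("zip", "zap"));
  }
}
```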
assertThat(HIVE_PARTITIONING_OPTIONS.getMode()).isEqualTo(MODE); assertThat(HIVE_PARTITIONING_OPTIONS.getRequirePartitionFilter()) .isEqualTo(REQUIRE_PARTITION_FILTER); @@ -57,7 +61,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareHivePartitioningOptions( HIVE_PARTITIONING_OPTIONS, HivePartitioningOptions.fromPb(HIVE_PARTITIONING_OPTIONS.toPb())); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllRequestTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllRequestTest.java index 4ee1ca13f3..d687e75b34 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllRequestTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllRequestTest.java @@ -17,6 +17,7 @@ package com.google.cloud.bigquery; import static com.google.common.truth.Truth.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -24,7 +25,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class InsertAllRequestTest { @@ -214,11 +215,11 @@ public void testEquals() { compareInsertAllRequest(INSERT_ALL_REQUEST11, INSERT_ALL_REQUEST11); } - @Test(expected = UnsupportedOperationException.class) + @Test public void testImmutable() { - InsertAllRequest.RowToInsert row = - InsertAllRequest.RowToInsert.of(new HashMap()); - row.getContent().put("zip", "zap"); + InsertAllRequest.RowToInsert row = InsertAllRequest.RowToInsert.of(new HashMap<>()); + + assertThrows(UnsupportedOperationException.class, () -> row.getContent().put("zip", "zap")); } @Test diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllResponseTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllResponseTest.java index b39066a6a2..9b30e25860 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllResponseTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllResponseTest.java @@ -16,16 +16,16 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class InsertAllResponseTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobIdTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobIdTest.java index 05ae7cefe4..7934ad1204 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobIdTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobIdTest.java @@ -16,17 +16,17 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class 
JobIdTest { +class JobIdTest { private static final JobId JOB = JobId.of("job"); private static final JobId JOB_COMPLETE = JobId.of("project", "job"); @Test - public void testOf() { + void testOf() { assertEquals(null, JOB.getProject()); assertEquals("job", JOB.getJob()); assertEquals("project", JOB_COMPLETE.getProject()); @@ -34,19 +34,19 @@ public void testOf() { } @Test - public void testEquals() { + void testEquals() { compareJobs(JOB, JobId.of("job")); compareJobs(JOB_COMPLETE, JobId.of("project", "job")); } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { compareJobs(JOB, JobId.fromPb(JOB.toPb())); compareJobs(JOB_COMPLETE, JobId.fromPb(JOB_COMPLETE.toPb())); } @Test - public void testSetProjectId() { + void testSetProjectId() { assertEquals(JOB_COMPLETE, JOB.setProjectId("project")); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobInfoTest.java index 71825f0a54..6c7f9b245c 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobInfoTest.java @@ -16,10 +16,10 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.cloud.bigquery.JobInfo.CreateDisposition; import com.google.cloud.bigquery.JobInfo.SchemaUpdateOption; @@ -32,7 +32,7 @@ import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class JobInfoTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatisticsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatisticsTest.java index 96bfa3f084..289548113b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatisticsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatisticsTest.java @@ -17,22 +17,36 @@ package com.google.cloud.bigquery; import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.cloud.bigquery.JobStatistics.CopyStatistics; import com.google.cloud.bigquery.JobStatistics.ExtractStatistics; import com.google.cloud.bigquery.JobStatistics.LoadStatistics; import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics.ExportDataStats; import com.google.cloud.bigquery.JobStatistics.ReservationUsage; import com.google.cloud.bigquery.JobStatistics.ScriptStatistics; import com.google.cloud.bigquery.JobStatistics.ScriptStatistics.ScriptStackFrame; +import com.google.cloud.bigquery.JobStatistics.SessionInfo; +import com.google.cloud.bigquery.JobStatistics.TransactionInfo; import com.google.cloud.bigquery.QueryStage.QueryStep; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import java.util.UUID; +import org.junit.jupiter.api.Test; public class JobStatisticsTest { + private 
static final BiEngineReason BI_ENGINE_REASON = + BiEngineReason.newBuilder() + .setMessage("Detected unsupported join type") + .setCode("UNSUPPORTED_SQL_TEXT") + .build(); + private static final BiEngineStats BI_ENGINE_STATS = + BiEngineStats.newBuilder() + .setBiEngineReasons(ImmutableList.of(BI_ENGINE_REASON)) + .setBiEngineMode("DISABLED") + .build(); private static final Integer BILLING_TIER = 42; private static final Boolean CACHE_HIT = true; private static final String DDL_OPERATION_PERFORMED = "SKIP"; @@ -42,6 +56,22 @@ public class JobStatisticsTest { private static final RoutineId DDL_TARGET_ROUTINE = RoutineId.of("alpha", "beta", "gamma"); private static final Long ESTIMATE_BYTES_PROCESSED = 101L; private static final Long NUM_DML_AFFECTED_ROWS = 88L; + private static final Long DELETED_ROW_COUNT = 10L; + private static final Long INSERTED_ROW_COUNT = 20L; + private static final Long UPDATED_ROW_COUNT = 30L; + private static final DmlStats DML_STATS = + DmlStats.newBuilder() + .setDeletedRowCount(DELETED_ROW_COUNT) + .setInsertedRowCount(INSERTED_ROW_COUNT) + .setUpdatedRowCount(UPDATED_ROW_COUNT) + .build(); + private static final Long EXPORT_DATA_STATS_ROW_COUNT = 3L; + private static final Long EXPORT_DATA_STATS_FILE_COUNT = 2L; + private static final ExportDataStats EXPORT_DATA_STATS = + ExportDataStats.newBuilder() + .setRowCount(EXPORT_DATA_STATS_ROW_COUNT) + .setFileCount(EXPORT_DATA_STATS_FILE_COUNT) + .build(); private static final QueryStatistics.StatementType STATEMENT_TYPE = QueryStatistics.StatementType.SELECT; private static final Long TOTAL_BYTES_BILLED = 24L; @@ -61,11 +91,18 @@ public class JobStatisticsTest { private static final Long START_TIME = 15L; private static final String NAME = "reservation-name"; private static final Long SLOTMS = 12545L; + private static final String TRANSACTION_ID = UUID.randomUUID().toString().substring(0, 8); + private static final String SESSION_ID = UUID.randomUUID().toString().substring(0, 8); + private static final Long COPIED_ROW = 1L; + private static final Long COPIED_LOGICAL_BYTES = 2L; private static final CopyStatistics COPY_STATISTICS = CopyStatistics.newBuilder() .setCreationTimestamp(CREATION_TIME) .setEndTime(END_TIME) .setStartTime(START_TIME) + .setCopiedRows(COPIED_ROW) + .setCopiedLogicalBytes(COPIED_LOGICAL_BYTES) + .setTotalSlotMs(TOTAL_SLOT_MS) .build(); private static final ExtractStatistics EXTRACT_STATISTICS = ExtractStatistics.newBuilder() @@ -73,6 +110,8 @@ public class JobStatisticsTest { .setEndTime(END_TIME) .setStartTime(START_TIME) .setDestinationUriFileCounts(FILE_COUNT) + .setInputBytes(INPUT_BYTES) + .setTotalSlotMs(TOTAL_SLOT_MS) .build(); private static final LoadStatistics LOAD_STATISTICS = LoadStatistics.newBuilder() @@ -84,6 +123,7 @@ public class JobStatisticsTest { .setOutputBytes(OUTPUT_BYTES) .setOutputRows(OUTPUT_ROWS) .setBadRecords(BAD_RECORDS) + .setTotalSlotMs(TOTAL_SLOT_MS) .build(); private static final LoadStatistics LOAD_STATISTICS_INCOMPLETE = LoadStatistics.newBuilder() @@ -93,6 +133,7 @@ public class JobStatisticsTest { .setInputBytes(INPUT_BYTES) .setInputFiles(INPUT_FILES) .setBadRecords(BAD_RECORDS) + .setTotalSlotMs(TOTAL_SLOT_MS) .build(); private static final List<String> SUBSTEPS1 = ImmutableList.of("substep1", "substep2"); private static final List<String> SUBSTEPS2 = ImmutableList.of("substep3", "substep4"); @@ -135,11 +176,23 @@ public class JobStatisticsTest { ImmutableList.of(TIMELINE_SAMPLE1, TIMELINE_SAMPLE2); private static final List<QueryStage> QUERY_PLAN = ImmutableList.of(QUERY_STAGE);
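For orientation: the constants above feed assertions on the newly covered statistics surface (BI Engine, DML row counts, export-data counts, per-job slot milliseconds, transaction and session IDs). A hedged read-side sketch of how a caller might consume the same accessors; the flow is illustrative and the accessor names are simply the ones this test asserts:

```java
import com.google.cloud.bigquery.DmlStats;
import com.google.cloud.bigquery.Job;
import com.google.cloud.bigquery.JobStatistics.QueryStatistics;

class DmlStatsSketch {
  // Prints DML row counts for a finished query job.
  static void printDmlStats(Job completed) {
    QueryStatistics stats = completed.getStatistics(); // generic getter, inferred type
    System.out.println("total slot-ms: " + stats.getTotalSlotMs());
    DmlStats dml = stats.getDmlStats(); // null unless the statement was DML
    if (dml != null) {
      System.out.printf("inserted=%d updated=%d deleted=%d%n",
          dml.getInsertedRowCount(), dml.getUpdatedRowCount(), dml.getDeletedRowCount());
    }
  }
}
```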
private static final Schema SCHEMA = Schema.of(Field.of("column", LegacySQLTypeName.DATETIME)); + private static final String UNUSED_INDEX_USAGE_MODE = "UNUSED"; + private static final SearchStats SEARCH_STATS = + SearchStats.newBuilder().setIndexUsageMode(UNUSED_INDEX_USAGE_MODE).build(); + + private static final MetadataCacheStats METADATA_CACHE_STATS = + MetadataCacheStats.newBuilder() + .setTableMetadataCacheUsage( + ImmutableList.of( + TableMetadataCacheUsage.newBuilder().setExplanation("test explanation").build())) + .build(); + private static final QueryStatistics QUERY_STATISTICS = QueryStatistics.newBuilder() .setCreationTimestamp(CREATION_TIME) .setEndTime(END_TIME) .setStartTime(START_TIME) + .setBiEngineStats(BI_ENGINE_STATS) .setBillingTier(BILLING_TIER) .setCacheHit(CACHE_HIT) .setDDLOperationPerformed(DDL_OPERATION_PERFORMED) @@ -147,6 +200,8 @@ public class JobStatisticsTest { .setDDLTargetRoutine(DDL_TARGET_ROUTINE) .setEstimatedBytesProcessed(ESTIMATE_BYTES_PROCESSED) .setNumDmlAffectedRows(NUM_DML_AFFECTED_ROWS) + .setDmlStats(DML_STATS) + .setExportDataStats(EXPORT_DATA_STATS) .setReferenceTables(REFERENCED_TABLES) .setStatementType(STATEMENT_TYPE) .setTotalBytesBilled(TOTAL_BYTES_BILLED) @@ -156,6 +211,8 @@ public class JobStatisticsTest { .setQueryPlan(QUERY_PLAN) .setTimeline(TIMELINE) .setSchema(SCHEMA) + .setSearchStats(SEARCH_STATS) + .setMetadataCacheStats(METADATA_CACHE_STATS) .build(); private static final QueryStatistics QUERY_STATISTICS_INCOMPLETE = QueryStatistics.newBuilder() @@ -164,6 +221,8 @@ public class JobStatisticsTest { .setStartTime(START_TIME) .setBillingTier(BILLING_TIER) .setCacheHit(CACHE_HIT) + .setSearchStats(SEARCH_STATS) + .setMetadataCacheStats(METADATA_CACHE_STATS) .build(); private static final ScriptStackFrame STATEMENT_STACK_FRAME = ScriptStackFrame.newBuilder() @@ -206,16 +265,32 @@ public class JobStatisticsTest { private static final ReservationUsage RESERVATION_USAGE = ReservationUsage.newBuilder().setName(NAME).setSlotMs(SLOTMS).build(); + private static final TransactionInfo TRANSACTION_INFO = + TransactionInfo.newbuilder().setTransactionId(TRANSACTION_ID).build(); + + private static final SessionInfo SESSION_INFO = + SessionInfo.newBuilder().setSessionId(SESSION_ID).build(); + @Test public void testBuilder() { assertEquals(CREATION_TIME, EXTRACT_STATISTICS.getCreationTime()); assertEquals(START_TIME, EXTRACT_STATISTICS.getStartTime()); assertEquals(END_TIME, EXTRACT_STATISTICS.getEndTime()); + assertEquals(TOTAL_SLOT_MS, EXTRACT_STATISTICS.getTotalSlotMs()); assertEquals(FILE_COUNT, EXTRACT_STATISTICS.getDestinationUriFileCounts()); + assertEquals(INPUT_BYTES, EXTRACT_STATISTICS.getInputBytes()); + + assertEquals(CREATION_TIME, COPY_STATISTICS.getCreationTime()); + assertEquals(START_TIME, COPY_STATISTICS.getStartTime()); + assertEquals(END_TIME, COPY_STATISTICS.getEndTime()); + assertEquals(TOTAL_SLOT_MS, COPY_STATISTICS.getTotalSlotMs()); + assertEquals(COPIED_LOGICAL_BYTES, COPY_STATISTICS.getCopiedLogicalBytes()); + assertEquals(COPIED_ROW, COPY_STATISTICS.getCopiedRows()); assertEquals(CREATION_TIME, LOAD_STATISTICS.getCreationTime()); assertEquals(START_TIME, LOAD_STATISTICS.getStartTime()); assertEquals(END_TIME, LOAD_STATISTICS.getEndTime()); + assertEquals(TOTAL_SLOT_MS, LOAD_STATISTICS.getTotalSlotMs()); assertEquals(INPUT_BYTES, LOAD_STATISTICS.getInputBytes()); assertEquals(INPUT_FILES, LOAD_STATISTICS.getInputFiles()); assertEquals(OUTPUT_BYTES, LOAD_STATISTICS.getOutputBytes()); @@ -225,6 +300,8 @@ public void 
testBuilder() { assertEquals(CREATION_TIME, QUERY_STATISTICS.getCreationTime()); assertEquals(START_TIME, QUERY_STATISTICS.getStartTime()); assertEquals(END_TIME, QUERY_STATISTICS.getEndTime()); + assertEquals(TOTAL_SLOT_MS, QUERY_STATISTICS.getTotalSlotMs()); + assertEquals(BI_ENGINE_STATS, QUERY_STATISTICS.getBiEngineStats()); assertEquals(BILLING_TIER, QUERY_STATISTICS.getBillingTier()); assertEquals(CACHE_HIT, QUERY_STATISTICS.getCacheHit()); assertEquals(DDL_OPERATION_PERFORMED, QUERY_STATISTICS.getDdlOperationPerformed()); @@ -232,12 +309,13 @@ public void testBuilder() { assertEquals(DDL_TARGET_ROUTINE, QUERY_STATISTICS.getDdlTargetRoutine()); assertEquals(ESTIMATE_BYTES_PROCESSED, QUERY_STATISTICS.getEstimatedBytesProcessed()); assertEquals(NUM_DML_AFFECTED_ROWS, QUERY_STATISTICS.getNumDmlAffectedRows()); + assertEquals(DML_STATS, QUERY_STATISTICS.getDmlStats()); + assertEquals(EXPORT_DATA_STATS, QUERY_STATISTICS.getExportDataStats()); assertEquals(REFERENCED_TABLES, QUERY_STATISTICS.getReferencedTables()); assertEquals(STATEMENT_TYPE, QUERY_STATISTICS.getStatementType()); assertEquals(TOTAL_BYTES_BILLED, QUERY_STATISTICS.getTotalBytesBilled()); assertEquals(TOTAL_BYTES_PROCESSED, QUERY_STATISTICS.getTotalBytesProcessed()); assertEquals(TOTAL_PARTITION_PROCESSED, QUERY_STATISTICS.getTotalPartitionsProcessed()); - assertEquals(TOTAL_SLOT_MS, QUERY_STATISTICS.getTotalSlotMs()); assertEquals(QUERY_PLAN, QUERY_STATISTICS.getQueryPlan()); assertEquals(TIMELINE, QUERY_STATISTICS.getTimeline()); @@ -275,12 +353,15 @@ public void testBuilder() { ImmutableList.of(EXPRESSION_STACK_FRAME), EXPRESSION_SCRIPT_STATISTICS.getStackFrames()); assertEquals(NAME, RESERVATION_USAGE.getName()); assertEquals(SLOTMS, RESERVATION_USAGE.getSlotMs()); + assertEquals(TRANSACTION_ID, TRANSACTION_INFO.getTransactionId()); + assertEquals(SESSION_ID, SESSION_INFO.getSessionId()); } @Test public void testToPbAndFromPb() { compareExtractStatistics( EXTRACT_STATISTICS, ExtractStatistics.fromPb(EXTRACT_STATISTICS.toPb())); + compareCopyStatistics(COPY_STATISTICS, CopyStatistics.fromPb(COPY_STATISTICS.toPb())); compareLoadStatistics(LOAD_STATISTICS, LoadStatistics.fromPb(LOAD_STATISTICS.toPb())); compareQueryStatistics(QUERY_STATISTICS, QueryStatistics.fromPb(QUERY_STATISTICS.toPb())); compareStatistics(COPY_STATISTICS, CopyStatistics.fromPb(COPY_STATISTICS.toPb())); @@ -300,6 +381,8 @@ public void testToPbAndFromPb() { compareStackFrames(stackFrame, ScriptStackFrame.fromPb(stackFrame.toPb())); } compareReservation(RESERVATION_USAGE, ReservationUsage.fromPb(RESERVATION_USAGE.toPb())); + compareTransactionInfo(TRANSACTION_INFO, TransactionInfo.fromPb(TRANSACTION_INFO.toPb())); + compareSessionInfo(SESSION_INFO, SessionInfo.fromPb(SESSION_INFO.toPb())); } @Test @@ -311,32 +394,45 @@ public void testIncomplete() { new com.google.api.services.bigquery.model.JobStatistics() .setCreationTime(1234L) .setStartTime(5678L)); + JobStatistics jobStatistics; job.setConfiguration( new com.google.api.services.bigquery.model.JobConfiguration() .setCopy(new com.google.api.services.bigquery.model.JobConfigurationTableCopy())); - assertThat(JobStatistics.fromPb(job)).isInstanceOf(CopyStatistics.class); + jobStatistics = JobStatistics.fromPb(job); + assertThat(jobStatistics).isInstanceOf(CopyStatistics.class); job.setConfiguration( new com.google.api.services.bigquery.model.JobConfiguration() .setLoad(new com.google.api.services.bigquery.model.JobConfigurationLoad())); - 
assertThat(JobStatistics.fromPb(job)).isInstanceOf(LoadStatistics.class); + jobStatistics = JobStatistics.fromPb(job); + assertThat(jobStatistics).isInstanceOf(LoadStatistics.class); job.setConfiguration( new com.google.api.services.bigquery.model.JobConfiguration() .setExtract(new com.google.api.services.bigquery.model.JobConfigurationExtract())); - assertThat(JobStatistics.fromPb(job)).isInstanceOf(ExtractStatistics.class); + jobStatistics = JobStatistics.fromPb(job); + assertThat(jobStatistics).isInstanceOf(ExtractStatistics.class); job.setConfiguration( new com.google.api.services.bigquery.model.JobConfiguration() .setQuery(new com.google.api.services.bigquery.model.JobConfigurationQuery())); - assertThat(JobStatistics.fromPb(job)).isInstanceOf(QueryStatistics.class); + jobStatistics = JobStatistics.fromPb(job); + assertThat(jobStatistics).isInstanceOf(QueryStatistics.class); } private void compareExtractStatistics(ExtractStatistics expected, ExtractStatistics value) { assertEquals(expected, value); compareStatistics(expected, value); assertEquals(expected.getDestinationUriFileCounts(), value.getDestinationUriFileCounts()); + assertEquals(expected.getInputBytes(), value.getInputBytes()); + } + + private void compareCopyStatistics(CopyStatistics expected, CopyStatistics value) { + assertEquals(expected, value); + compareStatistics(expected, value); + assertEquals(expected.getCopiedLogicalBytes(), value.getCopiedLogicalBytes()); + assertEquals(expected.getCopiedRows(), value.getCopiedRows()); } private void compareLoadStatistics(LoadStatistics expected, LoadStatistics value) { @@ -364,8 +460,13 @@ private void compareQueryStatistics(QueryStatistics expected, QueryStatistics va assertEquals(expected.getQueryPlan(), value.getQueryPlan()); assertEquals(expected.getReferencedTables(), value.getReferencedTables()); assertEquals(expected.getSchema(), value.getSchema()); + assertEquals( + expected.getSearchStats().getIndexUsageMode(), value.getSearchStats().getIndexUsageMode()); + assertEquals(expected.getMetadataCacheStats(), value.getMetadataCacheStats()); assertEquals(expected.getStatementType(), value.getStatementType()); assertEquals(expected.getTimeline(), value.getTimeline()); + assertEquals(expected.getDmlStats(), value.getDmlStats()); + assertEquals(expected.getExportDataStats(), value.getExportDataStats()); } private void compareStatistics(JobStatistics expected, JobStatistics value) { @@ -378,6 +479,7 @@ private void compareStatistics(JobStatistics expected, JobStatistics value) { assertEquals(expected.getNumChildJobs(), value.getNumChildJobs()); assertEquals(expected.getParentJobId(), value.getParentJobId()); assertEquals(expected.getScriptStatistics(), value.getScriptStatistics()); + assertEquals(expected.getTotalSlotMs(), value.getTotalSlotMs()); } private void compareScriptStatistics(ScriptStatistics expected, ScriptStatistics value) { @@ -409,4 +511,20 @@ private void compareReservation(ReservationUsage expected, ReservationUsage valu assertEquals(expected.getName(), value.getName()); assertEquals(expected.getSlotMs(), value.getSlotMs()); } + + private void compareTransactionInfo(TransactionInfo expected, TransactionInfo value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.toPb(), value.toPb()); + assertEquals(expected.getTransactionId(), value.getTransactionId()); + } + + private void compareSessionInfo(SessionInfo expected, SessionInfo value) { + 
assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.toPb(), value.toPb()); + assertEquals(expected.getSessionId(), value.getSessionId()); + } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatusTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatusTest.java index bb463d1cec..1c20b72404 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatusTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatusTest.java @@ -16,13 +16,13 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class JobStatusTest { +class JobStatusTest { private static final JobStatus.State STATE = JobStatus.State.DONE; private static final BigQueryError ERROR = @@ -36,7 +36,7 @@ public class JobStatusTest { private static final JobStatus JOB_STATUS_INCOMPLETE2 = new JobStatus(STATE, null, null); @Test - public void testConstructor() { + void testConstructor() { assertEquals(STATE, JOB_STATUS.getState()); assertEquals(ERROR, JOB_STATUS.getError()); assertEquals(ALL_ERRORS, JOB_STATUS.getExecutionErrors()); @@ -51,7 +51,7 @@ public void testConstructor() { } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { compareStatus(JOB_STATUS, JobStatus.fromPb(JOB_STATUS.toPb())); compareStatus(JOB_STATUS_INCOMPLETE1, JobStatus.fromPb(JOB_STATUS_INCOMPLETE1.toPb())); compareStatus(JOB_STATUS_INCOMPLETE2, JobStatus.fromPb(JOB_STATUS_INCOMPLETE2.toPb())); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java index b4b17b7d96..f85c2f76c4 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java @@ -18,14 +18,14 @@ import static com.google.common.collect.ObjectArrays.concat; import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.any; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; @@ -41,17 +41,14 @@ import com.google.cloud.bigquery.JobStatistics.QueryStatistics; import com.google.cloud.bigquery.JobStatus.State; import com.google.common.collect.ImmutableList; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Rule; -import 
org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; -import org.mockito.junit.MockitoRule; -import org.threeten.bp.Duration; - -@RunWith(MockitoJUnitRunner.class) -public class JobTest { +import java.time.Duration; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class JobTest { private static final JobId JOB_ID = JobId.of("project", "job"); private static final TableId TABLE_ID1 = TableId.of("dataset", "table1"); @@ -65,6 +62,10 @@ public class JobTest { CopyStatistics.newBuilder().setCreationTimestamp(1L).setEndTime(3L).setStartTime(2L).build(); private static final CopyJobConfiguration COPY_CONFIGURATION = CopyJobConfiguration.of(TABLE_ID1, TABLE_ID2); + private static final QueryJobConfiguration DDL_QUERY_CONFIGURATION = + QueryJobConfiguration.newBuilder("CREATE VIEW").setDestinationTable(TABLE_ID1).build(); + private static final QueryJobConfiguration DRL_QUERY_CONFIGURATION = + QueryJobConfiguration.newBuilder("SELECT 1").setDestinationTable(TABLE_ID1).build(); private static final JobInfo JOB_INFO = JobInfo.newBuilder(COPY_CONFIGURATION) .setJobId(JOB_ID) @@ -79,21 +80,24 @@ public class JobTest { private static final RetryOption[] TEST_RETRY_OPTIONS = new RetryOption[] { - RetryOption.totalTimeout(Duration.ofSeconds(3)), - RetryOption.initialRetryDelay(Duration.ofMillis(1L)), + RetryOption.totalTimeoutDuration(Duration.ofSeconds(3)), + RetryOption.initialRetryDelayDuration(Duration.ofMillis(1L)), RetryOption.jittered(false), RetryOption.retryDelayMultiplier(1.0) }; - @Rule public MockitoRule rule; + private static final BigQueryRetryConfig TEST_BIGQUERY_RETRY_CONFIG = + BigQueryRetryConfig.newBuilder() + .retryOnMessage(BigQueryErrorMessages.RATE_LIMIT_EXCEEDED_MSG) + .build(); private BigQuery bigquery; private BigQueryOptions mockOptions; private Job expectedJob; private Job job; - @Before - public void setUp() { + @BeforeEach + void setUp() { bigquery = mock(BigQuery.class); mockOptions = mock(BigQueryOptions.class); when(bigquery.getOptions()).thenReturn(mockOptions); @@ -102,7 +106,7 @@ public void setUp() { } @Test - public void testBuilder() { + void testBuilder() { Job builtJob = new Job.Builder(bigquery, COPY_CONFIGURATION) .setJobId(JOB_ID) @@ -126,12 +130,12 @@ public void testBuilder() { } @Test - public void testToBuilder() { + void testToBuilder() { compareJob(expectedJob, expectedJob.toBuilder().build()); } @Test - public void testExists_True() { + void testExists_True() { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields()}; when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(expectedJob); assertTrue(job.exists()); @@ -139,7 +143,7 @@ public void testExists_True() { } @Test - public void testExists_False() { + void testExists_False() { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields()}; when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(null); assertFalse(job.exists()); @@ -147,16 +151,14 @@ public void testExists_False() { } @Test - public void testIsDone_True() { - BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; + void testIsDone_True() { Job job = expectedJob.toBuilder().setStatus(new JobStatus(JobStatus.State.DONE)).build(); - when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(job); assertTrue(job.isDone()); - 
verify(bigquery).getJob(JOB_INFO.getJobId(), expectedOptions); + verify(bigquery, times(0)).getJob(eq(JOB_INFO.getJobId()), any()); } @Test - public void testIsDone_False() { + void testIsDone_False() { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; Job job = expectedJob.toBuilder().setStatus(new JobStatus(JobStatus.State.RUNNING)).build(); when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(job); @@ -165,15 +167,17 @@ public void testIsDone_False() { } @Test - public void testIsDone_NotExists() { + void testIsDone_NotExists() { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; + Job jobWithRunningState = + expectedJob.toBuilder().setStatus(new JobStatus(JobStatus.State.RUNNING)).build(); when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(null); - assertTrue(job.isDone()); + assertTrue(jobWithRunningState.isDone()); verify(bigquery).getJob(JOB_INFO.getJobId(), expectedOptions); } @Test - public void testWaitFor() throws InterruptedException { + void testWaitFor() throws InterruptedException { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; JobStatus status = mock(JobStatus.class); when(status.getState()).thenReturn(JobStatus.State.DONE); @@ -190,9 +194,7 @@ public void testWaitFor() throws InterruptedException { } @Test - public void testWaitForAndGetQueryResultsEmpty() throws InterruptedException { - QueryJobConfiguration jobConfig = - QueryJobConfiguration.newBuilder("CREATE VIEW").setDestinationTable(TABLE_ID1).build(); + void testWaitForAndGetQueryResultsEmpty() throws InterruptedException { QueryStatistics jobStatistics = QueryStatistics.newBuilder() .setCreationTimestamp(1L) @@ -200,7 +202,7 @@ public void testWaitForAndGetQueryResultsEmpty() throws InterruptedException { .setStartTime(2L) .build(); JobInfo jobInfo = - JobInfo.newBuilder(jobConfig) + JobInfo.newBuilder(DDL_QUERY_CONFIGURATION) .setJobId(JOB_ID) .setStatistics(jobStatistics) .setJobId(JOB_ID) @@ -228,7 +230,7 @@ public void testWaitForAndGetQueryResultsEmpty() throws InterruptedException { when(bigquery.getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS)) .thenReturn(completedQuery); when(bigquery.getJob(JOB_INFO.getJobId())).thenReturn(completedJob); - job = this.job.toBuilder().setConfiguration(jobConfig).build(); + job = this.job.toBuilder().setConfiguration(DDL_QUERY_CONFIGURATION).build(); assertThat(job.waitFor(TEST_RETRY_OPTIONS)).isSameInstanceAs(completedJob); assertThat(job.getQueryResults().iterateAll()).isEmpty(); verify(bigquery, times(2)).getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS); @@ -236,9 +238,7 @@ public void testWaitForAndGetQueryResultsEmpty() throws InterruptedException { } @Test - public void testWaitForAndGetQueryResultsEmptyWithSchema() throws InterruptedException { - QueryJobConfiguration jobConfig = - QueryJobConfiguration.newBuilder("CREATE VIEW").setDestinationTable(TABLE_ID1).build(); + void testWaitForAndGetQueryResultsEmptyWithSchema() throws InterruptedException { QueryStatistics jobStatistics = QueryStatistics.newBuilder() .setCreationTimestamp(1L) @@ -246,7 +246,7 @@ public void testWaitForAndGetQueryResultsEmptyWithSchema() throws InterruptedExc .setStartTime(2L) .build(); JobInfo jobInfo = - JobInfo.newBuilder(jobConfig) + JobInfo.newBuilder(DDL_QUERY_CONFIGURATION) .setJobId(JOB_ID) .setStatistics(jobStatistics) .setJobId(JOB_ID) @@ -274,7 +274,7 @@ public void 
testWaitForAndGetQueryResultsEmptyWithSchema() throws InterruptedExc when(bigquery.getJob(JOB_INFO.getJobId())).thenReturn(completedJob); when(bigquery.getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS)) .thenReturn(completedQuery); - job = this.job.toBuilder().setConfiguration(jobConfig).build(); + job = this.job.toBuilder().setConfiguration(DDL_QUERY_CONFIGURATION).build(); assertThat(job.waitFor(TEST_RETRY_OPTIONS)).isSameInstanceAs(completedJob); assertThat(job.getQueryResults().getSchema()) .isEqualTo(Schema.of(Field.of("field1", LegacySQLTypeName.BOOLEAN))); @@ -283,9 +283,7 @@ public void testWaitForAndGetQueryResultsEmptyWithSchema() throws InterruptedExc } @Test - public void testWaitForAndGetQueryResults() throws InterruptedException { - QueryJobConfiguration jobConfig = - QueryJobConfiguration.newBuilder("SELECT 1").setDestinationTable(TABLE_ID1).build(); + void testWaitForAndGetQueryResults() throws InterruptedException { QueryStatistics jobStatistics = QueryStatistics.newBuilder() .setCreationTimestamp(1L) @@ -293,7 +291,7 @@ public void testWaitForAndGetQueryResults() throws InterruptedException { .setStartTime(2L) .build(); JobInfo jobInfo = - JobInfo.newBuilder(jobConfig) + JobInfo.newBuilder(DRL_QUERY_CONFIGURATION) .setJobId(JOB_ID) .setStatistics(jobStatistics) .setJobId(JOB_ID) @@ -309,7 +307,12 @@ public void testWaitForAndGetQueryResults() throws InterruptedException { Job completedJob = expectedJob.toBuilder().setStatus(new JobStatus(JobStatus.State.RUNNING)).build(); Page singlePage = Pages.empty(); - TableResult result = new TableResult(Schema.of(), 1, singlePage); + TableResult result = + TableResult.newBuilder() + .setSchema(Schema.of()) + .setTotalRows(1L) + .setPageNoSchema(singlePage) + .build(); QueryResponse completedQuery = QueryResponse.newBuilder() .setCompleted(true) @@ -324,7 +327,7 @@ public void testWaitForAndGetQueryResults() throws InterruptedException { when(bigquery.getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS)) .thenReturn(completedQuery); when(bigquery.listTableData(eq(TABLE_ID1), any(Schema.class))).thenReturn(result); - job = this.job.toBuilder().setConfiguration(jobConfig).build(); + job = this.job.toBuilder().setConfiguration(DRL_QUERY_CONFIGURATION).build(); assertThat(job.waitFor(TEST_RETRY_OPTIONS)).isSameInstanceAs(completedJob); assertThat(job.getQueryResults().iterateAll()).hasSize(0); verify(bigquery, times(2)).getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS); @@ -332,17 +335,14 @@ public void testWaitForAndGetQueryResults() throws InterruptedException { } @Test - public void testWaitForAndGetQueryResults_Unsupported() throws InterruptedException { - try { - job.getQueryResults(); - Assert.fail(); - } catch (UnsupportedOperationException expected) { - Assert.assertNotNull(expected.getMessage()); - } + void testWaitForAndGetQueryResults_Unsupported() throws InterruptedException { + UnsupportedOperationException expected = + assertThrows(UnsupportedOperationException.class, () -> job.getQueryResults()); + assertNotNull(expected.getMessage()); } @Test - public void testWaitFor_Null() throws InterruptedException { + void testWaitFor_Null() throws InterruptedException { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; when(mockOptions.getClock()).thenReturn(CurrentMillisClock.getDefaultClock()); when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(null); @@ -351,7 +351,7 @@ public void testWaitFor_Null() throws 
InterruptedException { } @Test - public void testWaitForWithCheckingPeriod() throws InterruptedException { + void testWaitForWithCheckingPeriod() throws InterruptedException { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; JobStatus status = mock(JobStatus.class); when(status.getState()).thenReturn(JobStatus.State.RUNNING); @@ -373,7 +373,7 @@ public void testWaitForWithCheckingPeriod() throws InterruptedException { } @Test - public void testWaitForWithCheckingPeriod_Null() throws InterruptedException { + void testWaitForWithCheckingPeriod_Null() throws InterruptedException { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; when(mockOptions.getClock()).thenReturn(CurrentMillisClock.getDefaultClock()); Job runningJob = @@ -386,23 +386,168 @@ public void testWaitForWithCheckingPeriod_Null() throws InterruptedException { } @Test - public void testWaitForWithTimeout() throws InterruptedException { + void testWaitForWithTimeout() throws InterruptedException { BigQuery.JobOption[] expectedOptions = {BigQuery.JobOption.fields(BigQuery.JobField.STATUS)}; when(mockOptions.getClock()).thenReturn(CurrentMillisClock.getDefaultClock()); Job runningJob = expectedJob.toBuilder().setStatus(new JobStatus(JobStatus.State.RUNNING)).build(); when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(runningJob); when(bigquery.getJob(JOB_INFO.getJobId(), expectedOptions)).thenReturn(runningJob); - try { - job.waitFor(concat(TEST_RETRY_OPTIONS, RetryOption.totalTimeout(Duration.ofMillis(3)))); - Assert.fail(); - } catch (BigQueryException expected) { - Assert.assertNotNull(expected.getMessage()); - } + BigQueryException expected = + assertThrows( + BigQueryException.class, + () -> + job.waitFor( + concat( + TEST_RETRY_OPTIONS, + RetryOption.totalTimeoutDuration(Duration.ofMillis(3))))); + assertNotNull(expected.getMessage()); + } + + @Test + void testWaitForWithBigQueryRetryConfig() throws InterruptedException { + QueryStatistics jobStatistics = + QueryStatistics.newBuilder() + .setCreationTimestamp(1L) + .setEndTime(3L) + .setStartTime(2L) + .build(); + JobInfo jobInfo = + JobInfo.newBuilder(DRL_QUERY_CONFIGURATION) + .setJobId(JOB_ID) + .setStatistics(jobStatistics) + .setJobId(JOB_ID) + .setEtag(ETAG) + .setGeneratedId(GENERATED_ID) + .setSelfLink(SELF_LINK) + .setUserEmail(EMAIL) + .setStatus(JOB_STATUS) + .build(); + + when(bigquery.getOptions()).thenReturn(mockOptions); + when(mockOptions.getClock()).thenReturn(CurrentMillisClock.getDefaultClock()); + Job completedJob = + expectedJob.toBuilder().setStatus(new JobStatus(JobStatus.State.RUNNING)).build(); + QueryResponse completedQuery = + QueryResponse.newBuilder() + .setCompleted(true) + .setTotalRows(1) // Lies to force call of listTableData(). 
+ .setSchema(Schema.of(Field.of("_f0", LegacySQLTypeName.INTEGER))) + .setErrors(ImmutableList.of()) + .build(); + + when(bigquery.getJob(JOB_INFO.getJobId())).thenReturn(completedJob); + when(bigquery.getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS)) + .thenReturn(completedQuery); + job = this.job.toBuilder().setConfiguration(DRL_QUERY_CONFIGURATION).build(); + assertThat(job.waitFor(TEST_BIGQUERY_RETRY_CONFIG, TEST_RETRY_OPTIONS)) + .isSameInstanceAs(completedJob); + verify(bigquery, times(1)).getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS); + verify(bigquery).getJob(JOB_INFO.getJobId()); + } + + @Test + void testWaitForWithBigQueryRetryConfigShouldRetry() throws InterruptedException { + QueryStatistics jobStatistics = + QueryStatistics.newBuilder() + .setCreationTimestamp(1L) + .setEndTime(3L) + .setStartTime(2L) + .build(); + JobInfo jobInfo = + JobInfo.newBuilder(DRL_QUERY_CONFIGURATION) + .setJobId(JOB_ID) + .setStatistics(jobStatistics) + .setJobId(JOB_ID) + .setEtag(ETAG) + .setGeneratedId(GENERATED_ID) + .setSelfLink(SELF_LINK) + .setUserEmail(EMAIL) + .setStatus(JOB_STATUS) + .build(); + + when(bigquery.getOptions()).thenReturn(mockOptions); + when(mockOptions.getClock()).thenReturn(CurrentMillisClock.getDefaultClock()); + Job completedJob = + expectedJob.toBuilder().setStatus(new JobStatus(JobStatus.State.RUNNING)).build(); + QueryResponse completedQuery = + QueryResponse.newBuilder() + .setCompleted(true) + .setTotalRows(1) // Lies to force call of listTableData(). + .setSchema(Schema.of(Field.of("_f0", LegacySQLTypeName.INTEGER))) + .setErrors(ImmutableList.of()) + .build(); + + when(bigquery.getJob(JOB_INFO.getJobId())).thenReturn(completedJob); + BigQueryError bigQueryError = + new BigQueryError( + "testReasonRateLimitExceeded", "US", "testMessage: Exceeded rate limits:"); + + ImmutableList bigQueryErrorList = ImmutableList.of(bigQueryError); + BigQueryException bigQueryException = new BigQueryException(bigQueryErrorList); + when(bigquery.getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS)) + .thenThrow(bigQueryException) + .thenReturn(completedQuery); + job = this.job.toBuilder().setConfiguration(DRL_QUERY_CONFIGURATION).build(); + assertThat(job.waitFor(TEST_BIGQUERY_RETRY_CONFIG, TEST_RETRY_OPTIONS)) + .isSameInstanceAs(completedJob); + // Verify that getQueryResults is attempted twice. First during bigQueryException with "Exceeded + // rate limits" error message and the second successful attempt. + verify(bigquery, times(2)).getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS); + verify(bigquery).getJob(JOB_INFO.getJobId()); + } + + @Test + void testWaitForWithBigQueryRetryConfigErrorShouldNotRetry() throws InterruptedException { + QueryStatistics jobStatistics = + QueryStatistics.newBuilder() + .setCreationTimestamp(1L) + .setEndTime(3L) + .setStartTime(2L) + .build(); + JobInfo jobInfo = + JobInfo.newBuilder(DRL_QUERY_CONFIGURATION) + .setJobId(JOB_ID) + .setStatistics(jobStatistics) + .setJobId(JOB_ID) + .setEtag(ETAG) + .setGeneratedId(GENERATED_ID) + .setSelfLink(SELF_LINK) + .setUserEmail(EMAIL) + .setStatus(JOB_STATUS) + .build(); + + when(bigquery.getOptions()).thenReturn(mockOptions); + when(mockOptions.getClock()).thenReturn(CurrentMillisClock.getDefaultClock()); + QueryResponse completedQuery = + QueryResponse.newBuilder() + .setCompleted(true) + .setTotalRows(1) // Lies to force call of listTableData(). 
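Context for the two retry tests in this hunk: waitFor(BigQueryRetryConfig, RetryOption...) re-attempts getQueryResults only when the server error text matches a configured message fragment, which is why the "Exceeded rate limits" error is retried while "do not retry error" is surfaced immediately. A hedged caller-side sketch; BigQueryErrorMessages.RATE_LIMIT_EXCEEDED_MSG is the constant these tests use, and since the tests access these types as same-package neighbors, their visibility outside the package should be verified:

```java
import com.google.cloud.bigquery.BigQueryErrorMessages;
import com.google.cloud.bigquery.BigQueryRetryConfig;
import com.google.cloud.bigquery.Job;

class RetryOnRateLimitSketch {
  // Waits for a job, retrying result polling only on rate-limit messages.
  static Job waitTolerantly(Job job) throws InterruptedException {
    BigQueryRetryConfig retryConfig =
        BigQueryRetryConfig.newBuilder()
            .retryOnMessage(BigQueryErrorMessages.RATE_LIMIT_EXCEEDED_MSG)
            .build();
    return job.waitFor(retryConfig);
  }
}
```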
+ .setSchema(Schema.of(Field.of("_f0", LegacySQLTypeName.INTEGER))) + .setErrors(ImmutableList.of()) + .build(); + + BigQueryError bigQueryError = + new BigQueryError("testReasonRateLimitExceeded", "US", "testMessage: do not retry error"); + + ImmutableList bigQueryErrorList = ImmutableList.of(bigQueryError); + BigQueryException bigQueryException = new BigQueryException(bigQueryErrorList); + when(bigquery.getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS)) + .thenThrow(bigQueryException) + .thenReturn(completedQuery); + job = this.job.toBuilder().setConfiguration(DRL_QUERY_CONFIGURATION).build(); + BigQueryException e = + assertThrows( + BigQueryException.class, + () -> job.waitFor(TEST_BIGQUERY_RETRY_CONFIG, TEST_RETRY_OPTIONS)); + assertNotNull(e.getErrors()); + // Verify that getQueryResults is attempted only once and not retried since the error message + // does not match. + verify(bigquery, times(1)).getQueryResults(jobInfo.getJobId(), Job.DEFAULT_QUERY_WAIT_OPTIONS); } @Test - public void testReload() { + void testReload() { JobInfo updatedInfo = JOB_INFO.toBuilder().setEtag("etag").build(); Job expectedJob = new Job(bigquery, new JobInfo.BuilderImpl(updatedInfo)); when(bigquery.getJob(JOB_INFO.getJobId())).thenReturn(expectedJob); @@ -412,7 +557,7 @@ public void testReload() { } @Test - public void testReloadJobException() { + void testReloadJobException() { JobInfo updatedInfo = JOB_INFO.toBuilder().setEtag("etag").build(); Job expectedJob = new Job(bigquery, new JobInfo.BuilderImpl(updatedInfo)); BigQueryError bigQueryError = new BigQueryError("invalidQuery", "US", "invalidQuery"); @@ -421,23 +566,19 @@ public void testReloadJobException() { ImmutableList bigQueryErrorList = ImmutableList.of(bigQueryError); BigQueryException bigQueryException = new BigQueryException(bigQueryErrorList); when(bigquery.getJob(JOB_INFO.getJobId())).thenReturn(expectedJob).thenThrow(bigQueryException); - try { - job.reload(); - fail("JobException expected"); - } catch (BigQueryException e) { - assertNotNull(e.getErrors()); - } + BigQueryException e = assertThrows(BigQueryException.class, () -> job.reload()); + assertNotNull(e.getErrors()); } @Test - public void testReloadNull() { + void testReloadNull() { when(bigquery.getJob(JOB_INFO.getJobId())).thenReturn(null); assertNull(job.reload()); verify(bigquery).getJob(JOB_INFO.getJobId()); } @Test - public void testReloadWithOptions() { + void testReloadWithOptions() { JobInfo updatedInfo = JOB_INFO.toBuilder().setEtag("etag").build(); Job expectedJob = new Job(bigquery, new JobInfo.BuilderImpl(updatedInfo)); when(bigquery.getJob(JOB_INFO.getJobId(), BigQuery.JobOption.fields())).thenReturn(expectedJob); @@ -447,24 +588,24 @@ public void testReloadWithOptions() { } @Test - public void testCancel() { + void testCancel() { when(bigquery.cancel(JOB_INFO.getJobId())).thenReturn(true); assertTrue(job.cancel()); verify(bigquery).cancel(JOB_INFO.getJobId()); } @Test - public void testBigQuery() { + void testBigQuery() { assertSame(bigquery, expectedJob.getBigQuery()); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareJob(expectedJob, Job.fromPb(bigquery, expectedJob.toPb())); } @Test - public void testToAndFromPbWithoutConfiguration() { + void testToAndFromPbWithoutConfiguration() { assertNotEquals(expectedJob, bigquery); compareJob(expectedJob, Job.fromPb(bigquery, expectedJob.toPb())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LabelsTest.java 
b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LabelsTest.java deleted file mode 100644 index 787dd77df7..0000000000 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LabelsTest.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2018 Google LLC - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.google.cloud.bigquery; - -import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.fail; - -import com.google.api.client.util.Data; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import org.junit.Test; - -public class LabelsTest { - @Test - public void testFromUser() { - assertThat(Labels.fromUser(null).userMap()).isNull(); - - HashMap<String, String> user = new HashMap<>(); - assertThat(Labels.fromUser(user).userMap()).isEmpty(); - - user.put("a", "b"); - Labels labels = Labels.fromUser(user); - assertThat(labels.userMap()).containsExactly("a", "b"); - - // Changing map afterwards does not change the labels. - user.put("c", "d"); - assertThat(labels.userMap()).containsExactly("a", "b"); - } - - @Test - public void testFromToPb() { - assertThat(Labels.fromPb(null).toPb()).isNull(); - - HashMap<String, String> pb = new HashMap<>(); - assertThat(Labels.fromPb(pb).toPb()).isNull(); - - pb.put("a", "b"); - assertThat(Labels.fromPb(pb).toPb()).isEqualTo(pb); - - pb.put("c", Data.NULL_STRING); - assertThat(Labels.fromPb(pb).toPb()).isEqualTo(pb); - - Map<String, String> jsonNullMap = Data.nullOf(HashMap.class); - assertThat(Data.isNull(Labels.fromPb(jsonNullMap).toPb())).isTrue(); - } - - @Test - public void testNullKey() { - try { - Labels.fromUser(Collections.singletonMap((String) null, "foo")); - fail("null key shouldn't work"); - } catch (IllegalArgumentException e) { - } - - try { - Labels.fromPb(Collections.singletonMap((String) null, "foo")); - fail("null key shouldn't work"); - } catch (IllegalArgumentException e) { - } - } -} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java index 9f42d62b72..1a9db29951 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java @@ -16,20 +16,21 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.cloud.bigquery.JobInfo.CreateDisposition; import com.google.cloud.bigquery.JobInfo.SchemaUpdateOption; import com.google.cloud.bigquery.JobInfo.WriteDisposition; +import com.google.cloud.bigquery.LoadJobConfiguration.SourceColumnMatch; import com.google.cloud.bigquery.TimePartitioning.Type; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Map; -import org.junit.Test;
+import org.junit.jupiter.api.Test; -public class LoadJobConfigurationTest { +class LoadJobConfigurationTest { private static final String TEST_PROJECT_ID = "test-project-id"; private static final CsvOptions CSV_OPTIONS = @@ -37,6 +38,7 @@ public class LoadJobConfigurationTest { .setAllowJaggedRows(true) .setAllowQuotedNewLines(false) .setEncoding(StandardCharsets.UTF_8) + .setPreserveAsciiControlCharacters(true) .build(); private static final TableId TABLE_ID = TableId.of("dataset", "table"); private static final CreateDisposition CREATE_DISPOSITION = CreateDisposition.CREATE_IF_NEEDED; @@ -50,11 +52,15 @@ public class LoadJobConfigurationTest { .setDescription("FieldDescription") .build(); private static final List<String> SOURCE_URIS = ImmutableList.of("uri1", "uri2"); + private static final List<String> DECIMAL_TARGET_TYPES = + ImmutableList.of("NUMERIC", "BIGNUMERIC", "STRING"); private static final List<SchemaUpdateOption> SCHEMA_UPDATE_OPTIONS = ImmutableList.of(SchemaUpdateOption.ALLOW_FIELD_ADDITION); private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA); private static final Boolean AUTODETECT = true; - private static final Boolean USERAVROLOGICALTYPES = true; + private static final Boolean USE_AVRO_LOGICAL_TYPES = true; + + private static final boolean CREATE_SESSION = true; private static final EncryptionConfiguration JOB_ENCRYPTION_CONFIGURATION = EncryptionConfiguration.newBuilder().setKmsKeyName("KMS_KEY_1").build(); private static final TimePartitioning TIME_PARTITIONING = TimePartitioning.of(Type.DAY); @@ -63,12 +69,27 @@ public class LoadJobConfigurationTest { private static final Map<String, String> LABELS = ImmutableMap.of("test-job-name", "test-load-job"); private static final Long TIMEOUT = 10L; + private static final String RESERVATION = "reservation"; private static final RangePartitioning.Range RANGE = RangePartitioning.Range.newBuilder().setStart(1L).setInterval(2L).setEnd(10L).build(); private static final RangePartitioning RANGE_PARTITIONING = RangePartitioning.newBuilder().setField("IntegerField").setRange(RANGE).build(); private static final String MODE = "STRING"; private static final String SOURCE_URI_PREFIX = "gs://bucket/path_to_table"; + + private static final String KEY = "session_id"; + private static final String VALUE = "session_id_1234567890"; + private static final String TIME_ZONE = "America/Los_Angeles"; + private static final String DATE_FORMAT = "YYYY-MM-DD"; + private static final String DATETIME_FORMAT = "YYYY-MM-DD HH:MI:SS"; + private static final String TIME_FORMAT = "HH:MI:SS"; + private static final String TIMESTAMP_FORMAT = "YYYY-MM-DD HH:MI:SS"; + private static final SourceColumnMatch SOURCE_COLUMN_MATCH = SourceColumnMatch.POSITION; + private static final List<String> NULL_MARKERS = ImmutableList.of("SQL NULL", "TEST MARKER"); + private static final ConnectionProperty CONNECTION_PROPERTY = + ConnectionProperty.newBuilder().setKey(KEY).setValue(VALUE).build(); + private static final List<ConnectionProperty> CONNECTION_PROPERTIES = + ImmutableList.of(CONNECTION_PROPERTY); private static final HivePartitioningOptions HIVE_PARTITIONING_OPTIONS = HivePartitioningOptions.newBuilder() .setMode(MODE) @@ -76,9 +97,12 @@ public class LoadJobConfigurationTest { .build(); private static final LoadJobConfiguration LOAD_CONFIGURATION_CSV = LoadJobConfiguration.newBuilder(TABLE_ID, SOURCE_URIS) + .setDecimalTargetTypes(DECIMAL_TARGET_TYPES) .setCreateDisposition(CREATE_DISPOSITION) .setWriteDisposition(WRITE_DISPOSITION) .setFormatOptions(CSV_OPTIONS) + .setFileSetSpecType("FILE_SET_SPEC_TYPE_FILE_SYSTEM_MATCH") + 
.setColumnNameCharacterMap("STRICT") .setIgnoreUnknownValues(IGNORE_UNKNOWN_VALUES) .setMaxBadRecords(MAX_BAD_RECORDS) .setSchema(TABLE_SCHEMA) @@ -92,6 +116,16 @@ public class LoadJobConfigurationTest { .setRangePartitioning(RANGE_PARTITIONING) .setNullMarker("nullMarker") .setHivePartitioningOptions(HIVE_PARTITIONING_OPTIONS) + .setConnectionProperties(CONNECTION_PROPERTIES) + .setCreateSession(CREATE_SESSION) + .setReservation(RESERVATION) + .setTimeZone(TIME_ZONE) + .setDateFormat(DATE_FORMAT) + .setDatetimeFormat(DATETIME_FORMAT) + .setTimeFormat(TIME_FORMAT) + .setTimestampFormat(TIMESTAMP_FORMAT) + .setSourceColumnMatch(SOURCE_COLUMN_MATCH) + .setNullMarkers(NULL_MARKERS) .build(); private static final DatastoreBackupOptions BACKUP_OPTIONS = @@ -111,6 +145,7 @@ public class LoadJobConfigurationTest { .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) .setRangePartitioning(RANGE_PARTITIONING) + .setReservation(RESERVATION) .build(); private static final LoadJobConfiguration LOAD_CONFIGURATION_AVRO = LoadJobConfiguration.newBuilder(TABLE_ID, SOURCE_URIS) @@ -125,18 +160,18 @@ public class LoadJobConfigurationTest { .setDestinationEncryptionConfiguration(JOB_ENCRYPTION_CONFIGURATION) .setTimePartitioning(TIME_PARTITIONING) .setClustering(CLUSTERING) - .setUseAvroLogicalTypes(USERAVROLOGICALTYPES) + .setUseAvroLogicalTypes(USE_AVRO_LOGICAL_TYPES) .setLabels(LABELS) .setJobTimeoutMs(TIMEOUT) .setRangePartitioning(RANGE_PARTITIONING) + .setReservation(RESERVATION) .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareLoadJobConfiguration(LOAD_CONFIGURATION_CSV, LOAD_CONFIGURATION_CSV.toBuilder().build()); LoadJobConfiguration configurationCSV = - LOAD_CONFIGURATION_CSV - .toBuilder() + LOAD_CONFIGURATION_CSV.toBuilder() .setDestinationTable(TableId.of("dataset", "newTable")) .build(); assertEquals("newTable", configurationCSV.getDestinationTable().getTable()); @@ -146,8 +181,7 @@ public void testToBuilder() { compareLoadJobConfiguration( LOAD_CONFIGURATION_BACKUP, LOAD_CONFIGURATION_BACKUP.toBuilder().build()); LoadJobConfiguration configurationBackup = - LOAD_CONFIGURATION_BACKUP - .toBuilder() + LOAD_CONFIGURATION_BACKUP.toBuilder() .setDestinationTable(TableId.of("dataset", "newTable")) .build(); assertEquals("newTable", configurationBackup.getDestinationTable().getTable()); @@ -157,8 +191,7 @@ public void testToBuilder() { compareLoadJobConfiguration( LOAD_CONFIGURATION_AVRO, LOAD_CONFIGURATION_AVRO.toBuilder().build()); LoadJobConfiguration configurationAvro = - LOAD_CONFIGURATION_AVRO - .toBuilder() + LOAD_CONFIGURATION_AVRO.toBuilder() .setDestinationTable(TableId.of("dataset", "newTable")) .build(); assertEquals("newTable", configurationAvro.getDestinationTable().getTable()); @@ -167,7 +200,7 @@ public void testToBuilder() { } @Test - public void testOf() { + void testOf() { LoadJobConfiguration configuration = LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS); assertEquals(TABLE_ID, configuration.getDestinationTable()); assertEquals(SOURCE_URIS, configuration.getSourceUris()); @@ -187,13 +220,13 @@ public void testOf() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { LoadJobConfiguration configuration = LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS); compareLoadJobConfiguration(configuration, configuration.toBuilder().build()); } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { compareLoadJobConfiguration( LOAD_CONFIGURATION_CSV, LoadJobConfiguration.fromPb(LOAD_CONFIGURATION_CSV.toPb())); 
LoadJobConfiguration configuration = LoadJobConfiguration.of(TABLE_ID, SOURCE_URIS); @@ -201,16 +234,15 @@ public void testToPbAndFromPb() { } @Test - public void testSetProjectId() { + void testSetProjectId() { LoadConfiguration configuration = LOAD_CONFIGURATION_CSV.setProjectId(TEST_PROJECT_ID); assertEquals(TEST_PROJECT_ID, configuration.getDestinationTable().getProject()); } @Test - public void testSetProjectIdDoNotOverride() { + void testSetProjectIdDoNotOverride() { LoadConfiguration configuration = - LOAD_CONFIGURATION_CSV - .toBuilder() + LOAD_CONFIGURATION_CSV.toBuilder() .setDestinationTable(TABLE_ID.setProjectId(TEST_PROJECT_ID)) .build() .setProjectId("do-not-update"); @@ -218,7 +250,7 @@ public void testSetProjectIdDoNotOverride() { } @Test - public void testGetType() { + void testGetType() { assertEquals(JobConfiguration.Type.LOAD, LOAD_CONFIGURATION_CSV.getType()); } @@ -226,8 +258,11 @@ private void compareLoadJobConfiguration( LoadJobConfiguration expected, LoadJobConfiguration value) { assertEquals(expected, value); assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.getFileSetSpecType(), value.getFileSetSpecType()); + assertEquals(expected.getColumnNameCharacterMap(), value.getColumnNameCharacterMap()); assertEquals(expected.toString(), value.toString()); assertEquals(expected.getDestinationTable(), value.getDestinationTable()); + assertEquals(expected.getDecimalTargetTypes(), value.getDecimalTargetTypes()); assertEquals(expected.getCreateDisposition(), value.getCreateDisposition()); assertEquals(expected.getWriteDisposition(), value.getWriteDisposition()); assertEquals(expected.getCsvOptions(), value.getCsvOptions()); @@ -249,5 +284,15 @@ private void compareLoadJobConfiguration( assertEquals(expected.getRangePartitioning(), value.getRangePartitioning()); assertEquals(expected.getNullMarker(), value.getNullMarker()); assertEquals(expected.getHivePartitioningOptions(), value.getHivePartitioningOptions()); + assertEquals(expected.getConnectionProperties(), value.getConnectionProperties()); + assertEquals(expected.getCreateSession(), value.getCreateSession()); + assertEquals(expected.getReservation(), value.getReservation()); + assertEquals(expected.getTimeZone(), value.getTimeZone()); + assertEquals(expected.getDateFormat(), value.getDateFormat()); + assertEquals(expected.getDatetimeFormat(), value.getDatetimeFormat()); + assertEquals(expected.getTimeFormat(), value.getTimeFormat()); + assertEquals(expected.getTimestampFormat(), value.getTimestampFormat()); + assertEquals(expected.getSourceColumnMatch(), value.getSourceColumnMatch()); + assertEquals(expected.getNullMarkers(), value.getNullMarkers()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/MaterializedViewDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/MaterializedViewDefinitionTest.java index ab1caab07e..00ac64937f 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/MaterializedViewDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/MaterializedViewDefinitionTest.java @@ -16,10 +16,11 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.junit.Test; +import com.google.common.collect.ImmutableList; +import org.junit.jupiter.api.Test; public class 
MaterializedViewDefinitionTest { @@ -28,6 +29,10 @@ public class MaterializedViewDefinitionTest { private static final Boolean ENABLE_REFRESH = false; private static final Long REFRESH_INTERVAL_MS = 60000L; private static final Schema SCHEMA = Schema.of(); + private static final TimePartitioning TIME_PARTITIONING = + TimePartitioning.of(TimePartitioning.Type.DAY, 42); + private static final Clustering CLUSTERING = + Clustering.newBuilder().setFields(ImmutableList.of("Foo", "Bar")).build(); private static final MaterializedViewDefinition MATERIALIZED_VIEW_DEFINITION = MaterializedViewDefinition.newBuilder() .setSchema(SCHEMA) @@ -35,6 +40,8 @@ public class MaterializedViewDefinitionTest { .setLastRefreshTime(LAST_REFRESH_TIME) .setEnableRefresh(ENABLE_REFRESH) .setRefreshIntervalMs(REFRESH_INTERVAL_MS) + .setClustering(CLUSTERING) + .setTimePartitioning(TIME_PARTITIONING) .build(); @Test @@ -68,6 +75,8 @@ public void testBuilder() { .setLastRefreshTime(LAST_REFRESH_TIME) .setEnableRefresh(ENABLE_REFRESH) .setRefreshIntervalMs(REFRESH_INTERVAL_MS) + .setClustering(CLUSTERING) + .setTimePartitioning(TIME_PARTITIONING) .build(); assertEquals(MATERIALIZED_VIEW_DEFINITION, materializedViewDefinition); } @@ -92,6 +101,8 @@ private void compareMaterializedView( assertEquals(expected.getLastRefreshTime(), actual.getLastRefreshTime()); assertEquals(expected.getEnableRefresh(), actual.getEnableRefresh()); assertEquals(expected.getRefreshIntervalMs(), actual.getRefreshIntervalMs()); + assertEquals(expected.getClustering(), actual.getClustering()); + assertEquals(expected.getTimePartitioning(), actual.getTimePartitioning()); assertEquals(expected.toString(), actual.toString()); assertEquals(expected.hashCode(), actual.hashCode()); assertEquals(expected, actual); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/MetadataCacheStatsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/MetadataCacheStatsTest.java new file mode 100644 index 0000000000..d417bfc7fa --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/MetadataCacheStatsTest.java @@ -0,0 +1,60 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.google.api.services.bigquery.model.MetadataCacheStatistics; +import com.google.common.collect.ImmutableList; +import com.google.common.truth.Truth; +import java.util.List; +import java.util.stream.Collectors; +import org.junit.jupiter.api.Test; + +class MetadataCacheStatsTest { + private static final List<com.google.api.services.bigquery.model.TableMetadataCacheUsage> + TABLE_METADATA_CACHE_USAGE_PB_LIST = + ImmutableList.of( + new com.google.api.services.bigquery.model.TableMetadataCacheUsage() + .setExplanation("test explanation")); + + private static final MetadataCacheStats METADATA_CACHE_STATS = + MetadataCacheStats.newBuilder() + .setTableMetadataCacheUsage( + TABLE_METADATA_CACHE_USAGE_PB_LIST.stream() + .map(TableMetadataCacheUsage::fromPb) + .collect(Collectors.toList())) + .build(); + + private static final MetadataCacheStatistics METADATA_CACHE_STATISTICS_PB = + new MetadataCacheStatistics().setTableMetadataCacheUsage(TABLE_METADATA_CACHE_USAGE_PB_LIST); + + @Test + void testToPbAndFromPb() { + assertEquals(METADATA_CACHE_STATISTICS_PB, METADATA_CACHE_STATS.toPb()); + compareMetadataCacheStats( + METADATA_CACHE_STATS, MetadataCacheStats.fromPb(METADATA_CACHE_STATISTICS_PB)); + } + + private void compareMetadataCacheStats(MetadataCacheStats expected, MetadataCacheStats value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + Truth.assertThat( + expected.getTableMetadataCacheUsage().containsAll(value.getTableMetadataCacheUsage())) + .isTrue(); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelIdTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelIdTest.java index 266a754c44..98df2d5f26 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelIdTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelIdTest.java @@ -16,17 +16,17 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class ModelIdTest { +class ModelIdTest { public static final ModelId MODEL = ModelId.of("dataset", "model"); public static final ModelId MODEL_COMPLETE = ModelId.of("project", "dataset", "model"); @Test - public void testOf() { + void testOf() { assertEquals(null, MODEL.getProject()); assertEquals("dataset", MODEL.getDataset()); assertEquals("model", MODEL.getModel()); @@ -37,19 +37,19 @@ public void testOf() { } @Test - public void testEquals() { + void testEquals() { compareModelIds(MODEL, ModelId.of("dataset", "model")); compareModelIds(MODEL_COMPLETE, ModelId.of("project", "dataset", "model")); } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { compareModelIds(MODEL, ModelId.fromPb(MODEL.toPb())); compareModelIds(MODEL_COMPLETE, ModelId.fromPb(MODEL_COMPLETE.toPb())); } @Test - public void testSetProjectId() { + void testSetProjectId() { ModelId differentProjectTable = ModelId.of("differentProject", "dataset", "model"); assertEquals(differentProjectTable, MODEL.setProjectId("differentProject")); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelInfoTest.java index 87fa8bbf56..be0e1ff233 ---
a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelInfoTest.java @@ -15,16 +15,16 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.api.services.bigquery.model.TrainingOptions; import com.google.api.services.bigquery.model.TrainingRun; import java.util.Arrays; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class ModelInfoTest { +class ModelInfoTest { private static final ModelId MODEL_ID = ModelId.of("dataset", "model"); private static final String ETAG = "etag"; @@ -57,18 +57,18 @@ public class ModelInfoTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareModelInfo(MODEL_INFO, MODEL_INFO.toBuilder().build()); } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { ModelInfo modelInfo = ModelInfo.of(MODEL_ID); assertEquals(modelInfo, modelInfo.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(ETAG, MODEL_INFO.getEtag()); assertEquals(CREATION_TIME, MODEL_INFO.getCreationTime()); assertEquals(LAST_MODIFIED_TIME, MODEL_INFO.getLastModifiedTime()); @@ -81,7 +81,7 @@ public void testBuilder() { } @Test - public void testOf() { + void testOf() { ModelInfo modelInfo = ModelInfo.of(MODEL_ID); assertEquals(MODEL_ID, modelInfo.getModelId()); assertNull(modelInfo.getEtag()); @@ -98,12 +98,12 @@ public void testOf() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareModelInfo(MODEL_INFO, ModelInfo.fromPb(MODEL_INFO.toPb())); } @Test - public void testSetProjectId() { + void testSetProjectId() { assertEquals("project", MODEL_INFO.setProjectId("project").getModelId().getProject()); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTableDefinitionTest.java index 62b2cfe7d8..444d47c093 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTableDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTableDefinitionTest.java @@ -16,14 +16,14 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; -public class ModelTableDefinitionTest { +class ModelTableDefinitionTest { private static final String LOCATION = "US"; private static final Long NUM_BYTES = 14L; @@ -52,34 +52,33 @@ public class ModelTableDefinitionTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareModelTableDefinition(MODEL_TABLE_DEFINITION, MODEL_TABLE_DEFINITION.toBuilder().build()); } @Test - public void testTypeNullPointerException() { - try { - MODEL_TABLE_DEFINITION.toBuilder().setType(null).build(); - fail(); - } catch (NullPointerException ex) { - 
assertNotNull(ex.getMessage()); - } + void testTypeNullPointerException() { + NullPointerException ex = + Assertions.assertThrows( + NullPointerException.class, + () -> MODEL_TABLE_DEFINITION.toBuilder().setType(null).build()); + assertNotNull(ex.getMessage()); } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { ModelTableDefinition modelTableDefinition = ModelTableDefinition.newBuilder().build(); assertEquals(modelTableDefinition, modelTableDefinition.toBuilder().build()); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { assertEquals( MODEL_TABLE_DEFINITION, ModelTableDefinition.fromPb(MODEL_TABLE_DEFINITION.toPb())); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(MODEL_TABLE_DEFINITION.getSchema(), TABLE_SCHEMA); assertEquals(MODEL_TABLE_DEFINITION.getType(), TableDefinition.Type.MODEL); assertEquals(MODEL_TABLE_DEFINITION.getLocation(), LOCATION); @@ -87,12 +86,12 @@ public void testBuilder() { } @Test - public void testEquals() { + void testEquals() { assertEquals(MODEL_TABLE_DEFINITION, MODEL_TABLE_DEFINITION); } @Test - public void testNotEquals() { + void testNotEquals() { assertNotEquals(MODEL_TABLE_DEFINITION, LOCATION); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTest.java index 756277adc3..acdbdfbfe5 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ModelTest.java @@ -16,25 +16,23 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; -import org.mockito.junit.MockitoRule; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; -@RunWith(MockitoJUnitRunner.class) -public class ModelTest { +@ExtendWith(MockitoExtension.class) +class ModelTest { private static final ModelId MODEL_ID = ModelId.of("dataset", "model"); private static final String ETAG = "etag"; @@ -54,15 +52,13 @@ public class ModelTest { .setFriendlyName(FRIENDLY_NAME) .build(); - @Rule public MockitoRule rule; - private BigQuery bigquery; private BigQueryOptions mockOptions; private Model expectedModel; private Model model; - @Before - public void setUp() { + @BeforeEach + void setUp() { bigquery = mock(BigQuery.class); mockOptions = mock(BigQueryOptions.class); when(bigquery.getOptions()).thenReturn(mockOptions); @@ -71,7 +67,7 @@ public void setUp() { } @Test - public void testBuilder() { + void testBuilder() { Model builtModel = new Model.Builder(bigquery, MODEL_ID) .setEtag(ETAG) 
@@ -86,12 +82,12 @@ public void testBuilder() { } @Test - public void testToBuilder() { + void testToBuilder() { compareModelInfo(expectedModel, expectedModel.toBuilder().build()); } @Test - public void testExists_True() { + void testExists_True() { BigQuery.ModelOption[] expectedOptions = {BigQuery.ModelOption.fields()}; when(bigquery.getModel(MODEL_INFO.getModelId(), expectedOptions)).thenReturn(expectedModel); assertTrue(model.exists()); @@ -99,7 +95,7 @@ public void testExists_True() { } @Test - public void testExists_False() { + void testExists_False() { BigQuery.ModelOption[] expectedOptions = {BigQuery.ModelOption.fields()}; when(bigquery.getModel(MODEL_INFO.getModelId(), expectedOptions)).thenReturn(null); assertFalse(model.exists()); @@ -107,7 +103,7 @@ public void testExists_False() { } @Test - public void testReload() { + void testReload() { ModelInfo updatedInfo = MODEL_INFO.toBuilder().setDescription("Description").build(); Model expectedModel = new Model(bigquery, new ModelInfo.BuilderImpl(updatedInfo)); when(bigquery.getModel(MODEL_INFO.getModelId())).thenReturn(expectedModel); @@ -117,14 +113,14 @@ public void testReload() { } @Test - public void testReloadNull() { + void testReloadNull() { when(bigquery.getModel(MODEL_INFO.getModelId())).thenReturn(null); assertNull(model.reload()); verify(bigquery).getModel(MODEL_INFO.getModelId()); } @Test - public void testUpdate() { + void testUpdate() { Model expectedUpdatedModel = expectedModel.toBuilder().setDescription("Description").build(); when(bigquery.update(eq(expectedModel))).thenReturn(expectedUpdatedModel); Model actualUpdatedModel = model.update(); @@ -133,7 +129,7 @@ public void testUpdate() { } @Test - public void testUpdateWithOptions() { + void testUpdateWithOptions() { Model expectedUpdatedModel = expectedModel.toBuilder().setDescription("Description").build(); when(bigquery.update(eq(expectedModel), eq(BigQuery.ModelOption.fields()))) .thenReturn(expectedUpdatedModel); @@ -143,14 +139,14 @@ public void testUpdateWithOptions() { } @Test - public void testDeleteTrue() { + void testDeleteTrue() { when(bigquery.delete(MODEL_INFO.getModelId())).thenReturn(true); assertTrue(model.delete()); verify(bigquery).delete(MODEL_INFO.getModelId()); } @Test - public void testDeleteFalse() { + void testDeleteFalse() { when(bigquery.delete(MODEL_INFO.getModelId())).thenReturn(false); assertFalse(model.delete()); verify(bigquery).delete(MODEL_INFO.getModelId()); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/OptionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/OptionTest.java index 58f3148667..b25f067064 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/OptionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/OptionTest.java @@ -16,13 +16,13 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.cloud.bigquery.spi.v2.BigQueryRpc; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; public class OptionTest { @@ -54,11 +54,6 @@ public void testConstructor() { Option option = new Option(RPC_OPTION, null) {}; assertEquals(RPC_OPTION, 
option.getRpcOption()); assertNull(option.getValue()); - try { - new Option(null, VALUE) {}; - Assert.fail(); - } catch (NullPointerException expected) { - - } + Assertions.assertThrows(NullPointerException.class, () -> new Option(null, VALUE) {}); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ParquetOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ParquetOptionsTest.java new file mode 100644 index 0000000000..b5ace223f1 --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ParquetOptionsTest.java @@ -0,0 +1,76 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.junit.jupiter.api.Test; + +public class ParquetOptionsTest { + + private static final ParquetOptions OPTIONS = + ParquetOptions.newBuilder() + .setEnableListInference(true) + .setEnumAsString(true) + .setMapTargetType("ARRAY_OF_STRUCT") + .build(); + + @Test + public void testToBuilder() { + compareParquetOptions(OPTIONS, OPTIONS.toBuilder().build()); + ParquetOptions parquetOptions = OPTIONS.toBuilder().setEnableListInference(true).build(); + assertEquals(true, parquetOptions.getEnableListInference()); + parquetOptions = + parquetOptions.toBuilder() + .setEnumAsString(true) + .setMapTargetType("ARRAY_OF_STRUCT") + .build(); + compareParquetOptions(OPTIONS, parquetOptions); + } + + @Test + public void testToBuilderIncomplete() { + ParquetOptions parquetOptions = + ParquetOptions.newBuilder().setEnableListInference(true).build(); + assertEquals(parquetOptions, parquetOptions.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(FormatOptions.PARQUET, OPTIONS.getType()); + assertEquals(true, OPTIONS.getEnableListInference()); + assertEquals(true, OPTIONS.getEnumAsString()); + assertEquals("ARRAY_OF_STRUCT", OPTIONS.getMapTargetType()); + } + + @Test + public void testToAndFromPb() { + compareParquetOptions(OPTIONS, ParquetOptions.fromPb(OPTIONS.toPb())); + ParquetOptions parquetOptions = + ParquetOptions.newBuilder().setEnableListInference(true).build(); + compareParquetOptions(parquetOptions, ParquetOptions.fromPb(parquetOptions.toPb())); + } + + private void compareParquetOptions(ParquetOptions expected, ParquetOptions actual) { + assertThat(expected).isEqualTo(actual); + assertThat(expected.getEnableListInference()).isEqualTo(actual.getEnableListInference()); + assertThat(expected.getEnumAsString()).isEqualTo(actual.getEnumAsString()); + assertThat(expected.hashCode()).isEqualTo(actual.hashCode()); + assertThat(expected.toString()).isEqualTo(actual.toString()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyHelperTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyHelperTest.java index 291df79fde..ac05a2c1ff 100644 --- 
a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyHelperTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyHelperTest.java @@ -15,16 +15,16 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.api.services.bigquery.model.Binding; import com.google.cloud.Identity; import com.google.cloud.Policy; import com.google.cloud.Role; import com.google.common.collect.ImmutableList; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class PolicyHelperTest { +class PolicyHelperTest { public static final String ETAG = "etag"; public static final String ROLE1 = "roles/bigquery.admin"; @@ -58,7 +58,7 @@ public class PolicyHelperTest { Policy.newBuilder().setEtag(ETAG).setVersion(1).build(); @Test - public void testConversionWithBindings() { + void testConversionWithBindings() { assertEquals(IAM_POLICY, PolicyHelper.convertFromApiPolicy(API_POLICY)); assertEquals(API_POLICY, PolicyHelper.convertToApiPolicy(IAM_POLICY)); assertEquals( @@ -68,7 +68,7 @@ public void testConversionWithBindings() { } @Test - public void testConversionNoBindings() { + void testConversionNoBindings() { assertEquals(IAM_POLICY_NO_BINDINGS, PolicyHelper.convertFromApiPolicy(API_POLICY_NO_BINDINGS)); assertEquals(API_POLICY_NO_BINDINGS, PolicyHelper.convertToApiPolicy(IAM_POLICY_NO_BINDINGS)); assertEquals( diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyTagsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyTagsTest.java index f23cb36c2d..a94e4324be 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyTagsTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PolicyTagsTest.java @@ -16,13 +16,13 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class PolicyTagsTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PrimaryKeyTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PrimaryKeyTest.java new file mode 100644 index 0000000000..702ca5cb48 --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/PrimaryKeyTest.java @@ -0,0 +1,55 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.Arrays; +import java.util.List; +import org.junit.jupiter.api.Test; + +class PrimaryKeyTest { + private static final List<String> COLUMNS = Arrays.asList("column1", "column2"); + private static final PrimaryKey PRIMARY_KEY = PrimaryKey.newBuilder().setColumns(COLUMNS).build(); + + @Test + void testToBuilder() { + comparePrimaryKeyDefinition(PRIMARY_KEY, PRIMARY_KEY.toBuilder().build()); + PrimaryKey primaryKey = + PRIMARY_KEY.toBuilder().setColumns(Arrays.asList("col1", "col2", "col3")).build(); + assertEquals(Arrays.asList("col1", "col2", "col3"), primaryKey.getColumns()); + } + + @Test + void testBuilder() { + assertEquals(COLUMNS, PRIMARY_KEY.getColumns()); + PrimaryKey primaryKey = PrimaryKey.newBuilder().setColumns(COLUMNS).build(); + assertEquals(PRIMARY_KEY, primaryKey); + } + + @Test + void testToAndFromPb() { + PrimaryKey primaryKey = PRIMARY_KEY.toBuilder().build(); + assertTrue(PrimaryKey.fromPb(primaryKey.toPb()) instanceof PrimaryKey); + comparePrimaryKeyDefinition(primaryKey, PrimaryKey.fromPb(primaryKey.toPb())); + } + + private void comparePrimaryKeyDefinition(PrimaryKey expected, PrimaryKey value) { + assertEquals(expected.getColumns(), value.getColumns()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java index 3cd4182049..7fe41daa06 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java @@ -16,13 +16,14 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.cloud.bigquery.JobInfo.CreateDisposition; import com.google.cloud.bigquery.JobInfo.SchemaUpdateOption; import com.google.cloud.bigquery.JobInfo.WriteDisposition; +import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode; import com.google.cloud.bigquery.QueryJobConfiguration.Priority; import com.google.cloud.bigquery.TimePartitioning.Type; import com.google.common.collect.ImmutableList; @@ -30,7 +31,7 @@ import java.math.BigDecimal; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class QueryJobConfigurationTest { @@ -78,6 +79,7 @@ public class QueryJobConfigurationTest { private static final Priority PRIORITY = Priority.BATCH; private static final boolean ALLOW_LARGE_RESULTS = true; private static final boolean USE_QUERY_CACHE = false; + private static final boolean CREATE_SESSION = true; private static final boolean FLATTEN_RESULTS = true; private static final boolean USE_LEGACY_SQL = true; private static final Integer MAX_BILLING_TIER = 123; @@ -108,6 +110,9 @@ public class QueryJobConfigurationTest { ImmutableList.of(STRING_PARAMETER, TIMESTAMP_PARAMETER, BIGNUMERIC_PARAMETER); private static final Map<String, QueryParameterValue> NAME_PARAMETER = ImmutableMap.of("string", STRING_PARAMETER, "timestamp", TIMESTAMP_PARAMETER); + private static final String PARAMETER_MODE =
"POSITIONAL"; + private static final JobCreationMode JOB_CREATION_MODE = JobCreationMode.JOB_CREATION_OPTIONAL; + private static final String RESERVATION = "reservation"; private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION = QueryJobConfiguration.newBuilder(QUERY) .setUseQueryCache(USE_QUERY_CACHE) @@ -118,6 +123,7 @@ public class QueryJobConfigurationTest { .setDestinationTable(TABLE_ID) .setWriteDisposition(WRITE_DISPOSITION) .setPriority(PRIORITY) + .setCreateSession(CREATE_SESSION) .setFlattenResults(FLATTEN_RESULTS) .setUserDefinedFunctions(USER_DEFINED_FUNCTIONS) .setDryRun(true) @@ -133,19 +139,21 @@ public class QueryJobConfigurationTest { .setRangePartitioning(RANGE_PARTITIONING) .setConnectionProperties(CONNECTION_PROPERTIES) .setPositionalParameters(POSITIONAL_PARAMETER) + .setParameterMode(PARAMETER_MODE) + .setReservation(RESERVATION) .build(); private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION_ADD_POSITIONAL_PARAMETER = - QUERY_JOB_CONFIGURATION - .toBuilder() + QUERY_JOB_CONFIGURATION.toBuilder() .setPositionalParameters(ImmutableList.of()) .addPositionalParameter(STRING_PARAMETER) .build(); private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION_SET_NAME_PARAMETER = - QUERY_JOB_CONFIGURATION - .toBuilder() + QUERY_JOB_CONFIGURATION.toBuilder() .setPositionalParameters(ImmutableList.of()) .setNamedParameters(NAME_PARAMETER) .build(); + private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION_SET_JOB_CREATION_MODE = + QUERY_JOB_CONFIGURATION.toBuilder().setJobCreationMode(JOB_CREATION_MODE).build(); @Test public void testToBuilder() { @@ -182,6 +190,7 @@ public void testToPbAndFromPb() { assertNotNull(QUERY_JOB_CONFIGURATION.getConnectionProperties()); assertNotNull(QUERY_JOB_CONFIGURATION.getPositionalParameters()); assertNotNull(QUERY_JOB_CONFIGURATION.getNamedParameters()); + assertNotNull(QUERY_JOB_CONFIGURATION.getReservation()); compareQueryJobConfiguration( QUERY_JOB_CONFIGURATION, QueryJobConfiguration.fromPb(QUERY_JOB_CONFIGURATION.toPb())); QueryJobConfiguration job = QueryJobConfiguration.of(QUERY); @@ -198,8 +207,7 @@ public void testSetProjectId() { @Test public void testSetProjectIdDoNotOverride() { QueryJobConfiguration configuration = - QUERY_JOB_CONFIGURATION - .toBuilder() + QUERY_JOB_CONFIGURATION.toBuilder() .setDestinationTable(TABLE_ID.setProjectId(TEST_PROJECT_ID)) .build() .setProjectId("update-only-on-dataset"); @@ -226,6 +234,13 @@ public void testNamedParameter() { QUERY_JOB_CONFIGURATION_SET_NAME_PARAMETER.toBuilder().build()); } + @Test + public void testJobCreationMode() { + compareQueryJobConfiguration( + QUERY_JOB_CONFIGURATION_SET_JOB_CREATION_MODE, + QUERY_JOB_CONFIGURATION_SET_JOB_CREATION_MODE.toBuilder().build()); + } + private void compareQueryJobConfiguration( QueryJobConfiguration expected, QueryJobConfiguration value) { assertEquals(expected, value); @@ -236,6 +251,7 @@ private void compareQueryJobConfiguration( assertEquals(expected.getCreateDisposition(), value.getCreateDisposition()); assertEquals(expected.getDefaultDataset(), value.getDefaultDataset()); assertEquals(expected.getDestinationTable(), value.getDestinationTable()); + assertEquals(expected.createSession(), value.createSession()); assertEquals(expected.flattenResults(), value.flattenResults()); assertEquals(expected.getPriority(), value.getPriority()); assertEquals(expected.getQuery(), value.getQuery()); @@ -258,5 +274,6 @@ private void compareQueryJobConfiguration( assertEquals(expected.getConnectionProperties(), 
value.getConnectionProperties()); assertEquals(expected.getPositionalParameters(), value.getPositionalParameters()); assertEquals(expected.getNamedParameters(), value.getNamedParameters()); + assertEquals(expected.getReservation(), value.getReservation()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java index b643ae5808..276234246e 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java @@ -16,49 +16,27 @@ package com.google.cloud.bigquery; +import static com.google.cloud.bigquery.QueryParameterValue.TIMESTAMP_FORMATTER; import static com.google.common.truth.Truth.assertThat; -import static org.threeten.bp.temporal.ChronoField.HOUR_OF_DAY; -import static org.threeten.bp.temporal.ChronoField.MINUTE_OF_HOUR; -import static org.threeten.bp.temporal.ChronoField.NANO_OF_SECOND; -import static org.threeten.bp.temporal.ChronoField.SECOND_OF_MINUTE; +import static org.junit.jupiter.api.Assertions.assertThrows; import com.google.api.services.bigquery.model.QueryParameterType; import com.google.common.collect.ImmutableMap; +import com.google.gson.JsonObject; import java.math.BigDecimal; import java.text.ParseException; +import java.time.Instant; +import java.time.Period; +import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; -import org.junit.Assert; -import org.junit.Test; -import org.threeten.bp.Instant; -import org.threeten.bp.ZoneOffset; -import org.threeten.bp.format.DateTimeFormatter; -import org.threeten.bp.format.DateTimeFormatterBuilder; -import org.threeten.bp.jdk8.Jdk8Methods; +import org.junit.jupiter.api.Test; +import org.threeten.extra.PeriodDuration; public class QueryParameterValueTest { - private static final DateTimeFormatter TIMESTAMPFORMATTER = - new DateTimeFormatterBuilder() - .parseLenient() - .append(DateTimeFormatter.ISO_LOCAL_DATE) - .appendLiteral(' ') - .appendValue(HOUR_OF_DAY, 2) - .appendLiteral(':') - .appendValue(MINUTE_OF_HOUR, 2) - .optionalStart() - .appendLiteral(':') - .appendValue(SECOND_OF_MINUTE, 2) - .optionalStart() - .appendFraction(NANO_OF_SECOND, 6, 9, true) - .optionalStart() - .appendOffset("+HHMM", "+00:00") - .optionalEnd() - .toFormatter() - .withZone(ZoneOffset.UTC); - private static final QueryParameterValue QUERY_PARAMETER_VALUE = QueryParameterValue.newBuilder() .setType(StandardSQLTypeName.STRING) @@ -79,12 +57,11 @@ public void testBuilder() { @Test public void testTypeNullPointerException() { - try { - QUERY_PARAMETER_VALUE.toBuilder().setType(null).build(); - Assert.fail(); - } catch (NullPointerException ex) { - assertThat(ex).isNotNull(); - } + NullPointerException ex = + assertThrows( + NullPointerException.class, + () -> QUERY_PARAMETER_VALUE.toBuilder().setType(null).build()); + assertThat(ex).isNotNull(); } @Test @@ -193,6 +170,51 @@ public void testString() { assertThat(value.getArrayValues()).isNull(); } + @Test + public void testGeography() { + QueryParameterValue value = QueryParameterValue.geography("POINT(-122.350220 47.649154)"); + assertThat(value.getValue()).isEqualTo("POINT(-122.350220 47.649154)"); + assertThat(value.getType()).isEqualTo(StandardSQLTypeName.GEOGRAPHY); + assertThat(value.getArrayType()).isNull(); + assertThat(value.getArrayValues()).isNull(); + } + 
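+ // A hedged note on the behavior exercised below: QueryParameterValue.json accepts either a
+ // raw JSON string, which is stored verbatim, or a Gson JsonObject, which is serialized to its
+ // compact form, as the assertions in testJson() show.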
+ @Test + public void testJson() { + QueryParameterValue value = + QueryParameterValue.json("{\"class\" : {\"students\" : [{\"name\" : \"Jane\"}]}}"); + JsonObject jsonObject = new JsonObject(); + jsonObject.addProperty("class", "student"); + QueryParameterValue value1 = QueryParameterValue.json(jsonObject); + assertThat(value.getValue()) + .isEqualTo("{\"class\" : {\"students\" : [{\"name\" : \"Jane\"}]}}"); + assertThat(value1.getValue()).isEqualTo("{\"class\":\"student\"}"); + assertThat(value.getType()).isEqualTo(StandardSQLTypeName.JSON); + assertThat(value1.getType()).isEqualTo(StandardSQLTypeName.JSON); + assertThat(value.getArrayType()).isNull(); + assertThat(value1.getArrayType()).isNull(); + assertThat(value.getArrayValues()).isNull(); + assertThat(value1.getArrayValues()).isNull(); + } + + @Test + public void testInterval() { + QueryParameterValue value = QueryParameterValue.interval("123-7 -19 0:24:12.000006"); + QueryParameterValue value1 = QueryParameterValue.interval("P123Y7M-19DT0H24M12.000006S"); + QueryParameterValue value2 = + QueryParameterValue.interval( + PeriodDuration.of(Period.of(1, 2, 25), java.time.Duration.ofHours(8))); + assertThat(value.getValue()).isEqualTo("123-7 -19 0:24:12.000006"); + assertThat(value1.getValue()).isEqualTo("P123Y7M-19DT0H24M12.000006S"); + assertThat(value2.getValue()).isEqualTo("P1Y2M25DT8H"); + assertThat(value.getType()).isEqualTo(StandardSQLTypeName.INTERVAL); + assertThat(value1.getType()).isEqualTo(StandardSQLTypeName.INTERVAL); + assertThat(value2.getType()).isEqualTo(StandardSQLTypeName.INTERVAL); + assertThat(value.getArrayType()).isNull(); + assertThat(value1.getArrayType()).isNull(); + assertThat(value2.getArrayType()).isNull(); + } + @Test public void testBytes() { QueryParameterValue value = QueryParameterValue.bytes(new byte[] {1, 3}); @@ -278,57 +300,89 @@ public void testStringArray() { @Test public void testTimestampFromLong() { - QueryParameterValue value = QueryParameterValue.timestamp(1408452095220000L); - assertThat(value.getValue()).isEqualTo("2014-08-19 12:41:35.220000+00:00"); - assertThat(value.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); - assertThat(value.getArrayType()).isNull(); - assertThat(value.getArrayValues()).isNull(); + // Expects output to be ISO8601 string with microsecond precision + assertTimestampValue( + QueryParameterValue.timestamp(1408452095220000L), "2014-08-19 12:41:35.220000+00:00"); } @Test public void testTimestampWithFormatter() { long timestampInMicroseconds = 1571068536842L * 1000 + 123; long microseconds = 1_000_000; - long secs = Jdk8Methods.floorDiv(timestampInMicroseconds, microseconds); - int nano = (int) Jdk8Methods.floorMod(timestampInMicroseconds, microseconds) * 1000; + long secs = Math.floorDiv(timestampInMicroseconds, microseconds); + int nano = (int) Math.floorMod(timestampInMicroseconds, microseconds) * 1000; Instant instant = Instant.ofEpochSecond(secs, nano); - String expected = TIMESTAMPFORMATTER.format(instant); - assertThat(expected) - .isEqualTo(QueryParameterValue.timestamp(timestampInMicroseconds).getValue()); + String expected = TIMESTAMP_FORMATTER.format(instant); + assertTimestampValue(QueryParameterValue.timestamp(timestampInMicroseconds), expected); } @Test - public void testTimestamp() { - QueryParameterValue value = QueryParameterValue.timestamp("2014-08-19 12:41:35.220000+00:00"); - assertThat(value.getValue()).isEqualTo("2014-08-19 12:41:35.220000+00:00"); - assertThat(value.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); - 
assertThat(value.getArrayType()).isNull(); - assertThat(value.getArrayValues()).isNull(); + public void testTimestampFromString() { + assertTimestampValue( + QueryParameterValue.timestamp("2014-08-19 12:41:35.220000+00:00"), + "2014-08-19 12:41:35.220000+00:00"); + assertTimestampValue( + QueryParameterValue.timestamp("2025-08-19 12:34:56.123456789+00:00"), + "2025-08-19 12:34:56.123456789+00:00"); + + // The following test cases test more than nanosecond precision + // 10 digits of precision (1 digit more than nanosecond) + assertTimestampValue( + QueryParameterValue.timestamp("2025-12-08 12:34:56.1234567890+00:00"), + "2025-12-08 12:34:56.1234567890+00:00"); + // 12 digits (picosecond precision) + assertTimestampValue( + QueryParameterValue.timestamp("2025-12-08 12:34:56.123456789123+00:00"), + "2025-12-08 12:34:56.123456789123+00:00"); + + // More than picosecond precision + assertThrows( + IllegalArgumentException.class, + () -> QueryParameterValue.timestamp("2025-12-08 12:34:56.1234567891234+00:00")); + assertThrows( + IllegalArgumentException.class, + () -> + QueryParameterValue.timestamp("2025-12-08 12:34:56.123456789123456789123456789+00:00")); } @Test public void testTimestampWithDateTimeFormatterBuilder() { - QueryParameterValue value = QueryParameterValue.timestamp("2019-02-14 12:34:45.938993Z"); - assertThat(value.getValue()).isEqualTo("2019-02-14 12:34:45.938993Z"); - assertThat(value.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); - assertThat(value.getArrayType()).isNull(); - assertThat(value.getArrayValues()).isNull(); - QueryParameterValue value1 = QueryParameterValue.timestamp("2019-02-14 12:34:45.938993+0000"); - assertThat(value1.getValue()).isEqualTo("2019-02-14 12:34:45.938993+0000"); - assertThat(value1.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); - assertThat(value1.getArrayType()).isNull(); - assertThat(value1.getArrayValues()).isNull(); - QueryParameterValue value2 = QueryParameterValue.timestamp("2019-02-14 12:34:45.102+00:00"); - assertThat(value2.getValue()).isEqualTo("2019-02-14 12:34:45.102+00:00"); - assertThat(value2.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); - assertThat(value2.getArrayType()).isNull(); - assertThat(value2.getArrayValues()).isNull(); + assertTimestampValue( + QueryParameterValue.timestamp("2019-02-14 12:34:45.938993Z"), + "2019-02-14 12:34:45.938993Z"); + assertTimestampValue( + QueryParameterValue.timestamp("2019-02-14 12:34:45.938993+0000"), + "2019-02-14 12:34:45.938993+0000"); + assertTimestampValue( + QueryParameterValue.timestamp("2019-02-14 12:34:45.102+00:00"), + "2019-02-14 12:34:45.102+00:00"); } - @Test(expected = IllegalArgumentException.class) - public void testInvalidTimestamp() { + @Test + public void testInvalidTimestampStringValues() { + assertThrows(IllegalArgumentException.class, () -> QueryParameterValue.timestamp("abc")); + + // missing the time - QueryParameterValue.timestamp("2014-08-19"); + assertThrows(IllegalArgumentException.class, () -> QueryParameterValue.timestamp("2014-08-19")); + + // missing the hour + assertThrows( + IllegalArgumentException.class, () -> QueryParameterValue.timestamp("2014-08-19 12")); + + // can't have the 'T' separator + assertThrows( + IllegalArgumentException.class, () -> QueryParameterValue.timestamp("2014-08-19T12")); + assertThrows( + IllegalArgumentException.class, + () -> QueryParameterValue.timestamp("2014-08-19T12:34:00.123456")); + + // Fractional part has picosecond length, but is not a valid number + assertThrows( + 
IllegalArgumentException.class, + () -> QueryParameterValue.timestamp("2014-08-19 12:34:00.123456789abc+00:00")); + assertThrows( + IllegalArgumentException.class, + () -> QueryParameterValue.timestamp("2014-08-19 12:34:00.123456abc789+00:00")); } @Test @@ -351,10 +405,12 @@ public void testStandardDate() throws ParseException { assertThat(value.getArrayValues()).isNull(); } - @Test(expected = IllegalArgumentException.class) + @Test public void testInvalidDate() { // not supposed to have the time - QueryParameterValue.date("2014-08-19 12:41:35.220000"); + assertThrows( + IllegalArgumentException.class, + () -> QueryParameterValue.date("2014-08-19 12:41:35.220000")); } @Test @@ -366,10 +422,12 @@ public void testTime() { assertThat(value.getArrayValues()).isNull(); } - @Test(expected = IllegalArgumentException.class) + @Test public void testInvalidTime() { // not supposed to have the date - QueryParameterValue.time("2014-08-19 12:41:35.220000"); + assertThrows( + IllegalArgumentException.class, + () -> QueryParameterValue.time("2014-08-19 12:41:35.220000")); } @Test @@ -381,10 +439,10 @@ public void testDateTime() { assertThat(value.getArrayValues()).isNull(); } - @Test(expected = IllegalArgumentException.class) + @Test public void testInvalidDateTime() { // missing the time - QueryParameterValue.dateTime("2014-08-19"); + assertThrows(IllegalArgumentException.class, () -> QueryParameterValue.dateTime("2014-08-19")); } @Test @@ -515,6 +573,48 @@ public void testNestedStruct() { assertThat(nestedRecordField.getStructValues().size()).isEqualTo(structValue.size()); } + @Test + public void testStructArray() { + Boolean[] boolValues = new Boolean[] {true, false}; + Integer[] intValues = new Integer[] {15, 20}; + String[] stringValues = new String[] {"test-string", "test-string2"}; + List<Map<String, QueryParameterValue>> fieldMaps = new ArrayList<>(); + List<QueryParameterValue> tuples = new ArrayList<>(); + for (int i = 0; i < 2; i++) { + QueryParameterValue booleanField = QueryParameterValue.bool(boolValues[i]); + QueryParameterValue integerField = QueryParameterValue.int64(intValues[i]); + QueryParameterValue stringField = QueryParameterValue.string(stringValues[i]); + ImmutableMap<String, QueryParameterValue> fieldMap = + ImmutableMap.of( + "booleanField", + booleanField, + "integerField", + integerField, + "stringField", + stringField); + fieldMaps.add(fieldMap); + QueryParameterValue recordField = QueryParameterValue.struct(fieldMap); + tuples.add(recordField); + } + QueryParameterValue repeatedRecordField = + QueryParameterValue.array(tuples.toArray(), StandardSQLTypeName.STRUCT); + com.google.api.services.bigquery.model.QueryParameterValue parameterValue = + repeatedRecordField.toValuePb(); + QueryParameterType parameterType = repeatedRecordField.toTypePb(); + QueryParameterValue queryParameterValue = + QueryParameterValue.fromPb(parameterValue, parameterType); + assertThat(queryParameterValue.getValue()).isNull(); + assertThat(queryParameterValue.getType()).isEqualTo(StandardSQLTypeName.ARRAY); + assertThat(queryParameterValue.getArrayType()).isEqualTo(StandardSQLTypeName.STRUCT); + assertThat(queryParameterValue.getArrayValues().size()).isEqualTo(2); + for (int i = 0; i < 2; i++) { + QueryParameterValue record = queryParameterValue.getArrayValues().get(i); + assertThat(record.getType()).isEqualTo(StandardSQLTypeName.STRUCT); + assertThat(record.getStructTypes()).isNotNull(); + assertThat(record.getStructValues()).isEqualTo(fieldMaps.get(i)); + } + } + private static void assertArrayDataEquals( String[] expectedValues, StandardSQLTypeName expectedType, @@ -528,4
+628,76 @@ private static void assertArrayDataEquals( assertThat(value.getArrayValues()).isNull(); } } + + @Test + public void testRange() { + testRangeDataEquals(null, null, FieldElementType.newBuilder().setType("DATE").build()); + testRangeDataEquals(null, "1971-02-03", FieldElementType.newBuilder().setType("DATE").build()); + testRangeDataEquals("1970-01-02", null, FieldElementType.newBuilder().setType("DATE").build()); + testRangeDataEquals( + "1970-01-02", "1971-02-03", FieldElementType.newBuilder().setType("DATE").build()); + + testRangeDataEquals(null, null, FieldElementType.newBuilder().setType("DATETIME").build()); + testRangeDataEquals( + null, + "2015-09-20 06:41:35.220000", + FieldElementType.newBuilder().setType("DATETIME").build()); + testRangeDataEquals( + "2014-08-19 05:41:35.220000", + null, + FieldElementType.newBuilder().setType("DATETIME").build()); + testRangeDataEquals( + "2014-08-19 05:41:35.220000", + "2015-09-20 06:41:35.220000", + FieldElementType.newBuilder().setType("DATETIME").build()); + + testRangeDataEquals(null, null, FieldElementType.newBuilder().setType("TIMESTAMP").build()); + testRangeDataEquals( + null, + "2015-09-20 13:41:35.220000+01:00", + FieldElementType.newBuilder().setType("TIMESTAMP").build()); + testRangeDataEquals( + "2014-08-19 12:41:35.220000+00:00", + null, + FieldElementType.newBuilder().setType("TIMESTAMP").build()); + testRangeDataEquals( + "2014-08-19 12:41:35.220000+00:00", + "2015-09-20 13:41:35.220000+01:00", + FieldElementType.newBuilder().setType("TIMESTAMP").build()); + } + + /** Helper method to test range QueryParameterValue and its permutations. */ + private static void testRangeDataEquals(String start, String end, FieldElementType type) { + QueryParameterValue rangeField = + QueryParameterValue.range( + Range.newBuilder().setType(type).setStart(start).setEnd(end).build()); + QueryParameterType parameterType = rangeField.toTypePb(); + com.google.api.services.bigquery.model.QueryParameterValue parameterValue = + rangeField.toValuePb(); + QueryParameterValue queryParameterValue = + QueryParameterValue.fromPb(parameterValue, parameterType); + + assertThat(queryParameterValue.getType()).isEqualTo(StandardSQLTypeName.RANGE); + if (start == null) { + assertThat(queryParameterValue.getRangeValues().getStart().isNull()).isTrue(); + } else { + assertThat(queryParameterValue.getRangeValues().getStart().getStringValue()).isEqualTo(start); + } + if (end == null) { + assertThat(queryParameterValue.getRangeValues().getEnd().isNull()).isTrue(); + } else { + assertThat(queryParameterValue.getRangeValues().getEnd().getStringValue()).isEqualTo(end); + } + assertThat(queryParameterValue.getRangeValues().getType()).isEqualTo(type); + assertThat(queryParameterValue.getArrayValues()).isNull(); + assertThat(queryParameterValue.getStructValues()).isNull(); + assertThat(queryParameterValue.getValue()).isNull(); + } + + private void assertTimestampValue(QueryParameterValue value, String expectedStringValue) { + assertThat(value.getValue()).isEqualTo(expectedStringValue); + assertThat(value.getType()).isEqualTo(StandardSQLTypeName.TIMESTAMP); + assertThat(value.getArrayType()).isNull(); + assertThat(value.getArrayValues()).isNull(); + } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java index 4adeeaf006..be1f0e1982 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java 
+++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryRequestInfoTest.java @@ -16,19 +16,22 @@ package com.google.cloud.bigquery; -import static org.assertj.core.api.Assertions.*; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.api.services.bigquery.model.QueryRequest; import com.google.cloud.bigquery.JobInfo.CreateDisposition; import com.google.cloud.bigquery.JobInfo.SchemaUpdateOption; import com.google.cloud.bigquery.JobInfo.WriteDisposition; +import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode; import com.google.cloud.bigquery.QueryJobConfiguration.Priority; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class QueryRequestInfoTest { @@ -74,6 +77,7 @@ public class QueryRequestInfoTest { private static final WriteDisposition WRITE_DISPOSITION = WriteDisposition.WRITE_APPEND; private static final Priority PRIORITY = Priority.BATCH; private static final boolean ALLOW_LARGE_RESULTS = true; + private static final boolean CREATE_SESSION = true; private static final boolean USE_QUERY_CACHE = false; private static final boolean FLATTEN_RESULTS = true; private static final boolean USE_LEGACY_SQL = true; @@ -104,6 +108,9 @@ public class QueryRequestInfoTest { ImmutableList.of(STRING_PARAMETER, TIMESTAMP_PARAMETER); private static final Map NAME_PARAMETER = ImmutableMap.of("string", STRING_PARAMETER, "timestamp", TIMESTAMP_PARAMETER); + private static final JobCreationMode jobCreationModeRequired = + JobCreationMode.JOB_CREATION_REQUIRED; + private static final String RESERVATION = "reservation"; private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION = QueryJobConfiguration.newBuilder(QUERY) .setUseQueryCache(USE_QUERY_CACHE) @@ -130,8 +137,11 @@ public class QueryRequestInfoTest { .setConnectionProperties(CONNECTION_PROPERTIES) .setPositionalParameters(POSITIONAL_PARAMETER) .setMaxResults(100L) + .setJobCreationMode(jobCreationModeRequired) + .setReservation(RESERVATION) .build(); - QueryRequestInfo REQUEST_INFO = new QueryRequestInfo(QUERY_JOB_CONFIGURATION); + QueryRequestInfo REQUEST_INFO = + new QueryRequestInfo(QUERY_JOB_CONFIGURATION, DataFormatOptions.newBuilder().build()); private static final QueryJobConfiguration QUERY_JOB_CONFIGURATION_SUPPORTED = QueryJobConfiguration.newBuilder(QUERY) .setUseQueryCache(USE_QUERY_CACHE) @@ -142,14 +152,22 @@ public class QueryRequestInfoTest { .setLabels(LABELS) .setConnectionProperties(CONNECTION_PROPERTIES) .setPositionalParameters(POSITIONAL_PARAMETER) + .setCreateSession(CREATE_SESSION) .setMaxResults(100L) + .setReservation(RESERVATION) .build(); - QueryRequestInfo REQUEST_INFO_SUPPORTED = new QueryRequestInfo(QUERY_JOB_CONFIGURATION_SUPPORTED); + QueryRequestInfo REQUEST_INFO_SUPPORTED = + new QueryRequestInfo( + QUERY_JOB_CONFIGURATION_SUPPORTED, DataFormatOptions.newBuilder().build()); @Test public void testIsFastQuerySupported() { - assertEquals(false, REQUEST_INFO.isFastQuerySupported()); - assertEquals(true, REQUEST_INFO_SUPPORTED.isFastQuerySupported()); + JobId jobIdSupported = JobId.newBuilder().build(); + JobId jobIdNotSupported = 
JobId.newBuilder().setJob("random-job-id").build(); + assertEquals(false, REQUEST_INFO.isFastQuerySupported(jobIdSupported)); + assertEquals(true, REQUEST_INFO_SUPPORTED.isFastQuerySupported(jobIdSupported)); + assertEquals(false, REQUEST_INFO.isFastQuerySupported(jobIdNotSupported)); + assertEquals(false, REQUEST_INFO_SUPPORTED.isFastQuerySupported(jobIdNotSupported)); } @Test @@ -161,12 +179,54 @@ public void testToPb() { @Test public void equalTo() { compareQueryRequestInfo( - new QueryRequestInfo(QUERY_JOB_CONFIGURATION_SUPPORTED), REQUEST_INFO_SUPPORTED); - compareQueryRequestInfo(new QueryRequestInfo(QUERY_JOB_CONFIGURATION), REQUEST_INFO); + new QueryRequestInfo( + QUERY_JOB_CONFIGURATION_SUPPORTED, DataFormatOptions.newBuilder().build()), + REQUEST_INFO_SUPPORTED); + compareQueryRequestInfo( + new QueryRequestInfo(QUERY_JOB_CONFIGURATION, DataFormatOptions.newBuilder().build()), + REQUEST_INFO); + } + + @Test + public void testInt64Timestamp() { + QueryRequestInfo requestInfo = + new QueryRequestInfo(QUERY_JOB_CONFIGURATION, DataFormatOptions.newBuilder().build()); + QueryRequest requestPb = requestInfo.toPb(); + assertFalse(requestPb.getFormatOptions().getUseInt64Timestamp()); + + QueryRequestInfo requestInfoLosslessTs = + new QueryRequestInfo( + QUERY_JOB_CONFIGURATION, + DataFormatOptions.newBuilder().useInt64Timestamp(true).build()); + QueryRequest requestLosslessTsPb = requestInfoLosslessTs.toPb(); + assertTrue(requestLosslessTsPb.getFormatOptions().getUseInt64Timestamp()); } + /* + Ref: https://github.com/googleapis/java-bigquery/issues/2083 + Refactoring to remove the assertj dependency, which was causing a RequireUpperBoundDeps error + */ private void compareQueryRequestInfo(QueryRequestInfo expected, QueryRequestInfo actual) { + QueryRequest expectedQueryReq = expected.toPb(); + QueryRequest actualQueryReq = actual.toPb(); + // requestIds are expected to be different - assertThat(actual).isEqualToIgnoringGivenFields(expected, "requestId"); + assertNotEquals(expectedQueryReq.getRequestId(), actualQueryReq.getRequestId()); + // rest of the attributes should be equal + assertEquals( + expectedQueryReq.getConnectionProperties(), actualQueryReq.getConnectionProperties()); + assertEquals(expectedQueryReq.getDefaultDataset(), actualQueryReq.getDefaultDataset()); + assertEquals(expectedQueryReq.getDryRun(), actualQueryReq.getDryRun()); + assertEquals(expectedQueryReq.getLabels(), actualQueryReq.getLabels()); + assertEquals(expectedQueryReq.getMaximumBytesBilled(), actualQueryReq.getMaximumBytesBilled()); + assertEquals(expectedQueryReq.getMaxResults(), actualQueryReq.getMaxResults()); + assertEquals(expectedQueryReq.getQuery(), actualQueryReq.getQuery()); + assertEquals(expectedQueryReq.getQueryParameters(), actualQueryReq.getQueryParameters()); + assertEquals(expectedQueryReq.getCreateSession(), actualQueryReq.getCreateSession()); + assertEquals(expectedQueryReq.getUseQueryCache(), actualQueryReq.getUseQueryCache()); + assertEquals(expectedQueryReq.getUseLegacySql(), actualQueryReq.getUseLegacySql()); + assertEquals(expectedQueryReq.get("jobCreationMode"), actualQueryReq.get("jobCreationMode")); + assertEquals(expectedQueryReq.getFormatOptions(), actualQueryReq.getFormatOptions()); + assertEquals(expectedQueryReq.getReservation(), actualQueryReq.getReservation()); } }
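For context on the new `jobCreationMode` and `reservation` fields exercised above, a minimal sketch of how a caller might set them via the public builder (the setters and the `JOB_CREATION_REQUIRED` constant appear in this change; the project and reservation names are placeholders, and a default-credentialed client is assumed):

import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode;
import com.google.cloud.bigquery.TableResult;

public class QueryOptionsSketch {
  public static void main(String[] args) throws Exception {
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    QueryJobConfiguration config =
        QueryJobConfiguration.newBuilder("SELECT 1")
            // Force a full job; JOB_CREATION_OPTIONAL instead permits the
            // stateless fast-query path when the request qualifies.
            .setJobCreationMode(JobCreationMode.JOB_CREATION_REQUIRED)
            // Placeholder reservation resource name.
            .setReservation("projects/my-project/locations/US/reservations/my-reservation")
            .build();
    TableResult result = bigquery.query(config);
    result.iterateAll().forEach(row -> System.out.println(row.get(0).getValue()));
  }
}

diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java index bc7d6083be..30eeb90ad4 100644 ---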
a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java @@ -16,16 +16,16 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import com.google.api.services.bigquery.model.ExplainQueryStep; import com.google.cloud.bigquery.QueryStage.QueryStep; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class QueryStageTest { +class QueryStageTest { private static final List SUBSTEPS1 = ImmutableList.of("substep1", "substep2"); private static final List SUBSTEPS2 = ImmutableList.of("substep3", "substep4"); @@ -96,7 +96,7 @@ public class QueryStageTest { .build(); @Test - public void testQueryStepConstructor() { + void testQueryStepConstructor() { assertEquals("KIND", QUERY_STEP1.getName()); assertEquals("KIND", QUERY_STEP2.getName()); assertEquals(SUBSTEPS1, QUERY_STEP1.getSubsteps()); @@ -104,7 +104,7 @@ public void testQueryStepConstructor() { } @Test - public void testBuilder() { + void testBuilder() { assertEquals(COMPLETED_PARALLEL_INPUTS, QUERY_STAGE.getCompletedParallelInputs()); assertEquals(COMPUTE_MS_AVG, QUERY_STAGE.getComputeMsAvg()); assertEquals(COMPUTE_MS_MAX, QUERY_STAGE.getComputeMsMax()); @@ -138,7 +138,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareQueryStep(QUERY_STEP1, QueryStep.fromPb(QUERY_STEP1.toPb())); compareQueryStep(QUERY_STEP2, QueryStep.fromPb(QUERY_STEP2.toPb())); compareQueryStage(QUERY_STAGE, QueryStage.fromPb(QUERY_STAGE.toPb())); @@ -149,14 +149,14 @@ public void testToAndFromPb() { } @Test - public void testEquals() { + void testEquals() { compareQueryStep(QUERY_STEP1, QUERY_STEP1); compareQueryStep(QUERY_STEP2, QUERY_STEP2); compareQueryStage(QUERY_STAGE, QUERY_STAGE); } @Test - public void testNotEquals() { + void testNotEquals() { assertNotEquals(QUERY_STAGE, QUERY_STEP1); assertNotEquals(QUERY_STEP1, QUERY_STAGE); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RangeTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RangeTest.java new file mode 100644 index 0000000000..b72b4b70c2 --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RangeTest.java @@ -0,0 +1,118 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.google.common.collect.ImmutableMap; +import org.junit.jupiter.api.Test; + +public class RangeTest { + private static final Range RANGE_DATE = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("DATE").build()) + .setStart("1970-01-02") + .setEnd("1970-03-04") + .build(); + + private static final Range RANGE_DATETIME = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("DATETIME").build()) + .setStart("2014-08-19 05:41:35.220000") + .setEnd("2015-09-20 06:41:35.220000") + .build(); + + private static final Range RANGE_TIMESTAMP = + Range.newBuilder() + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .setStart("2014-08-19 12:41:35.220000+00:00") + .setEnd("2015-09-20 13:41:35.220000+01:00") + .build(); + + @Test + public void testOf() { + compareRange(null, null, Range.of("[null, NULL)")); + compareRange(null, null, Range.of("[unbounded, UNBOUNDED)")); + compareRange(null, null, Range.of("[nUlL, uNbOuNdEd)")); + + compareRange(null, "2020-12-31", Range.of("[null, 2020-12-31)")); + compareRange("2020-01-01", null, Range.of("[2020-01-01, null)")); + compareRange("2020-01-01", "2020-12-31", Range.of("[2020-01-01, 2020-12-31)")); + } + + @Test + public void testBuilder() { + assertEquals("1970-01-02", RANGE_DATE.getStart().getStringValue()); + assertEquals("1970-03-04", RANGE_DATE.getEnd().getStringValue()); + assertEquals(FieldElementType.newBuilder().setType("DATE").build(), RANGE_DATE.getType()); + + assertEquals("2014-08-19 05:41:35.220000", RANGE_DATETIME.getStart().getStringValue()); + assertEquals("2015-09-20 06:41:35.220000", RANGE_DATETIME.getEnd().getStringValue()); + assertEquals( + FieldElementType.newBuilder().setType("DATETIME").build(), RANGE_DATETIME.getType()); + + assertEquals("2014-08-19 12:41:35.220000+00:00", RANGE_TIMESTAMP.getStart().getStringValue()); + assertEquals("2015-09-20 13:41:35.220000+01:00", RANGE_TIMESTAMP.getEnd().getStringValue()); + assertEquals( + FieldElementType.newBuilder().setType("TIMESTAMP").build(), RANGE_TIMESTAMP.getType()); + } + + @Test + public void testToBuilder() { + compareRange(RANGE_DATE, RANGE_DATE.toBuilder().build()); + compareRange(RANGE_DATETIME, RANGE_DATETIME.toBuilder().build()); + compareRange(RANGE_TIMESTAMP, RANGE_TIMESTAMP.toBuilder().build()); + } + + @Test + public void testGetValues() { + compareRange(null, null, Range.of("[null, NULL)").getValues()); + compareRange(null, null, Range.of("[unbounded, UNBOUNDED)").getValues()); + compareRange(null, null, Range.of("[nUlL, uNbOuNdEd)").getValues()); + + compareRange(null, "2020-12-31", Range.of("[null, 2020-12-31)").getValues()); + compareRange("2020-01-01", null, Range.of("[2020-01-01, null)").getValues()); + compareRange("2020-01-01", "2020-12-31", Range.of("[2020-01-01, 2020-12-31)").getValues()); + } + + private static void compareRange(Range expected, Range value) { + assertEquals(expected.getStart(), value.getStart()); + assertEquals(expected.getEnd(), value.getEnd()); + assertEquals(expected.getType(), value.getType()); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + } + + private static void compareRange(String expectedStart, String expectedEnd, Range range) { + if (expectedStart == null) { + assertTrue(range.getStart().isNull()); + } else { + assertEquals(expectedStart, 
range.getStart().getStringValue()); + } + if (expectedEnd == null) { + assertTrue(range.getEnd().isNull()); + } else { + assertEquals(expectedEnd, range.getEnd().getStringValue()); + } + } + + private static void compareRange( + String expectedStart, String expectedEnd, ImmutableMap<String, String> values) { + assertEquals(expectedStart, values.get("start")); + assertEquals(expectedEnd, values.get("end")); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RemoteFunctionOptionsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RemoteFunctionOptionsTest.java new file mode 100644 index 0000000000..a3559f5cfc --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RemoteFunctionOptionsTest.java @@ -0,0 +1,73 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.util.HashMap; +import java.util.Map; +import org.junit.jupiter.api.Test; + +class RemoteFunctionOptionsTest { + private static final String endpoint = "https://aaabbbccc-uc.a.run.app"; + private static final String connection = + "projects/{projectId}/locations/{locationId}/connections/{connectionId}"; + private static final Map<String, String> userDefinedContext = + new HashMap<String, String>() { + { + put("key1", "value1"); + put("key2", "value2"); + } + }; + private static final Long maxBatchingRows = 20L; + + private static final RemoteFunctionOptions REMOTE_FUNCTION_OPTIONS = + RemoteFunctionOptions.newBuilder() + .setEndpoint(endpoint) + .setConnection(connection) + .setUserDefinedContext(userDefinedContext) + .setMaxBatchingRows(maxBatchingRows) + .build(); + + @Test + void testToBuilder() { + compareRemoteFunctionOptions( + REMOTE_FUNCTION_OPTIONS, REMOTE_FUNCTION_OPTIONS.toBuilder().build()); + } + + @Test + void testBuilder() { + assertEquals(endpoint, REMOTE_FUNCTION_OPTIONS.getEndpoint()); + assertEquals(connection, REMOTE_FUNCTION_OPTIONS.getConnection()); + assertEquals(userDefinedContext, REMOTE_FUNCTION_OPTIONS.getUserDefinedContext()); + assertEquals(maxBatchingRows, REMOTE_FUNCTION_OPTIONS.getMaxBatchingRows()); + } + + @Test + void testToAndFromPb() { + compareRemoteFunctionOptions( + REMOTE_FUNCTION_OPTIONS, RemoteFunctionOptions.fromPb(REMOTE_FUNCTION_OPTIONS.toPb())); + } + + public void compareRemoteFunctionOptions( + RemoteFunctionOptions expected, RemoteFunctionOptions actual) { + assertEquals(expected, actual); + assertEquals(expected.getEndpoint(), actual.getEndpoint()); + assertEquals(expected.getConnection(), actual.getConnection()); + assertEquals(expected.getMaxBatchingRows(), actual.getMaxBatchingRows()); + assertEquals(expected.getUserDefinedContext(), actual.getUserDefinedContext()); + } +}
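Since `RangeTest` above introduces the new `Range` type, here is a brief sketch of constructing one and binding it as a query parameter. It builds only on APIs shown in this diff (`Range.newBuilder`, `FieldElementType`, `QueryParameterValue.range`); the date bounds are placeholder values:

import com.google.cloud.bigquery.FieldElementType;
import com.google.cloud.bigquery.QueryParameterValue;
import com.google.cloud.bigquery.Range;

public class RangeParamSketch {
  public static void main(String[] args) {
    // Half-open DATE range; the string form Range.of("[2020-01-01, 2020-12-31)")
    // from RangeTest.testOf is equivalent.
    Range range =
        Range.newBuilder()
            .setType(FieldElementType.newBuilder().setType("DATE").build())
            .setStart("2020-01-01")
            .setEnd("2020-12-31")
            .build();
    QueryParameterValue param = QueryParameterValue.range(range);
    System.out.println(param.getType()); // RANGE
  }
}

diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineArgumentTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineArgumentTest.java index 909d5981d7..31a2c56de9 100644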
--- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineArgumentTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineArgumentTest.java @@ -15,9 +15,9 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class RoutineArgumentTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineIdTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineIdTest.java index 94a19fbfd5..2800f3caa6 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineIdTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineIdTest.java @@ -15,9 +15,9 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class RoutineIdTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java index 1f1181433b..f191cbedd0 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineInfoTest.java @@ -15,14 +15,14 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class RoutineInfoTest { +class RoutineInfoTest { private static final RoutineId ROUTINE_ID = RoutineId.of("dataset", "routine"); private static final String ETAG = "etag"; @@ -33,6 +33,8 @@ public class RoutineInfoTest { private static final Long LAST_MODIFIED_TIME = 20L; private static final String LANGUAGE = "SQL"; + private static final String DATA_GOVERNANCE_TYPE = "DATA_MASKING"; + private static final RoutineArgument ARG_1 = RoutineArgument.newBuilder() .setDataType(StandardSQLDataType.newBuilder("STRING").build()) @@ -50,8 +52,7 @@ public class RoutineInfoTest { private static final String BODY = "body"; private static final RoutineInfo ROUTINE_INFO = - RoutineInfo.of(ROUTINE_ID) - .toBuilder() + RoutineInfo.of(ROUTINE_ID).toBuilder() .setEtag(ETAG) .setRoutineType(ROUTINE_TYPE) .setCreationTime(CREATION_TIME) @@ -63,21 +64,22 @@ public class RoutineInfoTest { .setReturnType(RETURN_TYPE) .setImportedLibraries(IMPORTED_LIBRARIES) .setBody(BODY) + .setDataGovernanceType(DATA_GOVERNANCE_TYPE) .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareRoutineInfo(ROUTINE_INFO, ROUTINE_INFO.toBuilder().build()); } @Test - public void testBuilderIncomplete() { + void testBuilderIncomplete() { RoutineInfo routineInfo = RoutineInfo.of(ROUTINE_ID); assertEquals(routineInfo, routineInfo.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(ROUTINE_ID, ROUTINE_INFO.getRoutineId()); assertEquals(ETAG, ROUTINE_INFO.getEtag()); assertEquals(ROUTINE_TYPE, ROUTINE_INFO.getRoutineType()); @@ -90,10 +92,11 @@ public void testBuilder() { assertEquals(RETURN_TYPE, 
ROUTINE_INFO.getReturnType()); assertEquals(IMPORTED_LIBRARIES, ROUTINE_INFO.getImportedLibraries()); assertEquals(BODY, ROUTINE_INFO.getBody()); + assertEquals(DATA_GOVERNANCE_TYPE, ROUTINE_INFO.getDataGovernanceType()); } @Test - public void testOf() { + void testOf() { RoutineInfo routineInfo = RoutineInfo.of(ROUTINE_ID); assertEquals(ROUTINE_ID, ROUTINE_INFO.getRoutineId()); assertNull(routineInfo.getEtag()); @@ -107,14 +110,16 @@ public void testOf() { assertNull(routineInfo.getReturnType()); assertNull(routineInfo.getImportedLibraries()); assertNull(routineInfo.getBody()); + assertNull(routineInfo.getDataGovernanceType()); } - public void testToAndFromPb() { + @Test + void testToAndFromPb() { compareRoutineInfo(ROUTINE_INFO, RoutineInfo.fromPb(ROUTINE_INFO.toPb())); } @Test - public void testSetProjectId() { + void testSetProjectId() { assertEquals("project", ROUTINE_INFO.setProjectId("project").getRoutineId().getProject()); } @@ -132,6 +137,7 @@ public void compareRoutineInfo(RoutineInfo expected, RoutineInfo value) { assertEquals(expected.getReturnType(), value.getReturnType()); assertEquals(expected.getImportedLibraries(), value.getImportedLibraries()); assertEquals(expected.getBody(), value.getBody()); + assertEquals(expected.getDataGovernanceType(), value.getDataGovernanceType()); assertEquals(expected.hashCode(), value.hashCode()); assertEquals(expected.toString(), value.toString()); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineTest.java index f0e29410db..839bfe5e66 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/RoutineTest.java @@ -15,32 +15,34 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import com.google.common.collect.ImmutableList; +import java.util.HashMap; import java.util.List; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; -import org.mockito.junit.MockitoRule; - -@RunWith(MockitoJUnitRunner.class) +import java.util.Map; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) public class RoutineTest { private static final RoutineId ROUTINE_ID = RoutineId.of("dataset", "routine"); + private static final RoutineId ROUTINE_ID_TVF = RoutineId.of("dataset", "tvf_routine"); private static final String DETERMINISM_LEVEL = "DETERMINISTIC"; private static final String ETAG = "etag"; private static final String ROUTINE_TYPE = "SCALAR_FUNCTION"; + private static final String ROUTINE_TYPE_TVF = 
"TABLE_VALUED_FUNCTION"; private static final Long CREATION_TIME = 10L; private static final Long LAST_MODIFIED_TIME = 20L; private static final String LANGUAGE = "SQL"; @@ -56,10 +58,38 @@ public class RoutineTest { private static final StandardSQLDataType RETURN_TYPE = StandardSQLDataType.newBuilder("FLOAT64").build(); + private static final StandardSQLField COLUMN_1 = + StandardSQLField.newBuilder("COLUMN_1", StandardSQLDataType.newBuilder("STRING").build()) + .build(); + private static final StandardSQLField COLUMN_2 = + StandardSQLField.newBuilder("COLUMN_2", StandardSQLDataType.newBuilder("FLOAT64").build()) + .build(); + + private static final List COLUMN_LIST = ImmutableList.of(COLUMN_1, COLUMN_2); + + private static final StandardSQLTableType RETURN_TABLE_TYPE = + StandardSQLTableType.newBuilder(COLUMN_LIST).build(); + private static final List IMPORTED_LIBRARIES = ImmutableList.of("gs://foo", "gs://bar", "gs://baz"); private static final String BODY = "body"; + private static final Map userDefinedContext = + new HashMap() { + { + put("key1", "value1"); + put("key2", "value2"); + } + }; + private static final RemoteFunctionOptions REMOTE_FUNCTION_OPTIONS = + RemoteFunctionOptions.newBuilder() + .setEndpoint("endpoint") + .setConnection("connection") + .setUserDefinedContext(userDefinedContext) + .setMaxBatchingRows(10L) + .build(); + + private static final String DATA_GOVERNANCE_TYPE = "DATA_MASKING"; private static final RoutineInfo ROUTINE_INFO = RoutineInfo.newBuilder(ROUTINE_ID) @@ -73,21 +103,30 @@ public class RoutineTest { .setReturnType(RETURN_TYPE) .setImportedLibraries(IMPORTED_LIBRARIES) .setBody(BODY) + .setRemoteFunctionOptions(REMOTE_FUNCTION_OPTIONS) + .setDataGovernanceType(DATA_GOVERNANCE_TYPE) .build(); - @Rule public MockitoRule rule; + private static final RoutineInfo ROUTINE_INFO_TVF = + RoutineInfo.newBuilder(ROUTINE_ID_TVF) + .setBody(BODY) + .setRoutineType(ROUTINE_TYPE_TVF) + .setReturnTableType(RETURN_TABLE_TYPE) + .build(); private BigQuery bigquery; private BigQueryOptions mockOptions; private Routine expectedRoutine; + private Routine expectedRoutineTvf; private Routine routine; - @Before + @BeforeEach public void setUp() { bigquery = mock(BigQuery.class); mockOptions = mock(BigQueryOptions.class); when(bigquery.getOptions()).thenReturn(mockOptions); expectedRoutine = new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO)); + expectedRoutineTvf = new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO_TVF)); routine = new Routine(bigquery, new RoutineInfo.BuilderImpl(ROUTINE_INFO)); } @@ -105,6 +144,8 @@ public void testBuilder() { .setReturnType(RETURN_TYPE) .setImportedLibraries(IMPORTED_LIBRARIES) .setBody(BODY) + .setRemoteFunctionOptions(REMOTE_FUNCTION_OPTIONS) + .setDataGovernanceType(DATA_GOVERNANCE_TYPE) .build(); assertEquals(ETAG, builtRoutine.getEtag()); assertEquals(DETERMINISM_LEVEL, builtRoutine.getDeterminismLevel()); @@ -114,6 +155,7 @@ public void testBuilder() { @Test public void testToBuilder() { compareRoutineInfo(expectedRoutine, expectedRoutine.toBuilder().build()); + compareRoutineInfo(expectedRoutineTvf, expectedRoutineTvf.toBuilder().build()); } @Test @@ -200,8 +242,11 @@ public void compareRoutineInfo(RoutineInfo expected, RoutineInfo value) { assertEquals(expected.getLanguage(), value.getLanguage()); assertEquals(expected.getArguments(), value.getArguments()); assertEquals(expected.getReturnType(), value.getReturnType()); + assertEquals(expected.getReturnTableType(), value.getReturnTableType()); 
assertEquals(expected.getImportedLibraries(), value.getImportedLibraries()); assertEquals(expected.getBody(), value.getBody()); assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.getRemoteFunctionOptions(), value.getRemoteFunctionOptions()); + assertEquals(expected.getDataGovernanceType(), value.getDataGovernanceType()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SchemaTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SchemaTest.java index 63c6752d73..9750fd7bd3 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SchemaTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SchemaTest.java @@ -16,14 +16,14 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.api.services.bigquery.model.TableSchema; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class SchemaTest { +class SchemaTest { private static PolicyTags POLICY_TAGS = PolicyTags.newBuilder().setNames(ImmutableList.of("someTag")).build(); @@ -32,6 +32,9 @@ public class SchemaTest { Field.newBuilder("StringField", LegacySQLTypeName.STRING) .setMode(Field.Mode.NULLABLE) .setDescription("FieldDescription1") + .setPrecision(20L) + .setScale(20L) + .setMaxLength(10L) .build(); private static final Field FIELD_SCHEMA2 = Field.newBuilder("IntegerField", LegacySQLTypeName.INTEGER) @@ -50,12 +53,12 @@ public class SchemaTest { private static final Schema TABLE_SCHEMA = Schema.of(FIELDS); @Test - public void testOf() { + void testOf() { compareTableSchema(TABLE_SCHEMA, Schema.of(FIELDS)); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareTableSchema(TABLE_SCHEMA, Schema.fromPb(TABLE_SCHEMA.toPb())); } @@ -65,7 +68,7 @@ private void compareTableSchema(Schema expected, Schema value) { } @Test - public void testEmptySchema() { + void testEmptySchema() { TableSchema tableSchema = new TableSchema(); Schema schema = Schema.fromPb(tableSchema); assertEquals(0, schema.getFields().size()); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SerializationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SerializationTest.java index 30bb0db0b8..e91a243949 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SerializationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SerializationTest.java @@ -20,6 +20,7 @@ import com.google.cloud.NoCredentials; import com.google.cloud.PageImpl; import com.google.cloud.Restorable; +import com.google.cloud.bigquery.Acl.DatasetAclEntity; import com.google.cloud.bigquery.StandardTableDefinition.StreamingBuffer; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -38,8 +39,6 @@ public class SerializationTest extends BaseSerializationTest { Acl.of(new Acl.View(TableId.of("project", "dataset", "table")), Acl.Role.WRITER); private static final Acl ROUTINE_ACCESS = Acl.of(new Acl.Routine(RoutineId.of("project", "dataset", "routine")), Acl.Role.WRITER); - private static final List ACCESS_RULES = - ImmutableList.of(DOMAIN_ACCESS, GROUP_ACCESS, VIEW_ACCESS, ROUTINE_ACCESS, USER_ACCESS); private static final Long CREATION_TIME = System.currentTimeMillis() - 10; private static final Long DEFAULT_TABLE_EXPIRATION = 100L; private static final 
String DESCRIPTION = "Description"; @@ -50,6 +49,11 @@ public class SerializationTest extends BaseSerializationTest { private static final String LOCATION = ""; private static final String SELF_LINK = "http://bigquery/p/d"; private static final DatasetId DATASET_ID = DatasetId.of("project", "dataset"); + private static final List TARGET_TYPES = ImmutableList.of("VIEWS"); + private static final Acl DATASET_ACCESS = Acl.of(new DatasetAclEntity(DATASET_ID, TARGET_TYPES)); + private static final List ACCESS_RULES = + ImmutableList.of( + DOMAIN_ACCESS, GROUP_ACCESS, VIEW_ACCESS, ROUTINE_ACCESS, USER_ACCESS, DATASET_ACCESS); private static final DatasetInfo DATASET_INFO = DatasetInfo.newBuilder(DATASET_ID) .setAcl(ACCESS_RULES) @@ -202,7 +206,11 @@ public class SerializationTest extends BaseSerializationTest { private static final FieldValue FIELD_VALUE = FieldValue.of(FieldValue.Attribute.PRIMITIVE, "value"); private static final TableResult TABLE_RESULT = - new TableResult(Schema.of(), 0L, new PageImpl(null, "", ImmutableList.of())); + TableResult.newBuilder() + .setSchema(Schema.of()) + .setTotalRows(0L) + .setPageNoSchema(new PageImpl(null, "", ImmutableList.of())) + .build(); private static final BigQuery BIGQUERY = BigQueryOptions.newBuilder().setProjectId("p1").build().getService(); private static final Dataset DATASET = @@ -228,6 +236,7 @@ protected Serializable[] serializableObjects() { USER_ACCESS, VIEW_ACCESS, ROUTINE_ACCESS, + DATASET_ACCESS, DATASET_ID, DATASET_INFO, TABLE_ID, diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SnapshotTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SnapshotTableDefinitionTest.java new file mode 100644 index 0000000000..defcd9cb39 --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SnapshotTableDefinitionTest.java @@ -0,0 +1,72 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
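The new `DatasetAclEntity` fixture above corresponds to dataset-level ACLs granting another dataset access (the authorized-datasets pattern). A short sketch of constructing one, using only the constructor and `Acl.of` overload shown in this diff; the project and dataset identifiers are placeholders:

import com.google.cloud.bigquery.Acl;
import com.google.cloud.bigquery.Acl.DatasetAclEntity;
import com.google.cloud.bigquery.DatasetId;
import com.google.common.collect.ImmutableList;

public class DatasetAclSketch {
  public static void main(String[] args) {
    DatasetId sourceDataset = DatasetId.of("my-project", "shared_views");
    // "VIEWS" matches the TARGET_TYPES fixture used in SerializationTest above.
    Acl acl = Acl.of(new DatasetAclEntity(sourceDataset, ImmutableList.of("VIEWS")));
    System.out.println(acl.getEntity());
  }
}

diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SnapshotTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SnapshotTableDefinitionTest.java new file mode 100644 index 0000000000..defcd9cb39 --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SnapshotTableDefinitionTest.java @@ -0,0 +1,72 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.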
+ */ + +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.junit.jupiter.api.Test; + +class SnapshotTableDefinitionTest { + + private static final TableId BASE_TABLE_ID = TableId.of("DATASET_NAME", "BASE_TABLE_NAME"); + private static final String SNAPSHOT_TIME = "2021-05-19T11:32:26.553Z"; + private static final SnapshotTableDefinition SNAPSHOTTABLE_DEFINITION = + SnapshotTableDefinition.newBuilder() + .setBaseTableId(BASE_TABLE_ID) + .setSnapshotTime(SNAPSHOT_TIME) + .build(); + + @Test + void testToBuilder() { + compareSnapshotTableDefinition( + SNAPSHOTTABLE_DEFINITION, SNAPSHOTTABLE_DEFINITION.toBuilder().build()); + SnapshotTableDefinition snapshotTableDefinition = + SNAPSHOTTABLE_DEFINITION.toBuilder().setSnapshotTime("2021-05-20T11:32:26.553Z").build(); + assertEquals("2021-05-20T11:32:26.553Z", snapshotTableDefinition.getSnapshotTime()); + } + + @Test + void testBuilder() { + assertEquals(TableDefinition.Type.SNAPSHOT, SNAPSHOTTABLE_DEFINITION.getType()); + assertEquals(BASE_TABLE_ID, SNAPSHOTTABLE_DEFINITION.getBaseTableId()); + assertEquals(SNAPSHOT_TIME, SNAPSHOTTABLE_DEFINITION.getSnapshotTime()); + SnapshotTableDefinition snapshotTableDefinition = + SnapshotTableDefinition.newBuilder() + .setBaseTableId(BASE_TABLE_ID) + .setSnapshotTime(SNAPSHOT_TIME) + .build(); + assertEquals(SNAPSHOTTABLE_DEFINITION, snapshotTableDefinition); + } + + @Test + void testToAndFromPb() { + SnapshotTableDefinition snapshotTableDefinition = SNAPSHOTTABLE_DEFINITION.toBuilder().build(); + assertTrue( + TableDefinition.fromPb(snapshotTableDefinition.toPb()) instanceof SnapshotTableDefinition); + compareSnapshotTableDefinition( + snapshotTableDefinition, + TableDefinition.fromPb(snapshotTableDefinition.toPb())); + } + + private void compareSnapshotTableDefinition( + SnapshotTableDefinition expected, SnapshotTableDefinition value) { + assertEquals(expected, value); + assertEquals(expected.getBaseTableId(), value.getBaseTableId()); + assertEquals(expected.getSnapshotTime(), value.getSnapshotTime()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLDataTypeTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLDataTypeTest.java index 635a75612b..ffc6311184 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLDataTypeTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLDataTypeTest.java @@ -15,13 +15,13 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class StandardSQLDataTypeTest { +class StandardSQLDataTypeTest { private static final String STRING_TYPEKIND = "STRING"; private static final String ARRAY_TYPEKIND = "ARRAY"; private static final String STRUCT_TYPEKIND = "STRUCT"; @@ -42,7 +42,7 @@ public class StandardSQLDataTypeTest { StandardSQLDataType.newBuilder(STRUCT_TYPEKIND).setStructType(STRUCT_TYPE).build(); @Test - public void testToBuilder() { + void testToBuilder() { compareStandardSQLDataType(STRING_DATA_TYPE, STRING_DATA_TYPE.toBuilder().build()); compareStandardSQLDataType( ARRAY_OF_STRING_DATA_TYPE, ARRAY_OF_STRING_DATA_TYPE.toBuilder().build()); @@ -50,7 +50,7 @@ public void testToBuilder() { 
} @Test - public void testBuilder() { + void testBuilder() { assertEquals(STRING_TYPEKIND, STRING_DATA_TYPE.getTypeKind()); assertEquals(ARRAY_TYPEKIND, ARRAY_OF_STRING_DATA_TYPE.getTypeKind()); assertEquals(STRING_DATA_TYPE, ARRAY_OF_STRING_DATA_TYPE.getArrayElementType()); @@ -58,7 +58,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareStandardSQLDataType( ARRAY_OF_STRING_DATA_TYPE, StandardSQLDataType.fromPb(ARRAY_OF_STRING_DATA_TYPE.toPb())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLFieldTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLFieldTest.java index 904ed80280..5e3af997d7 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLFieldTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLFieldTest.java @@ -15,9 +15,9 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class StandardSQLFieldTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLStructTypeTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLStructTypeTest.java index d4fa86950f..ab88de3f0e 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLStructTypeTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLStructTypeTest.java @@ -15,13 +15,13 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class StandardSQLStructTypeTest { +class StandardSQLStructTypeTest { private static final StandardSQLField FIELD_1 = StandardSQLField.newBuilder("FIELD_1", StandardSQLDataType.newBuilder("STRING").build()) @@ -35,18 +35,18 @@ public class StandardSQLStructTypeTest { StandardSQLStructType.newBuilder(FIELD_LIST).build(); @Test - public void testToBuilder() { + void testToBuilder() { compareStandardSQLStructType(STRUCT_TYPE, STRUCT_TYPE.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(FIELD_1, STRUCT_TYPE.getFields().get(0)); assertEquals(FIELD_2, STRUCT_TYPE.getFields().get(1)); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareStandardSQLStructType(STRUCT_TYPE, StandardSQLStructType.fromPb(STRUCT_TYPE.toPb())); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLTableTypeTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLTableTypeTest.java new file mode 100644 index 0000000000..ce5a4992cd --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardSQLTableTypeTest.java @@ -0,0 +1,60 @@ +/* + * Copyright 2021 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.google.common.collect.ImmutableList; +import java.util.List; +import org.junit.jupiter.api.Test; + +public class StandardSQLTableTypeTest { + + private static final StandardSQLField COLUMN_1 = + StandardSQLField.newBuilder("COLUMN_1", StandardSQLDataType.newBuilder("STRING").build()) + .build(); + private static final StandardSQLField COLUMN_2 = + StandardSQLField.newBuilder("COLUMN_2", StandardSQLDataType.newBuilder("FLOAT64").build()) + .build(); + + private static final List<StandardSQLField> COLUMN_LIST = ImmutableList.of(COLUMN_1, COLUMN_2); + private static final StandardSQLTableType TABLE_TYPE = + StandardSQLTableType.newBuilder(COLUMN_LIST).build(); + + @Test + public void testToBuilder() { + compareStandardSQLTableType(TABLE_TYPE, TABLE_TYPE.toBuilder().build()); + } + + @Test + public void testBuilder() { + assertEquals(COLUMN_1, TABLE_TYPE.getColumns().get(0)); + assertEquals(COLUMN_2, TABLE_TYPE.getColumns().get(1)); + } + + @Test + public void testToAndFromPb() { + compareStandardSQLTableType(TABLE_TYPE, StandardSQLTableType.fromPb(TABLE_TYPE.toPb())); + } + + private void compareStandardSQLTableType( + StandardSQLTableType expected, StandardSQLTableType value) { + assertEquals(expected, value); + assertEquals(expected.getColumns(), value.getColumns()); + assertEquals(expected.hashCode(), value.hashCode()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java index 393b7fbc65..6ff0a366d1 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java @@ -16,12 +16,12 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.api.services.bigquery.model.Streamingbuffer; import com.google.api.services.bigquery.model.Table; @@ -29,7 +29,7 @@ import com.google.cloud.bigquery.StandardTableDefinition.StreamingBuffer; import com.google.common.collect.ImmutableList; import com.google.common.truth.Truth; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class StandardTableDefinitionTest { @@ -51,6 +51,13 @@ public class StandardTableDefinitionTest { private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1,
FIELD_SCHEMA2, FIELD_SCHEMA3); private static final Long NUM_BYTES = 42L; private static final Long NUM_LONG_TERM_BYTES = 18L; + private static final Long NUM_TIME_TRAVEL_PHYSICAL_BYTES = 21L; + private static final Long NUM_TOTAL_LOGICAL_BYTES = 22L; + private static final Long NUM_ACTIVE_LOGICAL_BYTES = 23L; + private static final Long NUM_LONG_TERM_LOGICAL_BYTES = 24L; + private static final Long NUM_TOTAL_PHYSICAL_BYTES = 25L; + private static final Long NUM_ACTIVE_PHYSICAL_BYTES = 26L; + private static final Long NUM_LONG_TERM_PHYSICAL_BYTES = 27L; private static final Long NUM_ROWS = 43L; private static final String LOCATION = "US"; private static final StreamingBuffer STREAMING_BUFFER = new StreamingBuffer(1L, 2L, 3L); @@ -58,16 +65,31 @@ public class StandardTableDefinitionTest { TimePartitioning.of(TimePartitioning.Type.DAY, 42); private static final Clustering CLUSTERING = Clustering.newBuilder().setFields(ImmutableList.of("Foo", "Bar")).build(); + private static final BigLakeConfiguration BIG_LAKE_CONFIGURATION = + BigLakeConfiguration.newBuilder() + .setConnectionId("us.connection-test") + .setTableFormat("ICEBERG") + .setFileFormat("PARQUET") + .setStorageUri("gs://java-bigquery-test/standard-table-def") + .build(); private static final StandardTableDefinition TABLE_DEFINITION = StandardTableDefinition.newBuilder() .setLocation(LOCATION) .setNumBytes(NUM_BYTES) .setNumRows(NUM_ROWS) .setNumLongTermBytes(NUM_LONG_TERM_BYTES) + .setNumTimeTravelPhysicalBytes(NUM_TIME_TRAVEL_PHYSICAL_BYTES) + .setNumTotalLogicalBytes(NUM_TOTAL_LOGICAL_BYTES) + .setNumActiveLogicalBytes(NUM_ACTIVE_LOGICAL_BYTES) + .setNumLongTermLogicalBytes(NUM_LONG_TERM_LOGICAL_BYTES) + .setNumTotalPhysicalBytes(NUM_TOTAL_PHYSICAL_BYTES) + .setNumActivePhysicalBytes(NUM_ACTIVE_PHYSICAL_BYTES) + .setNumLongTermPhysicalBytes(NUM_LONG_TERM_PHYSICAL_BYTES) .setStreamingBuffer(STREAMING_BUFFER) .setSchema(TABLE_SCHEMA) .setTimePartitioning(TIME_PARTITIONING) .setClustering(CLUSTERING) + .setBigLakeConfiguration(BIG_LAKE_CONFIGURATION) .build(); @Test @@ -93,6 +115,13 @@ public void testBuilder() { assertEquals(LOCATION, TABLE_DEFINITION.getLocation()); assertEquals(NUM_BYTES, TABLE_DEFINITION.getNumBytes()); assertEquals(NUM_LONG_TERM_BYTES, TABLE_DEFINITION.getNumLongTermBytes()); + assertEquals(NUM_TIME_TRAVEL_PHYSICAL_BYTES, TABLE_DEFINITION.getNumTimeTravelPhysicalBytes()); + assertEquals(NUM_TOTAL_LOGICAL_BYTES, TABLE_DEFINITION.getNumTotalLogicalBytes()); + assertEquals(NUM_ACTIVE_LOGICAL_BYTES, TABLE_DEFINITION.getNumActiveLogicalBytes()); + assertEquals(NUM_LONG_TERM_LOGICAL_BYTES, TABLE_DEFINITION.getNumLongTermLogicalBytes()); + assertEquals(NUM_TOTAL_PHYSICAL_BYTES, TABLE_DEFINITION.getNumTotalPhysicalBytes()); + assertEquals(NUM_ACTIVE_PHYSICAL_BYTES, TABLE_DEFINITION.getNumActivePhysicalBytes()); + assertEquals(NUM_LONG_TERM_PHYSICAL_BYTES, TABLE_DEFINITION.getNumLongTermPhysicalBytes()); assertEquals(NUM_ROWS, TABLE_DEFINITION.getNumRows()); assertEquals(STREAMING_BUFFER, TABLE_DEFINITION.getStreamingBuffer()); assertEquals(TIME_PARTITIONING, TABLE_DEFINITION.getTimePartitioning()); @@ -102,12 +131,10 @@ public void testBuilder() { @Test public void testTypeNullPointerException() { - try { - TABLE_DEFINITION.toBuilder().setType(null).build(); - fail(); - } catch (NullPointerException ex) { - assertNotNull(ex.getMessage()); - } + NullPointerException ex = + assertThrows( + NullPointerException.class, () -> TABLE_DEFINITION.toBuilder().setType(null).build()); + assertNotNull(ex.getMessage()); } @Test @@ 
-118,6 +145,13 @@ public void testOf() { assertNull(definition.getLocation()); assertNull(definition.getNumBytes()); assertNull(definition.getNumLongTermBytes()); + assertNull(definition.getNumTimeTravelPhysicalBytes()); + assertNull(definition.getNumTotalLogicalBytes()); + assertNull(definition.getNumActiveLogicalBytes()); + assertNull(definition.getNumLongTermLogicalBytes()); + assertNull(definition.getNumTotalPhysicalBytes()); + assertNull(definition.getNumActivePhysicalBytes()); + assertNull(definition.getNumLongTermPhysicalBytes()); assertNull(definition.getNumRows()); assertNull(definition.getStreamingBuffer()); assertNull(definition.getTimePartitioning()); @@ -147,15 +181,12 @@ public void testFromPbWithUnexpectedTimePartitioningTypeRaisesInvalidArgumentExc .setTableId("ILLEGAL_ARG_TEST_TABLE")) .setTimePartitioning( new com.google.api.services.bigquery.model.TimePartitioning().setType("GHURRY")); - try { - StandardTableDefinition.fromPb(invalidTable); - } catch (IllegalArgumentException ie) { - Truth.assertThat(ie.getMessage()) - .contains( - "Illegal Argument - Got unexpected time partitioning GHURRY in project ILLEGAL_ARG_TEST_PROJECT in dataset ILLEGAL_ARG_TEST_DATASET in table ILLEGAL_ARG_TEST_TABLE"); - return; - } - fail("testFromPb illegal argument exception did not throw!"); + IllegalArgumentException ie = + assertThrows( + IllegalArgumentException.class, () -> StandardTableDefinition.fromPb(invalidTable)); + Truth.assertThat(ie.getMessage()) + .contains( + "Illegal Argument - Got unexpected time partitioning GHURRY in project ILLEGAL_ARG_TEST_PROJECT in dataset ILLEGAL_ARG_TEST_DATASET in table ILLEGAL_ARG_TEST_TABLE"); } @Test @@ -176,6 +207,13 @@ private void compareStandardTableDefinition( assertEquals(expected.getType(), value.getType()); assertEquals(expected.getNumBytes(), value.getNumBytes()); assertEquals(expected.getNumLongTermBytes(), value.getNumLongTermBytes()); + assertEquals(expected.getNumTimeTravelPhysicalBytes(), value.getNumTimeTravelPhysicalBytes()); + assertEquals(expected.getNumTotalLogicalBytes(), value.getNumTotalLogicalBytes()); + assertEquals(expected.getNumActiveLogicalBytes(), value.getNumActiveLogicalBytes()); + assertEquals(expected.getNumLongTermLogicalBytes(), value.getNumLongTermLogicalBytes()); + assertEquals(expected.getNumTotalPhysicalBytes(), value.getNumTotalPhysicalBytes()); + assertEquals(expected.getNumActivePhysicalBytes(), value.getNumActivePhysicalBytes()); + assertEquals(expected.getNumLongTermPhysicalBytes(), value.getNumLongTermPhysicalBytes()); assertEquals(expected.getNumRows(), value.getNumRows()); assertEquals(expected.getLocation(), value.getLocation()); assertEquals(expected.getStreamingBuffer(), value.getStreamingBuffer()); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableConstraintsTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableConstraintsTest.java new file mode 100644 index 0000000000..b074b2f22c --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableConstraintsTest.java @@ -0,0 +1,118 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import org.junit.jupiter.api.Test; + +class TableConstraintsTest { + private static final List<String> COLUMNS_PK = Arrays.asList("column1", "column2"); + private static final PrimaryKey PRIMARY_KEY = + PrimaryKey.newBuilder().setColumns(COLUMNS_PK).build(); + private static final TableId TABLE_ID_PK = TableId.of("project", "dataset", "table"); + + private static final ColumnReference COLUMN_REFERENCE = + ColumnReference.newBuilder() + .setReferencingColumn("column1") + .setReferencedColumn("column2") + .build(); + private static final ForeignKey FOREIGN_KEY = + ForeignKey.newBuilder() + .setName("foreign_key") + .setReferencedTable(TABLE_ID_PK) + .setColumnReferences(Collections.singletonList(COLUMN_REFERENCE)) + .build(); + + private static final TableConstraints TABLE_CONSTRAINTS = + TableConstraints.newBuilder() + .setPrimaryKey(PRIMARY_KEY) + .setForeignKeys(Collections.singletonList(FOREIGN_KEY)) + .build(); + + @Test + void testToBuilder() { + compareTableConstraintsDefinition(TABLE_CONSTRAINTS, TABLE_CONSTRAINTS.toBuilder().build()); + List<String> columnsPk = Arrays.asList("col1", "col2", "col3"); + PrimaryKey primaryKey = PrimaryKey.newBuilder().setColumns(columnsPk).build(); + TableId referencedTable = TableId.of("project1", "dataset1", "table1"); + TableId referencedTable2 = TableId.of("project2", "dataset2", "table2"); + ArrayList<ColumnReference> columnReferences = new ArrayList<>(); + columnReferences.add( + ColumnReference.newBuilder() + .setReferencingColumn("from") + .setReferencedColumn("to") + .build()); + columnReferences.add( + ColumnReference.newBuilder() + .setReferencingColumn("from2") + .setReferencedColumn("to2") + .build()); + ForeignKey foreignKey1 = + ForeignKey.newBuilder() + .setName("test") + .setReferencedTable(referencedTable) + .setColumnReferences(columnReferences) + .build(); + ForeignKey foreignKey2 = + ForeignKey.newBuilder() + .setName("test") + .setReferencedTable(referencedTable2) + .setColumnReferences(columnReferences) + .build(); + + TableConstraints tableConstraints = + TABLE_CONSTRAINTS.toBuilder() + .setForeignKeys(Arrays.asList(foreignKey1, foreignKey2)) + .setPrimaryKey(primaryKey) + .build(); + assertEquals(Arrays.asList(foreignKey1, foreignKey2), tableConstraints.getForeignKeys()); + assertEquals(primaryKey, tableConstraints.getPrimaryKey()); + } + + @Test + void testBuilder() { + assertEquals(Collections.singletonList(FOREIGN_KEY), TABLE_CONSTRAINTS.getForeignKeys()); + assertEquals(PRIMARY_KEY, TABLE_CONSTRAINTS.getPrimaryKey()); + TableConstraints tableConstraints = + TABLE_CONSTRAINTS + .newBuilder() + .setForeignKeys(Collections.singletonList(FOREIGN_KEY)) + .setPrimaryKey(PRIMARY_KEY) + .build(); + assertEquals(TABLE_CONSTRAINTS, tableConstraints); + } + + @Test + void testToAndFromPb() { + TableConstraints tableConstraints = TABLE_CONSTRAINTS.toBuilder().build(); +
assertTrue(TableConstraints.fromPb(tableConstraints.toPb()) instanceof TableConstraints); + compareTableConstraintsDefinition( + tableConstraints, TableConstraints.fromPb(tableConstraints.toPb())); + } + + private void compareTableConstraintsDefinition( + TableConstraints expected, TableConstraints value) { + assertEquals(expected.getForeignKeys(), value.getForeignKeys()); + assertEquals(expected.getPrimaryKey(), value.getPrimaryKey()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableDataWriteChannelTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableDataWriteChannelTest.java index a959a89916..8752b27086 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableDataWriteChannelTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableDataWriteChannelTest.java @@ -16,11 +16,11 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.any; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; @@ -31,21 +31,25 @@ import com.google.cloud.RestorableState; import com.google.cloud.WriteChannel; import com.google.cloud.bigquery.spi.BigQueryRpcFactory; -import com.google.cloud.bigquery.spi.v2.BigQueryRpc; +import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc; import java.io.IOException; +import java.net.ConnectException; import java.net.SocketException; +import java.net.UnknownHostException; import java.nio.ByteBuffer; import java.util.Arrays; import java.util.Random; -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.ArgumentCaptor; import org.mockito.Captor; -import org.mockito.junit.MockitoJUnitRunner; +import org.mockito.junit.jupiter.MockitoExtension; +import org.mockito.junit.jupiter.MockitoSettings; +import org.mockito.quality.Strictness; -@RunWith(MockitoJUnitRunner.class) +@ExtendWith(MockitoExtension.class) +@MockitoSettings(strictness = Strictness.LENIENT) public class TableDataWriteChannelTest { private static final String UPLOAD_ID = "uploadid"; @@ -68,7 +72,7 @@ public class TableDataWriteChannelTest { private BigQueryOptions options; private BigQueryRpcFactory rpcFactoryMock; - private BigQueryRpc bigqueryRpcMock; + private HttpBigQueryRpc bigqueryRpcMock; private BigQueryFactory bigqueryFactoryMock; private BigQuery bigqueryMock; private Job job; @@ -78,10 +82,10 @@ public class TableDataWriteChannelTest { private TableDataWriteChannel writer; - @Before + @BeforeEach public void setUp() { rpcFactoryMock = mock(BigQueryRpcFactory.class); - bigqueryRpcMock = mock(BigQueryRpc.class); + bigqueryRpcMock = mock(HttpBigQueryRpc.class); bigqueryFactoryMock = mock(BigQueryFactory.class); bigqueryMock = mock(BigQuery.class); when(bigqueryMock.getOptions()).thenReturn(options); @@ -97,8 +101,8 @@ public void 
setUp() { } @Test - public void testCreate() { - when(bigqueryRpcMock.open( + public void testCreate() throws IOException { + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) @@ -107,26 +111,27 @@ public void testCreate() { assertTrue(writer.isOpen()); assertNull(writer.getJob()); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); } @Test - public void testCreateRetryableError() { - BigQueryException exception = new BigQueryException(new SocketException("Socket closed")); - when(bigqueryRpcMock.open( + public void testCreateRetryableErrors() throws IOException { + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) - .thenThrow(exception) + .thenThrow(new SocketException("Socket closed")) + .thenThrow(new UnknownHostException()) + .thenThrow(new ConnectException()) .thenReturn(UPLOAD_ID); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); assertTrue(writer.isOpen()); assertNull(writer.getJob()); - verify(bigqueryRpcMock, times(2)) - .open( + verify(bigqueryRpcMock, times(4)) + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); @@ -134,20 +139,21 @@ public void testCreateRetryableError() { @Test public void testCreateNonRetryableError() throws IOException { - RuntimeException ex = new RuntimeException("expected"); - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) - .thenThrow(ex); - try (TableDataWriteChannel channel = - new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION)) { - Assert.fail(); - } catch (RuntimeException expected) { - Assert.assertEquals("java.lang.RuntimeException: expected", expected.getMessage()); - } + .thenThrow(new RuntimeException("expected")); + RuntimeException expected = + assertThrows( + RuntimeException.class, + () -> { + try (TableDataWriteChannel channel = + new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION)) {} + }); + assertEquals("java.lang.RuntimeException: expected", expected.getMessage()); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); @@ -155,7 +161,7 @@ public void testCreateNonRetryableError() throws IOException { @Test public void testWriteWithoutFlush() throws IOException { - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) @@ -164,7 +170,7 @@ public void testWriteWithoutFlush() throws IOException { assertEquals(MIN_CHUNK_SIZE, writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE))); assertNull(writer.getJob()); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() 
.setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); @@ -172,12 +178,12 @@ public void testWriteWithoutFlush() throws IOException { @Test public void testWriteWithFlush() throws IOException { - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), @@ -192,12 +198,12 @@ public void testWriteWithFlush() throws IOException { assertArrayEquals(buffer.array(), capturedBuffer.getValue()); assertNull(writer.getJob()); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock) - .write( + .writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), @@ -207,19 +213,22 @@ public void testWriteWithFlush() throws IOException { } @Test - public void testWritesAndFlush() throws IOException { - when(bigqueryRpcMock.open( + public void testWritesAndFlushRetryableErrors() throws IOException { + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(DEFAULT_CHUNK_SIZE), eq(false))) + .thenThrow(new SocketException("Socket closed")) + .thenThrow(new UnknownHostException()) + .thenThrow(new ConnectException()) .thenReturn(null); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); ByteBuffer[] buffers = new ByteBuffer[DEFAULT_CHUNK_SIZE / MIN_CHUNK_SIZE]; @@ -235,12 +244,54 @@ public void testWritesAndFlush() throws IOException { } assertNull(writer.getJob()); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); + verify(bigqueryRpcMock, times(4)) + .writeSkipExceptionTranslation( + eq(UPLOAD_ID), + capturedBuffer.capture(), + eq(0), + eq(0L), + eq(DEFAULT_CHUNK_SIZE), + eq(false)); + } + + @Test + public void testWritesAndFlushNonRetryableError() throws IOException { + when(bigqueryRpcMock.openSkipExceptionTranslation( + new com.google.api.services.bigquery.model.Job() + .setJobReference(JOB_INFO.getJobId().toPb()) + .setConfiguration(LOAD_CONFIGURATION.toPb()))) + .thenReturn(UPLOAD_ID); + when(bigqueryRpcMock.writeSkipExceptionTranslation( + eq(UPLOAD_ID), + capturedBuffer.capture(), + eq(0), + eq(0L), + eq(DEFAULT_CHUNK_SIZE), + eq(false))) + .thenThrow(new RuntimeException("expected")); + RuntimeException expected = + assertThrows( + RuntimeException.class, + () -> { + writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); + ByteBuffer[] buffers = new ByteBuffer[DEFAULT_CHUNK_SIZE / MIN_CHUNK_SIZE]; + for (int i = 0; i < buffers.length; i++) { + buffers[i] = randomBuffer(MIN_CHUNK_SIZE); + assertEquals(MIN_CHUNK_SIZE, writer.write(buffers[i])); + } + }); + assertEquals("java.lang.RuntimeException: expected", expected.getMessage()); 
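// The retryable/non-retryable pair of tests above pins down the channel's retry contract:
// SocketException, UnknownHostException, and ConnectException are treated as transient and the
// call is reissued until it succeeds, while any other exception surfaces to the caller after a
// single attempt. A rough sketch of that classification (hypothetical helper, not the library's
// actual implementation):
//
//   static boolean isRetryable(Exception e) {
//     return e instanceof SocketException
//         || e instanceof UnknownHostException
//         || e instanceof ConnectException;
//   }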
verify(bigqueryRpcMock) - .write( + .openSkipExceptionTranslation( + new com.google.api.services.bigquery.model.Job() + .setJobReference(JOB_INFO.getJobId().toPb()) + .setConfiguration(LOAD_CONFIGURATION.toPb())); + verify(bigqueryRpcMock, times(1)) + .writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), @@ -251,12 +302,12 @@ public void testWritesAndFlush() throws IOException { @Test public void testCloseWithoutFlush() throws IOException { - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true))) .thenReturn(job.toPb()); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); @@ -266,23 +317,24 @@ public void testCloseWithoutFlush() throws IOException { assertTrue(!writer.isOpen()); assertEquals(job, writer.getJob()); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock) - .write(eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); + .writeSkipExceptionTranslation( + eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); } @Test public void testCloseWithFlush() throws IOException { - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); ByteBuffer buffer = randomBuffer(MIN_CHUNK_SIZE); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(MIN_CHUNK_SIZE), eq(true))) .thenReturn(job.toPb()); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); @@ -294,51 +346,47 @@ public void testCloseWithFlush() throws IOException { assertTrue(!writer.isOpen()); assertEquals(job, writer.getJob()); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock) - .write( + .writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(MIN_CHUNK_SIZE), eq(true)); } @Test public void testWriteClosed() throws IOException { - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true))) .thenReturn(job.toPb()); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); writer.close(); assertEquals(job, writer.getJob()); - try { - writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE)); - fail("Expected TableDataWriteChannel write to throw IOException"); - } catch (IOException ex) { - // expected - } + assertThrows(IOException.class, () -> 
writer.write(ByteBuffer.allocate(MIN_CHUNK_SIZE))); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock) - .write(eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); + .writeSkipExceptionTranslation( + eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); } @Test public void testSaveAndRestore() throws IOException { - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), @@ -359,12 +407,12 @@ public void testSaveAndRestore() throws IOException { assertArrayEquals(buffer2.array(), capturedBuffer.getAllValues().get(1)); assertEquals(new Long(DEFAULT_CHUNK_SIZE), capturedPosition.getAllValues().get(1)); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock, times(2)) - .write( + .writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), @@ -375,12 +423,12 @@ public void testSaveAndRestore() throws IOException { @Test public void testSaveAndRestoreClosed() throws IOException { - when(bigqueryRpcMock.open( + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) .thenReturn(UPLOAD_ID); - when(bigqueryRpcMock.write( + when(bigqueryRpcMock.writeSkipExceptionTranslation( eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true))) .thenReturn(job.toPb()); writer = new TableDataWriteChannel(options, JOB_INFO.getJobId(), LOAD_CONFIGURATION); @@ -398,17 +446,18 @@ public void testSaveAndRestoreClosed() throws IOException { assertArrayEquals(new byte[0], capturedBuffer.getValue()); assertEquals(expectedWriterState, restoredWriter.capture()); verify(bigqueryRpcMock) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); verify(bigqueryRpcMock) - .write(eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); + .writeSkipExceptionTranslation( + eq(UPLOAD_ID), capturedBuffer.capture(), eq(0), eq(0L), eq(0), eq(true)); } @Test - public void testStateEquals() { - when(bigqueryRpcMock.open( + public void testStateEquals() throws IOException { + when(bigqueryRpcMock.openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb()))) @@ -424,7 +473,7 @@ public void testStateEquals() { assertEquals(state.hashCode(), state2.hashCode()); assertEquals(state.toString(), state2.toString()); verify(bigqueryRpcMock, times(2)) - .open( + .openSkipExceptionTranslation( new com.google.api.services.bigquery.model.Job() .setJobReference(JOB_INFO.getJobId().toPb()) .setConfiguration(LOAD_CONFIGURATION.toPb())); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableIdTest.java 
b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableIdTest.java index dc28ff8610..02154db0ca 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableIdTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableIdTest.java @@ -16,11 +16,11 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TableIdTest { +class TableIdTest { private static final TableId TABLE = TableId.of("dataset", "table"); private static final TableId TABLE_COMPLETE = TableId.of("project", "dataset", "table"); @@ -28,7 +28,7 @@ public class TableIdTest { "projects/project/datasets/dataset/tables/table"; @Test - public void testOf() { + void testOf() { assertEquals(null, TABLE.getProject()); assertEquals("dataset", TABLE.getDataset()); assertEquals("table", TABLE.getTable()); @@ -39,19 +39,19 @@ public void testOf() { } @Test - public void testEquals() { + void testEquals() { compareTableIds(TABLE, TableId.of("dataset", "table")); compareTableIds(TABLE_COMPLETE, TableId.of("project", "dataset", "table")); } @Test - public void testToPbAndFromPb() { + void testToPbAndFromPb() { compareTableIds(TABLE, TableId.fromPb(TABLE.toPb())); compareTableIds(TABLE_COMPLETE, TableId.fromPb(TABLE_COMPLETE.toPb())); } @Test - public void testSetProjectId() { + void testSetProjectId() { TableId differentProjectTable = TableId.of("differentProject", "dataset", "table"); assertEquals(differentProjectTable, TABLE.setProjectId("differentProject")); } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableInfoTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableInfoTest.java index a44f700391..4dd488a680 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableInfoTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableInfoTest.java @@ -16,16 +16,16 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.common.collect.ImmutableList; import java.math.BigInteger; import java.util.Collections; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TableInfoTest { +class TableInfoTest { private static final String ETAG = "etag"; private static final String GENERATED_ID = "project:dataset:table"; @@ -58,6 +58,13 @@ public class TableInfoTest { private static final Schema TABLE_SCHEMA = Schema.of(FIELD_SCHEMA1, FIELD_SCHEMA2, FIELD_SCHEMA3); private static final Long NUM_BYTES = 42L; private static final Long NUM_LONG_TERM_BYTES = 21L; + private static final Long NUM_TIME_TRAVEL_PHYSICAL_BYTES = 21L; + private static final Long NUM_TOTAL_LOGICAL_BYTES = 22L; + private static final Long NUM_ACTIVE_LOGICAL_BYTES = 23L; + private static final Long NUM_LONG_TERM_LOGICAL_BYTES = 24L; + private static final Long NUM_TOTAL_PHYSICAL_BYTES = 25L; + private static final Long NUM_ACTIVE_PHYSICAL_BYTES = 26L; + private static final Long NUM_LONG_TERM_PHYSICAL_BYTES = 27L; private static final Long NUM_ROWS = 43L; private static final String LOCATION = "US"; private static final StandardTableDefinition.StreamingBuffer STREAMING_BUFFER = @@ -67,6 +74,13 @@ public class TableInfoTest { 
.setLocation(LOCATION) .setNumBytes(NUM_BYTES) .setNumLongTermBytes(NUM_LONG_TERM_BYTES) + .setNumTimeTravelPhysicalBytes(NUM_TIME_TRAVEL_PHYSICAL_BYTES) + .setNumTotalLogicalBytes(NUM_TOTAL_LOGICAL_BYTES) + .setNumActiveLogicalBytes(NUM_ACTIVE_LOGICAL_BYTES) + .setNumLongTermLogicalBytes(NUM_LONG_TERM_LOGICAL_BYTES) + .setNumTotalPhysicalBytes(NUM_TOTAL_PHYSICAL_BYTES) + .setNumActivePhysicalBytes(NUM_ACTIVE_PHYSICAL_BYTES) + .setNumLongTermPhysicalBytes(NUM_LONG_TERM_PHYSICAL_BYTES) .setNumRows(NUM_ROWS) .setStreamingBuffer(STREAMING_BUFFER) .setSchema(TABLE_SCHEMA) @@ -102,9 +116,17 @@ public class TableInfoTest { .setLastModifiedTime(LAST_MODIFIED_TIME) .setNumBytes(NUM_BYTES) .setNumLongTermBytes(NUM_LONG_TERM_BYTES) + .setNumTimeTravelPhysicalBytes(NUM_TIME_TRAVEL_PHYSICAL_BYTES) + .setNumTotalLogicalBytes(NUM_TOTAL_LOGICAL_BYTES) + .setNumActiveLogicalBytes(NUM_ACTIVE_LOGICAL_BYTES) + .setNumLongTermLogicalBytes(NUM_LONG_TERM_LOGICAL_BYTES) + .setNumTotalPhysicalBytes(NUM_TOTAL_PHYSICAL_BYTES) + .setNumActivePhysicalBytes(NUM_ACTIVE_PHYSICAL_BYTES) + .setNumLongTermPhysicalBytes(NUM_LONG_TERM_PHYSICAL_BYTES) .setNumRows(BigInteger.valueOf(NUM_ROWS)) .setSelfLink(SELF_LINK) .setLabels(Collections.singletonMap("a", "b")) + .setResourceTags(Collections.singletonMap("resourceTagA", "resourceTagB")) .setRequirePartitionFilter(REQUIRE_PARTITION_FILTER) .build(); private static final TableInfo VIEW_INFO = @@ -131,7 +153,7 @@ public class TableInfoTest { .build(); @Test - public void testToBuilder() { + void testToBuilder() { compareTableInfo(TABLE_INFO, TABLE_INFO.toBuilder().build()); compareTableInfo(VIEW_INFO, VIEW_INFO.toBuilder().build()); compareTableInfo(EXTERNAL_TABLE_INFO, EXTERNAL_TABLE_INFO.toBuilder().build()); @@ -142,7 +164,7 @@ public void testToBuilder() { } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { TableInfo tableInfo = TableInfo.of(TABLE_ID, TABLE_DEFINITION); assertEquals(tableInfo, tableInfo.toBuilder().build()); tableInfo = TableInfo.of(TABLE_ID, VIEW_DEFINITION); @@ -152,7 +174,7 @@ public void testToBuilderIncomplete() { } @Test - public void testBuilder() { + void testBuilder() { assertEquals(TABLE_ID, TABLE_INFO.getTableId()); assertEquals(CREATION_TIME, TABLE_INFO.getCreationTime()); assertEquals(DESCRIPTION, TABLE_INFO.getDescription()); @@ -166,6 +188,13 @@ public void testBuilder() { assertEquals(SELF_LINK, TABLE_INFO.getSelfLink()); assertEquals(NUM_BYTES, TABLE_INFO.getNumBytes()); assertEquals(NUM_LONG_TERM_BYTES, TABLE_INFO.getNumLongTermBytes()); + assertEquals(NUM_TIME_TRAVEL_PHYSICAL_BYTES, TABLE_INFO.getNumTimeTravelPhysicalBytes()); + assertEquals(NUM_TOTAL_LOGICAL_BYTES, TABLE_INFO.getNumTotalLogicalBytes()); + assertEquals(NUM_ACTIVE_LOGICAL_BYTES, TABLE_INFO.getNumActiveLogicalBytes()); + assertEquals(NUM_LONG_TERM_LOGICAL_BYTES, TABLE_INFO.getNumLongTermLogicalBytes()); + assertEquals(NUM_TOTAL_PHYSICAL_BYTES, TABLE_INFO.getNumTotalPhysicalBytes()); + assertEquals(NUM_ACTIVE_PHYSICAL_BYTES, TABLE_INFO.getNumActivePhysicalBytes()); + assertEquals(NUM_LONG_TERM_PHYSICAL_BYTES, TABLE_INFO.getNumLongTermPhysicalBytes()); assertEquals(BigInteger.valueOf(NUM_ROWS), TABLE_INFO.getNumRows()); assertEquals(REQUIRE_PARTITION_FILTER, TABLE_INFO.getRequirePartitionFilter()); @@ -194,7 +223,7 @@ public void testBuilder() { } @Test - public void testOf() { + void testOf() { TableInfo tableInfo = TableInfo.of(TABLE_ID, TABLE_DEFINITION); assertEquals(TABLE_ID, tableInfo.getTableId()); 
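// Aside, illustrative only (project/dataset/table ids invented): the storage statistics wired up
// in this change read back through plain getters on any fetched table, mirroring the assertions
// above, for example:
//
//   Table table = bigquery.getTable(TableId.of("my-project", "my_dataset", "my_table"));
//   Long activeLogicalBytes = table.getNumActiveLogicalBytes();
//   Long timeTravelPhysicalBytes = table.getNumTimeTravelPhysicalBytes();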
assertNull(tableInfo.getCreationTime()); @@ -237,21 +266,21 @@ public void testOf() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareTableInfo(TABLE_INFO, TableInfo.fromPb(TABLE_INFO.toPb())); compareTableInfo(VIEW_INFO, TableInfo.fromPb(VIEW_INFO.toPb())); compareTableInfo(EXTERNAL_TABLE_INFO, TableInfo.fromPb(EXTERNAL_TABLE_INFO.toPb())); } @Test - public void testSetProjectId() { + void testSetProjectId() { assertEquals("project", TABLE_INFO.setProjectId("project").getTableId().getProject()); assertEquals("project", EXTERNAL_TABLE_INFO.setProjectId("project").getTableId().getProject()); assertEquals("project", VIEW_INFO.setProjectId("project").getTableId().getProject()); } @Test - public void testSetProjectIdDoNotOverride() { + void testSetProjectIdDoNotOverride() { TableInfo tableInfo = TableInfo.of(TABLE_ID, TABLE_DEFINITION).setProjectId("project"); tableInfo.setProjectId("not-override-project").toBuilder(); assertEquals("project", tableInfo.getTableId().getProject()); @@ -271,9 +300,17 @@ private void compareTableInfo(TableInfo expected, TableInfo value) { assertEquals(expected.getLastModifiedTime(), value.getLastModifiedTime()); assertEquals(expected.getNumBytes(), value.getNumBytes()); assertEquals(expected.getNumLongTermBytes(), value.getNumLongTermBytes()); + assertEquals(expected.getNumTimeTravelPhysicalBytes(), value.getNumTimeTravelPhysicalBytes()); + assertEquals(expected.getNumTotalLogicalBytes(), value.getNumTotalLogicalBytes()); + assertEquals(expected.getNumActiveLogicalBytes(), value.getNumActiveLogicalBytes()); + assertEquals(expected.getNumLongTermLogicalBytes(), value.getNumLongTermLogicalBytes()); + assertEquals(expected.getNumTotalPhysicalBytes(), value.getNumTotalPhysicalBytes()); + assertEquals(expected.getNumActivePhysicalBytes(), value.getNumActivePhysicalBytes()); + assertEquals(expected.getNumLongTermPhysicalBytes(), value.getNumLongTermPhysicalBytes()); assertEquals(expected.getNumRows(), value.getNumRows()); assertEquals(expected.getSelfLink(), value.getSelfLink()); assertEquals(expected.getLabels(), value.getLabels()); + assertEquals(expected.getResourceTags(), value.getResourceTags()); assertEquals(expected.getRequirePartitionFilter(), value.getRequirePartitionFilter()); assertEquals(expected.toString(), value.toString()); assertEquals(expected.hashCode(), value.hashCode()); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableMetadataCacheUsageTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableMetadataCacheUsageTest.java new file mode 100644 index 0000000000..dc996693cc --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableMetadataCacheUsageTest.java @@ -0,0 +1,70 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.google.cloud.bigquery; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.google.api.services.bigquery.model.TableReference; +import com.google.cloud.bigquery.TableMetadataCacheUsage.UnusedReason; +import org.junit.jupiter.api.Test; + +class TableMetadataCacheUsageTest { + + private static final String EXPLANATION = "test explanation"; + + private static final String TABLE_TYPE = "test tableType"; + + private static final UnusedReason UNUSED_REASON = UnusedReason.UNUSED_REASON_UNSPECIFIED; + private static final TableReference TABLE_REFERENCE = + new TableReference() + .setTableId("test tableId") + .setProjectId("test projectId") + .setDatasetId("test dataset"); + private static final TableMetadataCacheUsage TABLE_METADATA_CACHE_USAGE = + TableMetadataCacheUsage.newBuilder() + .setExplanation(EXPLANATION) + .setTableType(TABLE_TYPE) + .setUnusedReason(UNUSED_REASON) + .setTableReference(TableId.fromPb(TABLE_REFERENCE)) + .build(); + + private static final com.google.api.services.bigquery.model.TableMetadataCacheUsage + TABLE_METADATA_CACHE_USAGE_PB = + new com.google.api.services.bigquery.model.TableMetadataCacheUsage() + .setTableReference(TABLE_REFERENCE) + .setExplanation(EXPLANATION) + .setTableType(TABLE_TYPE) + .setUnusedReason(UNUSED_REASON.toString()); + + @Test + void testToPbAndFromPb() { + assertEquals(TABLE_METADATA_CACHE_USAGE_PB, TABLE_METADATA_CACHE_USAGE.toPb()); + compareTableMetadataCacheUsage( + TABLE_METADATA_CACHE_USAGE, TableMetadataCacheUsage.fromPb(TABLE_METADATA_CACHE_USAGE_PB)); + } + + private void compareTableMetadataCacheUsage( + TableMetadataCacheUsage expected, TableMetadataCacheUsage value) { + assertEquals(expected, value); + assertEquals(expected.hashCode(), value.hashCode()); + assertEquals(expected.toString(), value.toString()); + assertEquals(expected.getExplanation(), value.getExplanation()); + assertEquals(expected.getTableType(), value.getTableType()); + assertEquals(expected.getUnusedReason(), value.getUnusedReason()); + assertEquals(expected.getTableReference(), value.getTableReference()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableResultTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableResultTest.java index 35a167af12..5bdb14cf49 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableResultTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableResultTest.java @@ -22,9 +22,9 @@ import com.google.api.gax.paging.Page; import com.google.cloud.PageImpl; import com.google.common.collect.ImmutableList; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TableResultTest { +class TableResultTest { private static final Page<FieldValueList> INNER_PAGE_0 = new PageImpl<>( new PageImpl.NextPageFetcher<FieldValueList>() { @@ -52,8 +52,9 @@ private static FieldValueList newFieldValueList(String s) { } @Test - public void testNullSchema() { - TableResult result = new TableResult(null, 3, INNER_PAGE_0); + void testNullSchema() { + TableResult result = + TableResult.newBuilder().setTotalRows(3L).setPageNoSchema(INNER_PAGE_0).build(); assertThat(result.getSchema()).isNull(); assertThat(result.hasNextPage()).isTrue(); assertThat(result.getNextPageToken()).isNotNull(); @@ -74,8 +75,13 @@ public void testNullSchema() { } @Test - public void testSchema() { - TableResult result = new TableResult(SCHEMA, 3, INNER_PAGE_0); + void testSchema() { + TableResult result = + TableResult.newBuilder() +
.setSchema(SCHEMA) + .setTotalRows(3L) + .setPageNoSchema(INNER_PAGE_0) + .build(); assertThat(result.getSchema()).isEqualTo(SCHEMA); assertThat(result.hasNextPage()).isTrue(); assertThat(result.getNextPageToken()).isNotNull(); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java index b93ed770b8..6ad7822d9b 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java @@ -17,11 +17,11 @@ package com.google.cloud.bigquery; import static com.google.common.truth.Truth.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertSame; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -34,14 +34,12 @@ import com.google.common.collect.ImmutableMap; import java.math.BigInteger; import java.util.List; -import org.junit.Before; -import org.junit.Rule; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.junit.MockitoJUnitRunner; -import org.mockito.junit.MockitoRule; - -@RunWith(MockitoJUnitRunner.class) +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) public class TableTest { private static final String ETAG = "etag"; @@ -97,14 +95,12 @@ public class TableTest { FieldValueList.of(ImmutableList.of(FIELD_VALUE1)).withSchema(SCHEMA.getFields()), FieldValueList.of(ImmutableList.of(FIELD_VALUE2)).withSchema(SCHEMA.getFields())); - @Rule public MockitoRule rule; - private BigQuery bigquery; private BigQueryOptions mockOptions; private Table expectedTable; private Table table; - @Before + @BeforeEach public void setUp() { bigquery = mock(BigQuery.class); mockOptions = mock(BigQueryOptions.class); @@ -248,9 +244,19 @@ public void testInsertComplete() { @Test public void testList() { Page<FieldValueList> page = new PageImpl<>(null, "c", ROWS); - when(bigquery.listTableData(TABLE_ID1)).thenReturn(new TableResult(null, ROWS.size(), page)); + when(bigquery.listTableData(TABLE_ID1)) + .thenReturn( + TableResult.newBuilder() + .setTotalRows((long) ROWS.size()) + .setPageNoSchema(page) + .build()); when(bigquery.listTableData(TABLE_ID1, SCHEMA)) - .thenReturn(new TableResult(SCHEMA, ROWS.size(), page)); + .thenReturn( + TableResult.newBuilder() + .setSchema(SCHEMA) + .setTotalRows((long) ROWS.size()) + .setPageNoSchema(page) + .build()); Page<FieldValueList> dataPage = table.list(); assertThat(dataPage.getValues()).containsExactlyElementsIn(ROWS).inOrder(); dataPage = table.list(SCHEMA); @@ -263,9 +269,18 @@ public void testList() { public void testListWithOptions() { Page<FieldValueList> page = new PageImpl<>(null, "c", ROWS); when(bigquery.listTableData(TABLE_ID1, BigQuery.TableDataListOption.pageSize(10L))) - .thenReturn(new TableResult(null, ROWS.size(), page)); + .thenReturn( + TableResult.newBuilder() +
.setTotalRows((long) ROWS.size()) + .setPageNoSchema(page) + .build()); when(bigquery.listTableData(TABLE_ID1, SCHEMA, BigQuery.TableDataListOption.pageSize(10L))) - .thenReturn(new TableResult(SCHEMA, ROWS.size(), page)); + .thenReturn( + TableResult.newBuilder() + .setSchema(SCHEMA) + .setTotalRows((long) ROWS.size()) + .setPageNoSchema(page) + .build()); Page<FieldValueList> dataPage = table.list(BigQuery.TableDataListOption.pageSize(10L)); assertThat(dataPage.getValues()).containsExactlyElementsIn(ROWS).inOrder(); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java index 1e48c817ed..ec947381b0 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java @@ -16,16 +16,16 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; import com.google.cloud.bigquery.TimePartitioning.Type; -import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class TimePartitioningTest { +class TimePartitioningTest { private static final Type TYPE_DAY = Type.DAY; private static final Type TYPE_HOUR = Type.HOUR; @@ -60,7 +60,7 @@ public class TimePartitioningTest { .build(); @Test - public void testOf() { + void testOf() { assertEquals(TYPE_DAY, TIME_PARTITIONING_DAY.getType()); assertEquals(TYPE_HOUR, TIME_PARTITIONING_HOUR.getType()); assertEquals(TYPE_MONTH, TIME_PARTITIONING_MONTH.getType()); @@ -74,7 +74,7 @@ public void testOf() { } @Test - public void testBuilder() { + void testBuilder() { TimePartitioning partitioning = TimePartitioning.newBuilder(TYPE_DAY).build(); assertEquals(TYPE_DAY, partitioning.getType()); assertNull(partitioning.getExpirationMs()); @@ -90,27 +90,21 @@ public void testBuilder() { } @Test - public void testTypeOf_Npe() { - try { - TimePartitioning.of(null); - Assert.fail(); - } catch (NullPointerException ex) { - assertNotNull(ex.getMessage()); - } + void testTypeOf_Npe() { + NullPointerException ex = + assertThrows(NullPointerException.class, () -> TimePartitioning.of(null)); + assertNotNull(ex.getMessage()); } @Test - public void testTypeAndExpirationOf_Npe() { - try { - TimePartitioning.of(null, EXPIRATION_MS); - Assert.fail(); - } catch (NullPointerException ex) { - assertNotNull(ex.getMessage()); - } + void testTypeAndExpirationOf_Npe() { + NullPointerException ex = + assertThrows(NullPointerException.class, () -> TimePartitioning.of(null, EXPIRATION_MS)); + assertNotNull(ex.getMessage()); } @Test - public void testToAndFromPb() { + void testToAndFromPb() { compareTimePartitioning( TIME_PARTITIONING_DAY, TimePartitioning.fromPb(TIME_PARTITIONING_DAY.toPb())); TimePartitioning partitioning = TimePartitioning.of(TYPE_DAY); diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java index 
1d888f00df..22f419593d 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java @@ -15,10 +15,10 @@ */ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class TimelineSampleTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/UserDefinedFunctionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/UserDefinedFunctionTest.java index 93657b44c1..81622527a3 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/UserDefinedFunctionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/UserDefinedFunctionTest.java @@ -16,9 +16,9 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class UserDefinedFunctionTest { diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java index d60c7be2bc..60eeea7668 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java @@ -16,19 +16,18 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.common.collect.ImmutableList; import java.util.List; -import org.junit.Test; +import org.junit.jupiter.api.Test; -public class ViewDefinitionTest { +class ViewDefinitionTest { private static final String VIEW_QUERY = "VIEW QUERY"; private static final List<UserDefinedFunction> USER_DEFINED_FUNCTIONS = @@ -37,7 +36,7 @@ public class ViewDefinitionTest { ViewDefinition.newBuilder(VIEW_QUERY, USER_DEFINED_FUNCTIONS).setSchema(Schema.of()).build(); @Test - public void testToBuilder() { + void testToBuilder() { compareViewDefinition(VIEW_DEFINITION, VIEW_DEFINITION.toBuilder().build()); ViewDefinition viewDefinition = VIEW_DEFINITION.toBuilder().setQuery("NEW QUERY").build(); assertEquals("NEW QUERY", viewDefinition.getQuery()); @@ -50,23 +49,21 @@ public void testToBuilder() { } @Test - public void testTypeNullPointerException() { - try { - VIEW_DEFINITION.toBuilder().setType(null).build(); - fail(); - } catch (NullPointerException ex) { - assertNotNull(ex.getMessage()); - } + void testTypeNullPointerException() { + NullPointerException ex = + org.junit.jupiter.api.Assertions.assertThrows( + NullPointerException.class, () 
-> VIEW_DEFINITION.toBuilder().setType(null).build()); + assertNotNull(ex.getMessage()); } @Test - public void testToBuilderIncomplete() { + void testToBuilderIncomplete() { TableDefinition viewDefinition = ViewDefinition.of(VIEW_QUERY); assertEquals(viewDefinition, viewDefinition.toBuilder().build()); } @Test - public void testBuilder() { + void testBuilder() { assertEquals(VIEW_QUERY, VIEW_DEFINITION.getQuery()); assertEquals(TableDefinition.Type.VIEW, VIEW_DEFINITION.getType()); assertEquals(USER_DEFINED_FUNCTIONS, VIEW_DEFINITION.getUserDefinedFunctions()); @@ -106,7 +103,7 @@ public void testBuilder() { } @Test - public void testToAndFromPb() { + void testToAndFromPb() { ViewDefinition viewDefinition = VIEW_DEFINITION.toBuilder().setUseLegacySql(false).build(); assertTrue(TableDefinition.fromPb(viewDefinition.toPb()) instanceof ViewDefinition); compareViewDefinition( diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/WriteChannelConfigurationTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/WriteChannelConfigurationTest.java index dbee1094b6..35745235e5 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/WriteChannelConfigurationTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/WriteChannelConfigurationTest.java @@ -16,8 +16,8 @@ package com.google.cloud.bigquery; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import com.google.cloud.bigquery.JobInfo.CreateDisposition; import com.google.cloud.bigquery.JobInfo.WriteDisposition; @@ -27,7 +27,7 @@ import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Map; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class WriteChannelConfigurationTest { @@ -59,6 +59,16 @@ public class WriteChannelConfigurationTest { Clustering.newBuilder().setFields(ImmutableList.of("Foo", "Bar")).build(); private static final Map<String, String> LABELS = ImmutableMap.of("test-job-name", "test-write-channel"); + private static final List<String> DECIMAL_TARGET_TYPES = + ImmutableList.of("NUMERIC", "BIGNUMERIC"); + + private static final boolean CREATE_SESSION = true; + private static final String KEY = "session_id"; + private static final String VALUE = "session_id_1234567890"; + private static final ConnectionProperty CONNECTION_PROPERTY = + ConnectionProperty.newBuilder().setKey(KEY).setValue(VALUE).build(); + private static final List<ConnectionProperty> CONNECTION_PROPERTIES = + ImmutableList.of(CONNECTION_PROPERTY); private static final WriteChannelConfiguration LOAD_CONFIGURATION_CSV = WriteChannelConfiguration.newBuilder(TABLE_ID) .setCreateDisposition(CREATE_DISPOSITION) @@ -73,6 +83,9 @@ .setTimePartitioning(TIME_PARTITIONING) .setClustering(CLUSTERING) .setLabels(LABELS) + .setDecimalTargetTypes(DECIMAL_TARGET_TYPES) + .setConnectionProperties(CONNECTION_PROPERTIES) + .setCreateSession(CREATE_SESSION) .build(); private static final DatastoreBackupOptions BACKUP_OPTIONS = @@ -104,14 +117,14 @@ public class WriteChannelConfigurationTest { .setTimePartitioning(TIME_PARTITIONING) .setClustering(CLUSTERING) .setUseAvroLogicalTypes(USERAVROLOGICALTYPES) + .setDecimalTargetTypes(DECIMAL_TARGET_TYPES) .build(); @Test public void testToBuilder() { compareLoadConfiguration(LOAD_CONFIGURATION_CSV, LOAD_CONFIGURATION_CSV.toBuilder().build()); 
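// Illustrative only (dataset/table names invented): the options introduced above compose with
// the existing builder like any other load option, for example:
//
//   WriteChannelConfiguration exampleConfiguration =
//       WriteChannelConfiguration.newBuilder(TableId.of("my_dataset", "my_table"))
//           .setFormatOptions(FormatOptions.csv())
//           .setDecimalTargetTypes(ImmutableList.of("NUMERIC", "BIGNUMERIC"))
//           .setConnectionProperties(CONNECTION_PROPERTIES)
//           .setCreateSession(true)
//           .build();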
WriteChannelConfiguration configuration = - LOAD_CONFIGURATION_CSV - .toBuilder() + LOAD_CONFIGURATION_CSV.toBuilder() .setDestinationTable(TableId.of("dataset", "newTable")) .build(); assertEquals("newTable", configuration.getDestinationTable().getTable()); @@ -120,8 +133,7 @@ public void testToBuilder() { compareLoadConfiguration(LOAD_CONFIGURATION_AVRO, LOAD_CONFIGURATION_AVRO.toBuilder().build()); WriteChannelConfiguration configurationAvro = - LOAD_CONFIGURATION_AVRO - .toBuilder() + LOAD_CONFIGURATION_AVRO.toBuilder() .setDestinationTable(TableId.of("dataset", "newTable")) .build(); assertEquals("newTable", configurationAvro.getDestinationTable().getTable()); @@ -169,7 +181,8 @@ public void testBuilder() { .setMaxBadRecords(MAX_BAD_RECORDS) .setSchemaUpdateOptions(SCHEMA_UPDATE_OPTIONS) .setSchema(TABLE_SCHEMA) - .setAutodetect(AUTODETECT); + .setAutodetect(AUTODETECT) + .setDecimalTargetTypes(DECIMAL_TARGET_TYPES); WriteChannelConfiguration loadConfigurationCSV = builder.build(); assertEquals(TABLE_ID, loadConfigurationCSV.getDestinationTable()); assertEquals(CREATE_DISPOSITION, loadConfigurationCSV.getCreateDisposition()); @@ -182,6 +195,7 @@ public void testBuilder() { assertEquals(TABLE_SCHEMA, loadConfigurationCSV.getSchema()); assertEquals(SCHEMA_UPDATE_OPTIONS, loadConfigurationCSV.getSchemaUpdateOptions()); assertEquals(AUTODETECT, loadConfigurationCSV.getAutodetect()); + assertEquals(DECIMAL_TARGET_TYPES, loadConfigurationCSV.getDecimalTargetTypes()); builder.setFormatOptions(BACKUP_OPTIONS); WriteChannelConfiguration loadConfigurationBackup = builder.build(); assertEquals(BACKUP_OPTIONS, loadConfigurationBackup.getDatastoreBackupOptions()); @@ -225,5 +239,8 @@ private void compareLoadConfiguration( assertEquals(expected.getClustering(), value.getClustering()); assertEquals(expected.getUseAvroLogicalTypes(), value.getUseAvroLogicalTypes()); assertEquals(expected.getLabels(), value.getLabels()); + assertEquals(expected.getDecimalTargetTypes(), value.getDecimalTargetTypes()); + assertEquals(expected.getConnectionProperties(), value.getConnectionProperties()); + assertEquals(expected.getCreateSession(), value.getCreateSession()); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java index dc8b3c3554..439e401d37 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java @@ -19,15 +19,19 @@ import static com.google.cloud.bigquery.JobStatus.State.DONE; import static com.google.common.truth.Truth.assertThat; import static java.lang.System.currentTimeMillis; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - +import static java.net.HttpURLConnection.HTTP_UNAUTHORIZED; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertInstanceOf; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static 
org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +import com.google.api.client.util.IOUtils; import com.google.api.gax.paging.Page; import com.google.auth.oauth2.GoogleCredentials; import com.google.auth.oauth2.ServiceAccountCredentials; @@ -38,66 +42,115 @@ import com.google.cloud.Role; import com.google.cloud.ServiceOptions; import com.google.cloud.bigquery.Acl; +import com.google.cloud.bigquery.Acl.DatasetAclEntity; +import com.google.cloud.bigquery.Acl.Expr; +import com.google.cloud.bigquery.Acl.User; import com.google.cloud.bigquery.BigQuery; -import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption; import com.google.cloud.bigquery.BigQuery.DatasetField; import com.google.cloud.bigquery.BigQuery.DatasetListOption; import com.google.cloud.bigquery.BigQuery.DatasetOption; +import com.google.cloud.bigquery.BigQuery.DatasetUpdateMode; +import com.google.cloud.bigquery.BigQuery.DatasetView; import com.google.cloud.bigquery.BigQuery.JobField; import com.google.cloud.bigquery.BigQuery.JobListOption; import com.google.cloud.bigquery.BigQuery.JobOption; import com.google.cloud.bigquery.BigQuery.TableField; +import com.google.cloud.bigquery.BigQuery.TableMetadataView; import com.google.cloud.bigquery.BigQuery.TableOption; +import com.google.cloud.bigquery.BigQueryDryRunResult; import com.google.cloud.bigquery.BigQueryError; import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.BigQueryResult; +import com.google.cloud.bigquery.BigQueryRetryConfig; +import com.google.cloud.bigquery.BigQuerySQLException; +import com.google.cloud.bigquery.CloneDefinition; import com.google.cloud.bigquery.Clustering; +import com.google.cloud.bigquery.ColumnReference; +import com.google.cloud.bigquery.Connection; import com.google.cloud.bigquery.ConnectionProperty; +import com.google.cloud.bigquery.ConnectionSettings; import com.google.cloud.bigquery.CopyJobConfiguration; +import com.google.cloud.bigquery.CsvOptions; +import com.google.cloud.bigquery.DataFormatOptions; import com.google.cloud.bigquery.Dataset; import com.google.cloud.bigquery.DatasetId; import com.google.cloud.bigquery.DatasetInfo; +import com.google.cloud.bigquery.ExecuteSelectResponse; import com.google.cloud.bigquery.ExternalTableDefinition; import com.google.cloud.bigquery.ExtractJobConfiguration; import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Field.Mode; +import com.google.cloud.bigquery.FieldElementType; +import com.google.cloud.bigquery.FieldList; import com.google.cloud.bigquery.FieldValue; +import com.google.cloud.bigquery.FieldValue.Attribute; import com.google.cloud.bigquery.FieldValueList; +import com.google.cloud.bigquery.ForeignKey; import com.google.cloud.bigquery.FormatOptions; import com.google.cloud.bigquery.HivePartitioningOptions; import com.google.cloud.bigquery.InsertAllRequest; +import com.google.cloud.bigquery.InsertAllRequest.RowToInsert; import com.google.cloud.bigquery.InsertAllResponse; import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobConfiguration; +import com.google.cloud.bigquery.JobCreationReason; import com.google.cloud.bigquery.JobId; import com.google.cloud.bigquery.JobInfo; import com.google.cloud.bigquery.JobStatistics; +import 
com.google.cloud.bigquery.JobStatistics.CopyStatistics; +import com.google.cloud.bigquery.JobStatistics.ExtractStatistics; import com.google.cloud.bigquery.JobStatistics.LoadStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics; +import com.google.cloud.bigquery.JobStatistics.QueryStatistics.StatementType; +import com.google.cloud.bigquery.JobStatistics.SessionInfo; +import com.google.cloud.bigquery.JobStatistics.TransactionInfo; import com.google.cloud.bigquery.LegacySQLTypeName; import com.google.cloud.bigquery.LoadJobConfiguration; import com.google.cloud.bigquery.MaterializedViewDefinition; import com.google.cloud.bigquery.Model; import com.google.cloud.bigquery.ModelId; import com.google.cloud.bigquery.ModelInfo; +import com.google.cloud.bigquery.Parameter; +import com.google.cloud.bigquery.ParquetOptions; import com.google.cloud.bigquery.PolicyTags; +import com.google.cloud.bigquery.PrimaryKey; import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode; +import com.google.cloud.bigquery.QueryJobConfiguration.Priority; import com.google.cloud.bigquery.QueryParameterValue; +import com.google.cloud.bigquery.Range; import com.google.cloud.bigquery.RangePartitioning; import com.google.cloud.bigquery.Routine; import com.google.cloud.bigquery.RoutineArgument; import com.google.cloud.bigquery.RoutineId; import com.google.cloud.bigquery.RoutineInfo; import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.SnapshotTableDefinition; import com.google.cloud.bigquery.StandardSQLDataType; +import com.google.cloud.bigquery.StandardSQLField; +import com.google.cloud.bigquery.StandardSQLTableType; +import com.google.cloud.bigquery.StandardSQLTypeName; import com.google.cloud.bigquery.StandardTableDefinition; import com.google.cloud.bigquery.Table; +import com.google.cloud.bigquery.TableConstraints; import com.google.cloud.bigquery.TableDataWriteChannel; import com.google.cloud.bigquery.TableDefinition; import com.google.cloud.bigquery.TableId; import com.google.cloud.bigquery.TableInfo; +import com.google.cloud.bigquery.TableMetadataCacheUsage.UnusedReason; import com.google.cloud.bigquery.TableResult; import com.google.cloud.bigquery.TimePartitioning; import com.google.cloud.bigquery.TimePartitioning.Type; import com.google.cloud.bigquery.ViewDefinition; import com.google.cloud.bigquery.WriteChannelConfiguration; import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; +import com.google.cloud.datacatalog.v1.CreatePolicyTagRequest; +import com.google.cloud.datacatalog.v1.CreateTaxonomyRequest; +import com.google.cloud.datacatalog.v1.PolicyTag; +import com.google.cloud.datacatalog.v1.PolicyTagManagerClient; +import com.google.cloud.datacatalog.v1.Taxonomy; +import com.google.cloud.datacatalog.v1.Taxonomy.PolicyType; import com.google.cloud.storage.BlobInfo; import com.google.cloud.storage.BucketInfo; import com.google.cloud.storage.Storage; @@ -109,56 +162,87 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.common.io.BaseEncoding; +import com.google.common.util.concurrent.ListenableFuture; +import com.google.gson.JsonObject; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.common.AttributeKey; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Scope; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import 
io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import java.io.ByteArrayInputStream; import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; import java.math.BigDecimal; import java.nio.ByteBuffer; +import java.nio.channels.Channels; import java.nio.charset.StandardCharsets; import java.nio.file.FileSystems; +import java.nio.file.Path; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Time; +import java.time.Duration; +import java.time.Instant; +import java.time.LocalTime; +import java.time.Period; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; +import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.logging.Level; import java.util.logging.Logger; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.Timeout; -import org.threeten.bp.Duration; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; +import org.threeten.extra.PeriodDuration; -public class ITBigQueryTest { +@Timeout(value = 300) +class ITBigQueryTest { private static final byte[] BYTES = {0xD, 0xE, 0xA, 0xD}; private static final String BYTES_BASE64 = BaseEncoding.base64().encode(BYTES); - private static final Long EXPIRATION_MS = 86400000L; private static final Logger LOG = Logger.getLogger(ITBigQueryTest.class.getName()); private static final String DATASET = RemoteBigQueryHelper.generateDatasetName(); + private static final String UK_DATASET = RemoteBigQueryHelper.generateDatasetName(); private static final String DESCRIPTION = "Test dataset"; - private static final String OTHER_DATASET = RemoteBigQueryHelper.generateDatasetName(); private static final String MODEL_DATASET = RemoteBigQueryHelper.generateDatasetName(); private static final String ROUTINE_DATASET = RemoteBigQueryHelper.generateDatasetName(); private static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); private static final String RANDOM_ID = UUID.randomUUID().toString().substring(0, 8); + private static final String STORAGE_BILLING_MODEL = "LOGICAL"; + private static final Long MAX_TIME_TRAVEL_HOURS = 120L; + private static final Long MAX_TIME_TRAVEL_HOURS_DEFAULT = 168L; + private static final Map<String, Map<AttributeKey<?>, Object>> OTEL_ATTRIBUTES = + new HashMap<String, Map<AttributeKey<?>, Object>>(); + private static final Map<String, String> OTEL_PARENT_SPAN_IDS = new HashMap<>(); + private static final Map<String, String> OTEL_SPAN_IDS_TO_NAMES = new HashMap<>(); + private static final String OTEL_PARENT_SPAN_ID = "0000000000000000"; private static final String CLOUD_SAMPLES_DATA = Optional.fromNullable(System.getenv("CLOUD_SAMPLES_DATA_BUCKET")).or("cloud-samples-data"); private static final Map<String, String> LABELS = ImmutableMap.of( "example-label1", "example-value1", "example-label2", "example-value2"); - private static final String sampleTag = 
String.format("projects/%s/locations/us/taxonomies/1/policyTags/2", PROJECT_ID); - private static final PolicyTags POLICY_TAGS = - PolicyTags.newBuilder().setNames(ImmutableList.of(sampleTag)).build(); private static final Field TIMESTAMP_FIELD_SCHEMA = Field.newBuilder("TimestampField", LegacySQLTypeName.TIMESTAMP) .setMode(Field.Mode.NULLABLE) @@ -196,6 +280,31 @@ public class ITBigQueryTest { .setMode(Field.Mode.REQUIRED) .setDescription("RecordDescription") .build(); + + private static final Field REPEATED_RECORD_FIELD_SCHEMA = + Field.newBuilder( + "Addresses", + LegacySQLTypeName.RECORD, + Field.newBuilder("Status", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("Address", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("City", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("State", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("Zip", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("NumberOfYears", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()) + .setMode(Field.Mode.REPEATED) + .build(); private static final Field INTEGER_FIELD_SCHEMA = Field.newBuilder("IntegerField", LegacySQLTypeName.INTEGER) .setMode(Field.Mode.NULLABLE) @@ -241,12 +350,6 @@ public class ITBigQueryTest { .setMode(Field.Mode.NULLABLE) .setDescription("BigNumeric4Description") .build(); - private static final Field STRING_FIELD_SCHEMA_WITH_POLICY = - Field.newBuilder("StringFieldWithPolicy", LegacySQLTypeName.STRING) - .setMode(Field.Mode.NULLABLE) - .setDescription("field has a policy") - .setPolicyTags(POLICY_TAGS) - .build(); private static final Schema TABLE_SCHEMA = Schema.of( TIMESTAMP_FIELD_SCHEMA, @@ -265,6 +368,103 @@ public class ITBigQueryTest { BIGNUMERIC_FIELD_SCHEMA3, BIGNUMERIC_FIELD_SCHEMA4); + private static final Schema BQ_RESULTSET_SCHEMA = + Schema.of( + Field.newBuilder("TimestampField", StandardSQLTypeName.TIMESTAMP) + .setMode(Field.Mode.NULLABLE) + .setDescription("TimestampDescription") + .build(), + Field.newBuilder("StringField", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("StringDescription") + .build(), + Field.newBuilder("IntegerArrayField", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.REPEATED) + .setDescription("IntegerArrayDescription") + .build(), + Field.newBuilder("BooleanField", StandardSQLTypeName.BOOL) + .setMode(Field.Mode.NULLABLE) + .setDescription("BooleanDescription") + .build(), + Field.newBuilder("BytesField", StandardSQLTypeName.BYTES) + .setMode(Field.Mode.NULLABLE) + .setDescription("BytesDescription") + .build(), + Field.newBuilder( + "RecordField", + StandardSQLTypeName.STRUCT, + Field.newBuilder("TimestampField", StandardSQLTypeName.TIMESTAMP) + .setMode(Field.Mode.NULLABLE) + .setDescription("TimestampDescription") + .build(), + Field.newBuilder("StringField", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("StringDescription") + .build(), + Field.newBuilder("IntegerArrayField", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.REPEATED) + .setDescription("IntegerArrayDescription") + .build(), + Field.newBuilder("BooleanField", StandardSQLTypeName.BOOL) + .setMode(Field.Mode.NULLABLE) + .setDescription("BooleanDescription") + .build(), + Field.newBuilder("BytesField", StandardSQLTypeName.BYTES) + .setMode(Field.Mode.NULLABLE) + .setDescription("BytesDescription") + 
.build()) + .setMode(Field.Mode.REQUIRED) + .setDescription("RecordDescription") + .build(), + Field.newBuilder("IntegerField", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.NULLABLE) + .setDescription("IntegerDescription") + .build(), + Field.newBuilder("FloatField", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.NULLABLE) + .setDescription("FloatDescription") + .build(), + Field.newBuilder("GeographyField", StandardSQLTypeName.GEOGRAPHY) + .setMode(Field.Mode.NULLABLE) + .setDescription("GeographyDescription") + .build(), + Field.newBuilder("NumericField", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.NULLABLE) + .setDescription("NumericDescription") + .build(), + Field.newBuilder("BigNumericField", StandardSQLTypeName.BIGNUMERIC) + .setMode(Field.Mode.NULLABLE) + .setDescription("BigNumericDescription") + .build(), + Field.newBuilder("BigNumericField1", StandardSQLTypeName.BIGNUMERIC) + .setMode(Field.Mode.NULLABLE) + .setDescription("BigNumeric1Description") + .build(), + Field.newBuilder("BigNumericField2", StandardSQLTypeName.BIGNUMERIC) + .setMode(Field.Mode.NULLABLE) + .setDescription("BigNumeric2Description") + .build(), + Field.newBuilder("BigNumericField3", StandardSQLTypeName.BIGNUMERIC) + .setMode(Field.Mode.NULLABLE) + .setDescription("BigNumeric3Description") + .build(), + Field.newBuilder("BigNumericField4", StandardSQLTypeName.BIGNUMERIC) + .setMode(Field.Mode.NULLABLE) + .setDescription("BigNumeric4Description") + .build(), + Field.newBuilder("TimeField", StandardSQLTypeName.TIME) + .setMode(Field.Mode.NULLABLE) + .setDescription("TimeDescription") + .build(), + Field.newBuilder("DateField", StandardSQLTypeName.DATE) + .setMode(Field.Mode.NULLABLE) + .setDescription("DateDescription") + .build(), + Field.newBuilder("DateTimeField", StandardSQLTypeName.DATETIME) + .setMode(Field.Mode.NULLABLE) + .setDescription("DateTimeDescription") + .build()); + private static final Field DDL_TIMESTAMP_FIELD_SCHEMA = Field.newBuilder("TimestampField", LegacySQLTypeName.TIMESTAMP) .setDescription("TimestampDescription") @@ -277,7 +477,7 @@ public class ITBigQueryTest { Field.newBuilder("BooleanField", LegacySQLTypeName.BOOLEAN) .setDescription("BooleanDescription") .build(); - private static final Schema DDL_TABLE_SCHEMA = + private static final Schema SIMPLE_TABLE_SCHEMA = Schema.of(DDL_TIMESTAMP_FIELD_SCHEMA, DDL_STRING_FIELD_SCHEMA, DDL_BOOLEAN_FIELD_SCHEMA); private static final Schema LARGE_TABLE_SCHEMA = Schema.of( @@ -296,9 +496,19 @@ public class ITBigQueryTest { .setMode(Field.Mode.NULLABLE) .build()); + private static final Schema REPEATED_RECORD_TABLE_SCHEMA = + Schema.of( + Field.newBuilder("ID", LegacySQLTypeName.STRING).setMode(Field.Mode.NULLABLE).build(), + Field.newBuilder("FirstName", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("LastName", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("DOB", LegacySQLTypeName.DATE).setMode(Field.Mode.NULLABLE).build(), + REPEATED_RECORD_FIELD_SCHEMA); + private static final Schema SIMPLE_SCHEMA = Schema.of(STRING_FIELD_SCHEMA); - private static final Schema POLICY_SCHEMA = - Schema.of(STRING_FIELD_SCHEMA, STRING_FIELD_SCHEMA_WITH_POLICY, INTEGER_FIELD_SCHEMA); private static final Schema QUERY_RESULT_SCHEMA = Schema.of( Field.newBuilder("TimestampField", LegacySQLTypeName.TIMESTAMP) @@ -310,6 +520,55 @@ public class ITBigQueryTest { Field.newBuilder("BooleanField", LegacySQLTypeName.BOOLEAN) .setMode(Field.Mode.NULLABLE) .build()); + + 
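// Schema expected when the BQ-resultset test table is read back via executeSelect; the "_1" suffixes (BytesField_1, BooleanField_1) presumably reflect BigQuery's de-duplication of repeated column names in the result. +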
private static final Schema BQ_RESULTSET_EXPECTED_SCHEMA = + Schema.of( + Field.newBuilder("StringField", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("BigNumericField", StandardSQLTypeName.BIGNUMERIC) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("BooleanField", StandardSQLTypeName.BOOL) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("BytesField", StandardSQLTypeName.BYTES) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("IntegerField", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("TimestampField", StandardSQLTypeName.TIMESTAMP) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("FloatField", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("NumericField", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("TimeField", StandardSQLTypeName.TIME) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("DateField", StandardSQLTypeName.DATE) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("DateTimeField", StandardSQLTypeName.DATETIME) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("GeographyField", StandardSQLTypeName.GEOGRAPHY) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("BytesField_1", StandardSQLTypeName.BYTES) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("BooleanField_1", StandardSQLTypeName.BOOL) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("IntegerArrayField", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.REPEATED) + .build()); + private static final Schema QUERY_RESULT_SCHEMA_BIGNUMERIC = Schema.of( Field.newBuilder("TimestampField", LegacySQLTypeName.TIMESTAMP) @@ -353,16 +612,29 @@ public class ITBigQueryTest { RangePartitioning.newBuilder().setField("IntegerField").setRange(RANGE).build(); private static final String LOAD_FILE = "load.csv"; private static final String LOAD_FILE_LARGE = "load_large.csv"; + + private static final String LOAD_FILE_FLEXIBLE_COLUMN_NAME = "load_flexible_column_name.csv"; + private static final String LOAD_FILE_NULL = "load_null.csv"; private static final String JSON_LOAD_FILE = "load.json"; + private static final String JSON_LOAD_FILE_BQ_RESULTSET = "load_bq_resultset.json"; private static final String JSON_LOAD_FILE_SIMPLE = "load_simple.json"; private static final String EXTRACT_FILE = "extract.csv"; private static final String EXTRACT_MODEL_FILE = "extract_model.csv"; private static final String BUCKET = RemoteStorageHelper.generateBucketName(); - private static final TableId TABLE_ID = TableId.of(DATASET, "testing_table"); - private static final TableId TABLE_ID_DDL = TableId.of(DATASET, "ddl_testing_table"); - private static final TableId TABLE_ID_FASTQUERY = TableId.of(DATASET, "fastquery_testing_table"); - private static final TableId TABLE_ID_LARGE = TableId.of(DATASET, "large_data_testing_table"); + private static final TableId TABLE_ID = TableId.of(DATASET, generateRandomName("testing_table")); + private static final TableId TABLE_ID_SIMPLE = + TableId.of(DATASET, generateRandomName("ddl_testing_table")); + private static final TableId TABLE_ID_FAST_QUERY = + TableId.of(DATASET, generateRandomName("fast_query_testing_table")); + private static final TableId TABLE_ID_LARGE = + TableId.of(DATASET, generateRandomName("large_data_testing_table")); + private static final TableId TABLE_ID_FAST_QUERY_BQ_RESULTSET = + 
TableId.of(DATASET, generateRandomName("fast_query_testing_bq_resultset")); + private static final String CSV_CONTENT = "StringValue1\nStringValue2\n"; + private static final String CSV_CONTENT_NULL = "String\0Value1\n"; + private static final String CSV_CONTENT_FLEXIBLE_COLUMN = "name,&ersand\nrow_name,1"; + private static final String JSON_CONTENT = "{" + " \"TimestampField\": \"2014-08-19 07:41:35.220 -05:00\"," @@ -418,6 +690,64 @@ public class ITBigQueryTest { + " \"BigNumericField3\": \"578960446186580977117854925043439539266.34992332820282019728792003956564819967\"," + " \"BigNumericField4\": \"-578960446186580977117854925043439539266.34992332820282019728792003956564819968\"" + "}"; + + private static final String JSON_CONTENT_BQ_RESULTSET = + "{" + + " \"TimestampField\": null," + + " \"StringField\": null," + + " \"IntegerArrayField\": null," + + " \"BooleanField\": null," + + " \"BytesField\": null," + + " \"RecordField\": {" + + " \"TimestampField\": null," + + " \"StringField\": null," + + " \"IntegerArrayField\": null," + + " \"BooleanField\": null," + + " \"BytesField\": null" + + " }," + + " \"IntegerField\": null," + + " \"FloatField\": null," + + " \"GeographyField\": null," + + " \"NumericField\": null," + + " \"BigNumericField\": null," + + " \"BigNumericField1\": null," + + " \"BigNumericField2\": null," + + " \"BigNumericField3\": null," + + " \"BigNumericField4\": null," + + " \"TimeField\": null," + + " \"DateField\": null," + + " \"DateTimeField\": null" + + "}\n" + + "{" + + " \"TimestampField\": \"2018-08-19 12:11:35.123456 UTC\"," + + " \"StringField\": \"StringValue1\"," + + " \"IntegerArrayField\": [1,2,3,4]," + + " \"BooleanField\": \"false\"," + + " \"BytesField\": \"" + + BYTES_BASE64 + + "\"," + + " \"RecordField\": {" + + " \"TimestampField\": \"1969-07-20 20:18:04 UTC\"," + + " \"StringField\": null," + + " \"IntegerArrayField\": [1,0]," + + " \"BooleanField\": \"true\"," + + " \"BytesField\": \"" + + BYTES_BASE64 + + "\"" + + " }," + + " \"IntegerField\": \"1\"," + + " \"FloatField\": \"10.1\"," + + " \"GeographyField\": \"POINT(-122.35022 47.649154)\"," + + " \"NumericField\": \"100\"," + + " \"BigNumericField\": \"0.33333333333333333333333333333333333333\"," + + " \"BigNumericField1\": \"1e-38\"," + + " \"BigNumericField2\": \"-1e38\"," + + " \"BigNumericField3\": \"578960446186580977117854925043439539266.34992332820282019728792003956564819967\"," + + " \"BigNumericField4\": \"-578960446186580977117854925043439539266.34992332820282019728792003956564819968\"," + + " \"TimeField\": \"12:11:35.123456\"," + + " \"DateField\": \"2018-08-19\"," + + " \"DateTimeField\": \"2018-08-19 12:11:35.123456\"" + + "}"; private static final String JSON_CONTENT_SIMPLE = "{" + " \"TimestampField\": \"2014-08-19 07:41:35.220 -05:00\"," @@ -436,28 +766,335 @@ public class ITBigQueryTest { private static final List CONNECTION_PROPERTIES = ImmutableList.of(CONNECTION_PROPERTY); + private static final Field ID_SCHEMA = + Field.newBuilder("id", LegacySQLTypeName.STRING) + .setMode(Mode.REQUIRED) + .setDescription("id") + .build(); + private static final Field FIRST_NAME_SCHEMA = + Field.newBuilder("firstname", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("First Name") + .build(); + private static final Field LAST_NAME_SCHEMA = + Field.newBuilder("lastname", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("LAST NAME") + .build(); + private static final Field EMAIL_SCHEMA = + Field.newBuilder("email", 
LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("email") + .build(); + private static final Field PROFESSION_SCHEMA = + Field.newBuilder("profession", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("profession") + .build(); + private static final Schema SESSION_TABLE_SCHEMA = + Schema.of(ID_SCHEMA, FIRST_NAME_SCHEMA, LAST_NAME_SCHEMA, EMAIL_SCHEMA, PROFESSION_SCHEMA); + + private static final Schema CONSTRAINTS_TABLE_SCHEMA = + Schema.of( + Field.newBuilder("ID", LegacySQLTypeName.STRING).setMode(Mode.REQUIRED).build(), + Field.newBuilder("FirstName", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("LastName", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + private static final Path csvPath = + FileSystems.getDefault().getPath("src/test/resources", "sessionTest.csv").toAbsolutePath(); + private static final Set PUBLIC_DATASETS = ImmutableSet.of("github_repos", "hacker_news", "noaa_gsod", "samples", "usa_names"); + private static final Map PUBLIC_DATASETS_LOCATION = + ImmutableMap.builder() + .put("github_repos", "US") + .put("hacker_news", "US") + .put("noaa_gsod", "US") + .put("samples", "US") + .put("usa_names", "US") + // Dataset url: + // https://console.cloud.google.com/bigquery?project=bigquery-public-data&ws=!1m4!1m3!3m2!1sbigquery-public-data!2sgnomAD_asiane1 + .put("gnomAD_asiane1", "asia-northeast1") + // Dataset url: + // https://console.cloud.google.com/bigquery?project=bigquery-public-data&ws=!1m4!1m3!3m2!1sbigquery-public-data!2sgnomAD_eu + .put("gnomAD_eu", "EU") + .build(); private static final String PUBLIC_PROJECT = "bigquery-public-data"; private static final String PUBLIC_DATASET = "census_bureau_international"; + private static final String FAKE_JSON_CRED_WITH_GOOGLE_DOMAIN = + "{\n" + + " \"private_key_id\": \"somekeyid\",\n" + + " \"private_key\": \"-----BEGIN PRIVATE KEY-----\\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggS" + + "kAgEAAoIBAQC+K2hSuFpAdrJI\\nnCgcDz2M7t7bjdlsadsasad+fvRSW6TjNQZ3p5LLQY1kSZRqBqylRkzteMOyHg" + + "aR\\n0Pmxh3ILCND5men43j3h4eDbrhQBuxfEMalkG92sL+PNQSETY2tnvXryOvmBRwa/\\nQP/9dJfIkIDJ9Fw9N4" + + "Bhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nknddadwkwewcVxHFhcZJO+XWf6ofLUXpRwiTZakGMn8EE1uVa2" + + "LgczOjwWHGi99MFjxSer5m9\\n1tCa3/KEGKiS/YL71JvjwX3mb+cewlkcmweBKZHM2JPTk0ZednFSpVZMtycjkbLa" + + "\\ndYOS8V85AgMBewECggEBAKksaldajfDZDV6nGqbFjMiizAKJolr/M3OQw16K6o3/\\n0S31xIe3sSlgW0+UbYlF" + + "4U8KifhManD1apVSC3csafaspP4RZUHFhtBywLO9pR5c\\nr6S5aLp+gPWFyIp1pfXbWGvc5VY/v9x7ya1VEa6rXvL" + + "sKupSeWAW4tMj3eo/64ge\\nsdaceaLYw52KeBYiT6+vpsnYrEkAHO1fF/LavbLLOFJmFTMxmsNaG0tuiJHgjshB\\" + + "n82DpMCbXG9YcCgI/DbzuIjsdj2JC1cascSP//3PmefWysucBQe7Jryb6NQtASmnv\\nCdDw/0jmZTEjpe4S1lxfHp" + + "lAhHFtdgYTvyYtaLZiVVkCgYEA8eVpof2rceecw/I6\\n5ng1q3Hl2usdWV/4mZMvR0fOemacLLfocX6IYxT1zA1FF" + + "JlbXSRsJMf/Qq39mOR2\\nSpW+hr4jCoHeRVYLgsbggtrevGmILAlNoqCMpGZ6vDmJpq6ECV9olliDvpPgWOP+\\nm" + + "YPDreFBGxWvQrADNbRt2dmGsrsCgYEAyUHqB2wvJHFqdmeBsaacewzV8x9WgmeX\\ngUIi9REwXlGDW0Mz50dxpxcK" + + "CAYn65+7TCnY5O/jmL0VRxU1J2mSWyWTo1C+17L0\\n3fUqjxL1pkefwecxwecvC+gFFYdJ4CQ/MHHXU81Lwl1iWdF" + + "Cd2UoGddYaOF+KNeM\\nHC7cmqra+JsCgYEAlUNywzq8nUg7282E+uICfCB0LfwejuymR93CtsFgb7cRd6ak\\nECR" + + "8FGfCpH8ruWJINllbQfcHVCX47ndLZwqv3oVFKh6pAS/vVI4dpOepP8++7y1u\\ncoOvtreXCX6XqfrWDtKIvv0vjl" + + "HBhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nkndj5uNl5SiuVxHFhcZJO+XWf6ofLUregtevZakGMn8EE1uVa" + + "2AY7eafmoU/nZPT\\n00YB0TBATdCbn/nBSuKDESkhSg9s2GEKQZG5hBmL5uCMfo09z3SfxZIhJdlerreP\\nJ7gSi" + + 
"dI12N+EZxYd4xIJh/HFDgp7RRO87f+WJkofMQKBgGTnClK1VMaCRbJZPriw\\nEfeFCoOX75MxKwXs6xgrw4W//AYG" + + "GUjDt83lD6AZP6tws7gJ2IwY/qP7+lyhjEqN\\nHtfPZRGFkGZsdaksdlaksd323423d+15/UvrlRSFPNj1tWQmNKk" + + "XyRDW4IG1Oa2p\\nrALStNBx5Y9t0/LQnFI4w3aG\\n-----END PRIVATE KEY-----\\n\",\n" + + " \"project_id\": \"someprojectid\",\n" + + " \"client_email\": \"someclientid@developer.gserviceaccount.com\",\n" + + " \"client_id\": \"someclientid.apps.googleusercontent.com\",\n" + + " \"type\": \"service_account\",\n" + + " \"universe_domain\": \"googleapis.com\"\n" + + "}"; + private static final String FAKE_JSON_CRED_WITH_INVALID_DOMAIN = + "{\n" + + " \"private_key_id\": \"somekeyid\",\n" + + " \"private_key\": \"-----BEGIN PRIVATE KEY-----\\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggS" + + "kAgEAAoIBAQC+K2hSuFpAdrJI\\nnCgcDz2M7t7bjdlsadsasad+fvRSW6TjNQZ3p5LLQY1kSZRqBqylRkzteMOyHg" + + "aR\\n0Pmxh3ILCND5men43j3h4eDbrhQBuxfEMalkG92sL+PNQSETY2tnvXryOvmBRwa/\\nQP/9dJfIkIDJ9Fw9N4" + + "Bhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nknddadwkwewcVxHFhcZJO+XWf6ofLUXpRwiTZakGMn8EE1uVa2" + + "LgczOjwWHGi99MFjxSer5m9\\n1tCa3/KEGKiS/YL71JvjwX3mb+cewlkcmweBKZHM2JPTk0ZednFSpVZMtycjkbLa" + + "\\ndYOS8V85AgMBewECggEBAKksaldajfDZDV6nGqbFjMiizAKJolr/M3OQw16K6o3/\\n0S31xIe3sSlgW0+UbYlF" + + "4U8KifhManD1apVSC3csafaspP4RZUHFhtBywLO9pR5c\\nr6S5aLp+gPWFyIp1pfXbWGvc5VY/v9x7ya1VEa6rXvL" + + "sKupSeWAW4tMj3eo/64ge\\nsdaceaLYw52KeBYiT6+vpsnYrEkAHO1fF/LavbLLOFJmFTMxmsNaG0tuiJHgjshB\\" + + "n82DpMCbXG9YcCgI/DbzuIjsdj2JC1cascSP//3PmefWysucBQe7Jryb6NQtASmnv\\nCdDw/0jmZTEjpe4S1lxfHp" + + "lAhHFtdgYTvyYtaLZiVVkCgYEA8eVpof2rceecw/I6\\n5ng1q3Hl2usdWV/4mZMvR0fOemacLLfocX6IYxT1zA1FF" + + "JlbXSRsJMf/Qq39mOR2\\nSpW+hr4jCoHeRVYLgsbggtrevGmILAlNoqCMpGZ6vDmJpq6ECV9olliDvpPgWOP+\\nm" + + "YPDreFBGxWvQrADNbRt2dmGsrsCgYEAyUHqB2wvJHFqdmeBsaacewzV8x9WgmeX\\ngUIi9REwXlGDW0Mz50dxpxcK" + + "CAYn65+7TCnY5O/jmL0VRxU1J2mSWyWTo1C+17L0\\n3fUqjxL1pkefwecxwecvC+gFFYdJ4CQ/MHHXU81Lwl1iWdF" + + "Cd2UoGddYaOF+KNeM\\nHC7cmqra+JsCgYEAlUNywzq8nUg7282E+uICfCB0LfwejuymR93CtsFgb7cRd6ak\\nECR" + + "8FGfCpH8ruWJINllbQfcHVCX47ndLZwqv3oVFKh6pAS/vVI4dpOepP8++7y1u\\ncoOvtreXCX6XqfrWDtKIvv0vjl" + + "HBhhhp6mCcRpdQjV38H7JsyJ7lih/oNjECgYAt\\nkndj5uNl5SiuVxHFhcZJO+XWf6ofLUregtevZakGMn8EE1uVa" + + "2AY7eafmoU/nZPT\\n00YB0TBATdCbn/nBSuKDESkhSg9s2GEKQZG5hBmL5uCMfo09z3SfxZIhJdlerreP\\nJ7gSi" + + "dI12N+EZxYd4xIJh/HFDgp7RRO87f+WJkofMQKBgGTnClK1VMaCRbJZPriw\\nEfeFCoOX75MxKwXs6xgrw4W//AYG" + + "GUjDt83lD6AZP6tws7gJ2IwY/qP7+lyhjEqN\\nHtfPZRGFkGZsdaksdlaksd323423d+15/UvrlRSFPNj1tWQmNKk" + + "XyRDW4IG1Oa2p\\nrALStNBx5Y9t0/LQnFI4w3aG\\n-----END PRIVATE KEY-----\\n\",\n" + + " \"project_id\": \"someprojectid\",\n" + + " \"client_email\": \"someclientid@developer.gserviceaccount.com\",\n" + + " \"client_id\": \"someclientid.apps.googleusercontent.com\",\n" + + " \"type\": \"service_account\",\n" + + " \"universe_domain\": \"fake.domain\"\n" + + "}"; + + private static final Schema RANGE_SCHEMA = + Schema.of( + Field.newBuilder("name", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("Name of the row") + .build(), + Field.newBuilder("date", StandardSQLTypeName.RANGE) + .setMode(Field.Mode.NULLABLE) + .setDescription("Range field with DATE") + .setRangeElementType(FieldElementType.newBuilder().setType("DATE").build()) + .build(), + Field.newBuilder("datetime", StandardSQLTypeName.RANGE) + .setMode(Field.Mode.NULLABLE) + .setDescription("Range field with DATETIME") + .setRangeElementType(FieldElementType.newBuilder().setType("DATETIME").build()) + .build(), + 
Field.newBuilder("timestamp", StandardSQLTypeName.RANGE) + .setMode(Field.Mode.NULLABLE) + .setDescription("Range field with TIMESTAMP") + .setRangeElementType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()); + + private static final ImmutableMap RANGE_TEST_VALUES_DATES = + new ImmutableMap.Builder() + .put( + "bounded", + Range.newBuilder() + .setStart("2020-01-01") + .setEnd("2020-12-31") + .setType(FieldElementType.newBuilder().setType("DATE").build()) + .build()) + .put( + "unboundedStart", + Range.newBuilder() + .setStart(null) + .setEnd("2020-12-31") + .setType(FieldElementType.newBuilder().setType("DATE").build()) + .build()) + .put( + "unboundedEnd", + Range.newBuilder() + .setStart("2020-01-01") + .setEnd(null) + .setType(FieldElementType.newBuilder().setType("DATE").build()) + .build()) + .put( + "unbounded", + Range.newBuilder() + .setStart(null) + .setEnd(null) + .setType(FieldElementType.newBuilder().setType("DATE").build()) + .build()) + .build(); + + private static final ImmutableMap RANGE_TEST_VALUES_DATETIME = + new ImmutableMap.Builder() + .put( + "bounded", + Range.newBuilder() + .setStart("2014-08-19T05:41:35.220000") + .setEnd("2015-09-20T06:41:35.220000") + .setType(FieldElementType.newBuilder().setType("DATETIME").build()) + .build()) + .put( + "unboundedStart", + Range.newBuilder() + .setStart(null) + .setEnd("2015-09-20T06:41:35.220000") + .setType(FieldElementType.newBuilder().setType("DATETIME").build()) + .build()) + .put( + "unboundedEnd", + Range.newBuilder() + .setStart("2014-08-19T05:41:35.220000") + .setEnd(null) + .setType(FieldElementType.newBuilder().setType("DATETIME").build()) + .build()) + .put( + "unbounded", + Range.newBuilder() + .setStart(null) + .setEnd(null) + .setType(FieldElementType.newBuilder().setType("DATETIME").build()) + .build()) + .build(); + + private static final ImmutableMap RANGE_TEST_VALUES_TIMESTAMP = + new ImmutableMap.Builder() + .put( + "bounded", + Range.newBuilder() + .setStart("2014-08-19 12:41:35.220000+00:00") + .setEnd("2015-09-20 13:41:35.220000+01:00") + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .put( + "unboundedStart", + Range.newBuilder() + .setStart(null) + .setEnd("2015-09-20 13:41:35.220000+01:00") + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .put( + "unboundedEnd", + Range.newBuilder() + .setStart("2014-08-19 12:41:35.220000+00:00") + .setEnd(null) + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .put( + "unbounded", + Range.newBuilder() + .setStart(null) + .setEnd(null) + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .build(); + + // timestamps are returned as seconds since epoch + private static final ImmutableMap RANGE_TEST_VALUES_EXPECTED_TIMESTAMP = + new ImmutableMap.Builder() + .put( + "bounded", + Range.newBuilder() + .setStart("1408452095.220000") + .setEnd("1442752895.220000") + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .put( + "unboundedStart", + Range.newBuilder() + .setStart(null) + .setEnd("1442752895.220000") + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .put( + "unboundedEnd", + Range.newBuilder() + .setStart("1408452095.220000") + .setEnd(null) + .setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .put( + "unbounded", + Range.newBuilder() + .setStart(null) + .setEnd(null) + 
.setType(FieldElementType.newBuilder().setType("TIMESTAMP").build()) + .build()) + .build(); + private static BigQuery bigquery; private static Storage storage; + private static OpenTelemetry otel; + + // Records every exported span's name, attributes, and parent span ID into the static OTEL_* maps so tests can assert on emitted OpenTelemetry spans. + private static class TestSpanExporter implements io.opentelemetry.sdk.trace.export.SpanExporter { + @Override + public CompletableResultCode export(Collection<SpanData> collection) { + if (collection.isEmpty()) { + return CompletableResultCode.ofFailure(); + } + for (SpanData data : collection) { + OTEL_ATTRIBUTES.put(data.getName(), data.getAttributes().asMap()); + OTEL_PARENT_SPAN_IDS.put(data.getName(), data.getParentSpanId()); + OTEL_SPAN_IDS_TO_NAMES.put(data.getSpanId(), data.getName()); + } + return CompletableResultCode.ofSuccess(); + } - @Rule public Timeout globalTimeout = Timeout.seconds(300); + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + return CompletableResultCode.ofSuccess(); + } + } - @BeforeClass - public static void beforeClass() throws InterruptedException, IOException { + private static String generateRandomName(String prefix) { + return prefix + UUID.randomUUID().toString().substring(0, 8); + } + + @BeforeAll + static void beforeClass() throws InterruptedException, IOException { RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); RemoteStorageHelper storageHelper = RemoteStorageHelper.create(); Map<String, String> labels = ImmutableMap.of("test-job-name", "test-load-job"); + SdkTracerProvider tracerProvider = + SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.create(new TestSpanExporter())) + .setSampler(Sampler.alwaysOn()) + .build(); + otel = OpenTelemetrySdk.builder().setTracerProvider(tracerProvider).buildAndRegisterGlobal(); + bigquery = bigqueryHelper.getOptions().getService(); storage = storageHelper.getOptions().getService(); storage.create(BucketInfo.of(BUCKET)); storage.create( BlobInfo.newBuilder(BUCKET, LOAD_FILE).setContentType("text/plain").build(), CSV_CONTENT.getBytes(StandardCharsets.UTF_8)); + storage.create( + BlobInfo.newBuilder(BUCKET, LOAD_FILE_NULL).setContentType("text/plain").build(), + CSV_CONTENT_NULL.getBytes(StandardCharsets.UTF_8)); + storage.create( + BlobInfo.newBuilder(BUCKET, LOAD_FILE_FLEXIBLE_COLUMN_NAME) + .setContentType("text/plain") + .build(), + CSV_CONTENT_FLEXIBLE_COLUMN.getBytes(StandardCharsets.UTF_8)); storage.create( BlobInfo.newBuilder(BUCKET, JSON_LOAD_FILE).setContentType("application/json").build(), JSON_CONTENT.getBytes(StandardCharsets.UTF_8)); @@ -466,9 +1103,17 @@ public static void beforeClass() throws InterruptedException, IOException { .setContentType("application/json") .build(), JSON_CONTENT_SIMPLE.getBytes(StandardCharsets.UTF_8)); - storage.createFrom( - BlobInfo.newBuilder(BUCKET, LOAD_FILE_LARGE).setContentType("text/plain").build(), - FileSystems.getDefault().getPath("src/test/resources", "QueryTestData.csv")); + try (InputStream stream = + ITBigQueryTest.class.getClassLoader().getResourceAsStream("QueryTestData.csv")) { + storage.createFrom( + BlobInfo.newBuilder(BUCKET, LOAD_FILE_LARGE).setContentType("text/plain").build(), + stream); + } + storage.create( + BlobInfo.newBuilder(BUCKET, JSON_LOAD_FILE_BQ_RESULTSET) + .setContentType("application/json") + .build(), + JSON_CONTENT_BQ_RESULTSET.getBytes(StandardCharsets.UTF_8)); DatasetInfo info = DatasetInfo.newBuilder(DATASET).setDescription(DESCRIPTION).setLabels(LABELS).build(); bigquery.create(info); @@ -478,6 +1123,7 @@ public
static void beforeClass() throws InterruptedException, IOException { DatasetInfo info3 = DatasetInfo.newBuilder(ROUTINE_DATASET).setDescription("java routine lifecycle").build(); bigquery.create(info3); + LoadJobConfiguration configuration = LoadJobConfiguration.newBuilder( TABLE_ID, "gs://" + BUCKET + "/" + JSON_LOAD_FILE, FormatOptions.json()) @@ -493,7 +1139,7 @@ public static void beforeClass() throws InterruptedException, IOException { LoadJobConfiguration configurationFastQuery = LoadJobConfiguration.newBuilder( - TABLE_ID_FASTQUERY, "gs://" + BUCKET + "/" + JSON_LOAD_FILE, FormatOptions.json()) + TABLE_ID_FAST_QUERY, "gs://" + BUCKET + "/" + JSON_LOAD_FILE, FormatOptions.json()) .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) .setSchema(TABLE_SCHEMA) .setLabels(labels) @@ -502,11 +1148,26 @@ public static void beforeClass() throws InterruptedException, IOException { jobFastQuery = jobFastQuery.waitFor(); assertNull(jobFastQuery.getStatus().getError()); + LoadJobConfiguration configFastQueryBQResultset = + LoadJobConfiguration.newBuilder( + TABLE_ID_FAST_QUERY_BQ_RESULTSET, + "gs://" + BUCKET + "/" + JSON_LOAD_FILE_BQ_RESULTSET, + FormatOptions.json()) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setSchema(BQ_RESULTSET_SCHEMA) + .setLabels(labels) + .build(); + Job jobFastQueryBQResultSet = bigquery.create(JobInfo.of(configFastQueryBQResultset)); + jobFastQueryBQResultSet = jobFastQueryBQResultSet.waitFor(); + assertNull(jobFastQueryBQResultSet.getStatus().getError()); + LoadJobConfiguration configurationDDL = LoadJobConfiguration.newBuilder( - TABLE_ID_DDL, "gs://" + BUCKET + "/" + JSON_LOAD_FILE_SIMPLE, FormatOptions.json()) + TABLE_ID_SIMPLE, + "gs://" + BUCKET + "/" + JSON_LOAD_FILE_SIMPLE, + FormatOptions.json()) .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) - .setSchema(DDL_TABLE_SCHEMA) + .setSchema(SIMPLE_TABLE_SCHEMA) .setLabels(labels) .build(); Job jobDDL = bigquery.create(JobInfo.of(configurationDDL)); @@ -525,10 +1186,11 @@ public static void beforeClass() throws InterruptedException, IOException { assertNull(jobLargeTable.getStatus().getError()); } - @AfterClass - public static void afterClass() throws ExecutionException, InterruptedException { + @AfterAll + static void afterClass() throws Exception { if (bigquery != null) { RemoteBigQueryHelper.forceDelete(bigquery, DATASET); + RemoteBigQueryHelper.forceDelete(bigquery, UK_DATASET); RemoteBigQueryHelper.forceDelete(bigquery, MODEL_DATASET); RemoteBigQueryHelper.forceDelete(bigquery, ROUTINE_DATASET); } @@ -537,40 +1199,59 @@ public static void afterClass() throws ExecutionException, InterruptedException if (!wasDeleted && LOG.isLoggable(Level.WARNING)) { LOG.log(Level.WARNING, "Deletion of bucket {0} timed out, bucket is not empty", BUCKET); } + storage.close(); + } + + if (otel instanceof OpenTelemetrySdk) { + ((OpenTelemetrySdk) otel).close(); + } + } + + static GoogleCredentials loadCredentials(String credentialFile) { + try (InputStream keyStream = new ByteArrayInputStream(credentialFile.getBytes())) { + return GoogleCredentials.fromStream(keyStream); + } catch (IOException e) { + throw new RuntimeException("Couldn't create fake JSON credentials.", e); } } @Test - public void testListDatasets() { + void testListDatasets() { Page<Dataset> datasets = bigquery.listDatasets("bigquery-public-data"); Iterator<Dataset> iterator = datasets.iterateAll().iterator(); Set<String> datasetNames = new HashSet<>(); + Map<String, String> datasetLocation = new HashMap<>(); while (iterator.hasNext()) { -
datasetNames.add(iterator.next().getDatasetId().getDataset()); + Dataset dataset = iterator.next(); + String name = dataset.getDatasetId().getDataset(); + datasetNames.add(name); + datasetLocation.put(name, dataset.getLocation()); } for (String type : PUBLIC_DATASETS) { assertTrue(datasetNames.contains(type)); + assertEquals(PUBLIC_DATASETS_LOCATION.get(type), datasetLocation.get(type)); } } @Test - public void testListDatasetsWithFilter() { + void testListDatasetsWithFilter() { String labelFilter = "labels.example-label1:example-value1"; Page datasets = bigquery.listDatasets(DatasetListOption.labelFilter(labelFilter)); int count = 0; for (Dataset dataset : datasets.getValues()) { assertTrue( - "failed to find label key in dataset", dataset.getLabels().containsKey("example-label1")); - assertTrue( - "failed to find label value in dataset", - dataset.getLabels().get("example-label1").equals("example-value1")); + dataset.getLabels().containsKey("example-label1"), "failed to find label key in dataset"); + assertEquals( + "example-value1", + dataset.getLabels().get("example-label1"), + "failed to find label value in dataset"); count++; } assertTrue(count > 0); } @Test - public void testGetDataset() { + void testGetDataset() { Dataset dataset = bigquery.getDataset(DATASET); assertEquals(bigquery.getOptions().getProjectId(), dataset.getDatasetId().getProject()); assertEquals(DATASET, dataset.getDatasetId().getDataset()); @@ -584,7 +1265,7 @@ public void testGetDataset() { } @Test - public void testDatasetUpdateAccess() throws IOException { + void testDatasetUpdateAccess() throws IOException { Dataset dataset = bigquery.getDataset(DATASET); ServiceAccountCredentials credentials = (ServiceAccountCredentials) GoogleCredentials.getApplicationDefault(); @@ -599,7 +1280,7 @@ public void testDatasetUpdateAccess() throws IOException { } @Test - public void testGetDatasetWithSelectedFields() { + void testGetDatasetWithSelectedFields() { Dataset dataset = bigquery.getDataset( DATASET, DatasetOption.fields(DatasetField.CREATION_TIME, DatasetField.LABELS)); @@ -616,73 +1297,188 @@ public void testGetDatasetWithSelectedFields() { assertNull(dataset.getLastModified()); assertNull(dataset.getLocation()); assertNull(dataset.getSelfLink()); + assertNull(dataset.getStorageBillingModel()); + assertNull(dataset.getMaxTimeTravelHours()); } @Test - public void testUpdateDataset() { + void testGetDatasetWithAccessPolicyVersion() throws IOException { + String accessPolicyDataset = RemoteBigQueryHelper.generateDatasetName(); + ServiceAccountCredentials credentials = + (ServiceAccountCredentials) GoogleCredentials.getApplicationDefault(); + User user = new User(credentials.getClientEmail()); + Acl.Role role = Acl.Role.WRITER; + Acl.Expr condition = + new Expr( + "request.time > timestamp('2024-01-01T00:00:00Z')", + "test condition", + "requests after the year 2024", + "location"); + Acl acl = Acl.of(user, role, condition); + DatasetOption accessPolicyOption = DatasetOption.accessPolicyVersion(3); + DatasetOption viewOption = DatasetOption.datasetView(DatasetView.FULL); + Dataset dataset = bigquery.create( - DatasetInfo.newBuilder(OTHER_DATASET) + DatasetInfo.newBuilder(accessPolicyDataset) .setDescription("Some Description") - .setLabels(Collections.singletonMap("a", "b")) - .build()); + .setAcl(ImmutableList.of(acl)) + .build(), + accessPolicyOption); assertThat(dataset).isNotNull(); - assertThat(dataset.getDatasetId().getProject()).isEqualTo(bigquery.getOptions().getProjectId()); - 
assertThat(dataset.getDatasetId().getDataset()).isEqualTo(OTHER_DATASET); - assertThat(dataset.getDescription()).isEqualTo("Some Description"); - assertThat(dataset.getLabels()).containsExactly("a", "b"); - Map updateLabels = new HashMap<>(); - updateLabels.put("x", "y"); - updateLabels.put("a", null); - Dataset updatedDataset = - bigquery.update( - dataset - .toBuilder() - .setDescription("Updated Description") - .setLabels(updateLabels) + Dataset remoteDataset = + bigquery.getDataset(accessPolicyDataset, accessPolicyOption, viewOption); + assertNotNull(remoteDataset); + assertEquals(dataset.getDescription(), remoteDataset.getDescription()); + assertNotNull(remoteDataset.getCreationTime()); + + Acl remoteAclWithCond = null; + for (Acl remoteAcl : remoteDataset.getAcl()) { + if (remoteAcl.getCondition() != null) { + remoteAclWithCond = remoteAcl; + } + } + assertNotNull(remoteAclWithCond); + assertEquals(remoteAclWithCond.getCondition(), condition); + + RemoteBigQueryHelper.forceDelete(bigquery, accessPolicyDataset); + } + + @Test + void testUpdateDataset() { + String datasetName = RemoteBigQueryHelper.generateDatasetName(); + Dataset dataset = + bigquery.create( + DatasetInfo.newBuilder(datasetName) + .setDescription("Some Description") + .setLabels(Collections.singletonMap("a", "b")) .build()); - assertThat(updatedDataset.getDescription()).isEqualTo("Updated Description"); - assertThat(updatedDataset.getLabels()).containsExactly("x", "y"); + try { + assertThat(dataset).isNotNull(); + assertThat(dataset.getDatasetId().getProject()) + .isEqualTo(bigquery.getOptions().getProjectId()); + assertThat(dataset.getDatasetId().getDataset()).isEqualTo(datasetName); + assertThat(dataset.getDescription()).isEqualTo("Some Description"); + assertThat(dataset.getLabels()).containsExactly("a", "b"); + assertThat(dataset.getStorageBillingModel()).isNull(); + assertThat(dataset.getMaxTimeTravelHours()).isNull(); + + Map updateLabels = new HashMap<>(); + updateLabels.put("x", "y"); + updateLabels.put("a", null); + Dataset updatedDataset = + bigquery.update( + dataset.toBuilder() + .setDescription("Updated Description") + .setLabels(updateLabels) + .setStorageBillingModel("LOGICAL") + .setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS) + .build()); + assertThat(updatedDataset.getDescription()).isEqualTo("Updated Description"); + assertThat(updatedDataset.getLabels()).containsExactly("x", "y"); + assertThat(updatedDataset.getStorageBillingModel()).isEqualTo("LOGICAL"); + assertThat(updatedDataset.getMaxTimeTravelHours()).isEqualTo(MAX_TIME_TRAVEL_HOURS); + + updatedDataset = bigquery.update(updatedDataset.toBuilder().setLabels(null).build()); + assertThat(updatedDataset.getLabels()).isEmpty(); + } finally { + dataset.delete(); + } + } - updatedDataset = bigquery.update(updatedDataset.toBuilder().setLabels(null).build()); - assertThat(updatedDataset.getLabels()).isEmpty(); - assertThat(dataset.delete()).isTrue(); + @Test + void testUpdateDatasetWithSelectedFields() { + String datasetName = RemoteBigQueryHelper.generateDatasetName(); + Dataset dataset = + bigquery.create( + DatasetInfo.newBuilder(datasetName).setDescription("Some Description").build()); + try { + assertNotNull(dataset); + assertEquals(bigquery.getOptions().getProjectId(), dataset.getDatasetId().getProject()); + assertEquals(datasetName, dataset.getDatasetId().getDataset()); + assertEquals("Some Description", dataset.getDescription()); + Dataset updatedDataset = + bigquery.update( + dataset.toBuilder().setDescription("Updated Description").build(), + 
DatasetOption.fields(DatasetField.DESCRIPTION)); + assertEquals("Updated Description", updatedDataset.getDescription()); + assertNull(updatedDataset.getCreationTime()); + assertNull(updatedDataset.getDefaultTableLifetime()); + assertNull(updatedDataset.getAcl()); + assertNull(updatedDataset.getEtag()); + assertNull(updatedDataset.getFriendlyName()); + assertNull(updatedDataset.getGeneratedId()); + assertNull(updatedDataset.getLastModified()); + assertNull(updatedDataset.getLocation()); + assertNull(updatedDataset.getSelfLink()); + assertNull(updatedDataset.getStorageBillingModel()); + assertNull(updatedDataset.getMaxTimeTravelHours()); + } finally { + dataset.delete(); + } } @Test - public void testUpdateDatasetWithSelectedFields() { + void testUpdateDatasetWithAccessPolicyVersion() throws IOException { + String accessPolicyDataset = RemoteBigQueryHelper.generateDatasetName(); + ServiceAccountCredentials credentials = + (ServiceAccountCredentials) GoogleCredentials.getApplicationDefault(); Dataset dataset = bigquery.create( - DatasetInfo.newBuilder(OTHER_DATASET).setDescription("Some Description").build()); - assertNotNull(dataset); - assertEquals(bigquery.getOptions().getProjectId(), dataset.getDatasetId().getProject()); - assertEquals(OTHER_DATASET, dataset.getDatasetId().getDataset()); - assertEquals("Some Description", dataset.getDescription()); + DatasetInfo.newBuilder(accessPolicyDataset) + .setDescription("Some Description") + .setLabels(Collections.singletonMap("a", "b")) + .build()); + assertThat(dataset).isNotNull(); + + User user = new User(credentials.getClientEmail()); + Acl.Role role = Acl.Role.WRITER; + Acl.Expr condition = + new Expr( + "request.time > timestamp('2024-01-01T00:00:00Z')", + "test condition", + "requests after the year 2024", + "location"); + Acl acl = Acl.of(user, role, condition); + List acls = new ArrayList<>(); + acls.addAll(dataset.getAcl()); + acls.add(acl); + + DatasetOption datasetOption = DatasetOption.accessPolicyVersion(3); + DatasetOption updateModeOption = DatasetOption.updateMode(DatasetUpdateMode.UPDATE_FULL); Dataset updatedDataset = bigquery.update( - dataset.toBuilder().setDescription("Updated Description").build(), - DatasetOption.fields(DatasetField.DESCRIPTION)); + dataset.toBuilder() + .setDescription("Updated Description") + .setLabels(null) + .setAcl(acls) + .build(), + datasetOption, + updateModeOption); + assertNotNull(updatedDataset); assertEquals("Updated Description", updatedDataset.getDescription()); - assertNull(updatedDataset.getCreationTime()); - assertNull(updatedDataset.getDefaultTableLifetime()); - assertNull(updatedDataset.getAcl()); - assertNull(updatedDataset.getEtag()); - assertNull(updatedDataset.getFriendlyName()); - assertNull(updatedDataset.getGeneratedId()); - assertNull(updatedDataset.getLastModified()); - assertNull(updatedDataset.getLocation()); - assertNull(updatedDataset.getSelfLink()); - assertTrue(dataset.delete()); + assertTrue(updatedDataset.getLabels().isEmpty()); + + Acl updatedAclWithCond = null; + for (Acl updatedAcl : updatedDataset.getAcl()) { + if (updatedAcl.getCondition() != null) { + updatedAclWithCond = updatedAcl; + } + } + assertNotNull(updatedAclWithCond); + assertEquals(updatedAclWithCond.getCondition(), condition); + + RemoteBigQueryHelper.forceDelete(bigquery, accessPolicyDataset); } @Test - public void testGetNonExistingTable() { + void testGetNonExistingTable() { assertNull(bigquery.getTable(DATASET, "test_get_non_existing_table")); } @Test - public void 
testCreateTableWithRangePartitioning() { + void testCreateTableWithRangePartitioning() { String tableName = "test_create_table_rangepartitioning"; TableId tableId = TableId.of(DATASET, tableName); try { @@ -705,31 +1501,676 @@ public void testCreateTableWithRangePartitioning() { } } - public void testCreateTableWithPolicyTags() { - String tableName = "test_create_table_policytags"; + /* TODO(prasmish): replicate this test case for executeSelect on the relevant part */ + @Test + void testJsonType() throws InterruptedException { + String tableName = "test_create_table_jsontype"; TableId tableId = TableId.of(DATASET, tableName); + Schema schema = Schema.of(Field.of("jsonField", StandardSQLTypeName.JSON)); + StandardTableDefinition standardTableDefinition = StandardTableDefinition.of(schema); try { - StandardTableDefinition tableDefinition = - StandardTableDefinition.newBuilder().setSchema(POLICY_SCHEMA).build(); - Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + // Create a table with a JSON column + Table createdTable = bigquery.create(TableInfo.of(tableId, standardTableDefinition)); assertNotNull(createdTable); - Table remoteTable = bigquery.getTable(DATASET, tableName); - assertEquals(POLICY_SCHEMA, remoteTable.getDefinition().getSchema()); + + // Insert 4 rows of JSON data into the JSON column + Map jsonRow1 = + Collections.singletonMap( + "jsonField", "{\"student\" : {\"name\" : \"Jane\", \"id\": 10}}"); + Map jsonRow2 = + Collections.singletonMap("jsonField", "{\"student\" : {\"name\" : \"Joy\", \"id\": 11}}"); + Map jsonRow3 = + Collections.singletonMap( + "jsonField", "{\"student\" : {\"name\" : \"Alice\", \"id\": 12}}"); + Map jsonRow4 = + Collections.singletonMap( + "jsonField", "{\"student\" : {\"name\" : \"Bijoy\", \"id\": 14}}"); + InsertAllRequest request = + InsertAllRequest.newBuilder(tableId) + .addRow(jsonRow1) + .addRow(jsonRow2) + .addRow(jsonRow3) + .addRow(jsonRow4) + .build(); + InsertAllResponse response = bigquery.insertAll(request); + assertFalse(response.hasErrors()); + assertEquals(0, response.getInsertErrors().size()); + + // Query the JSON column with string positional query parameter + String sql = + "SELECT jsonField.class.student.id FROM " + + tableId.getTable() + + " WHERE JSON_VALUE(jsonField, \"$.class.student.name\") = ? 
"; + QueryParameterValue stringParameter = QueryParameterValue.string("Jane"); + QueryJobConfiguration queryJobConfiguration = + QueryJobConfiguration.newBuilder(sql) + .setDefaultDataset(DatasetId.of(DATASET)) + .setUseLegacySql(false) + .addPositionalParameter(stringParameter) + .build(); + TableResult result = bigquery.query(queryJobConfiguration); + assertNotNull(result.getJobId()); + for (FieldValueList values : result.iterateAll()) { + assertEquals("10", values.get(0).getValue()); + } + + // Insert another JSON row parsed from a String with json positional query parameter + String dml = "INSERT INTO " + tableId.getTable() + " (jsonField) VALUES(?)"; + QueryParameterValue jsonParameter = + QueryParameterValue.json("{\"class\" : {\"student\" : [{\"name\" : \"Amy\"}]}}"); + QueryJobConfiguration dmlQueryJobConfiguration = + QueryJobConfiguration.newBuilder(dml) + .setDefaultDataset(DatasetId.of(DATASET)) + .setUseLegacySql(false) + .addPositionalParameter(jsonParameter) + .build(); + bigquery.query(dmlQueryJobConfiguration); + Page rows = bigquery.listTableData(tableId); + assertEquals(5, Iterables.size(rows.getValues())); + + // Insert another JSON row parsed from a JsonObject with json positional query parameter + JsonObject jsonObject = new JsonObject(); + jsonObject.addProperty("class", "student"); + QueryParameterValue jsonParameter1 = QueryParameterValue.json(jsonObject); + QueryJobConfiguration dmlQueryJobConfiguration1 = + QueryJobConfiguration.newBuilder(dml) + .setDefaultDataset(DatasetId.of(DATASET)) + .setUseLegacySql(false) + .addPositionalParameter(jsonParameter1) + .build(); + bigquery.query(dmlQueryJobConfiguration1); + Page rows1 = bigquery.listTableData(tableId); + assertEquals(6, Iterables.size(rows1.getValues())); + int rowCount = 0; + for (FieldValueList row : rows1.iterateAll()) { + FieldValue jsonCell = row.get(0); + if (rowCount == 1) assertEquals("{\"class\":\"student\"}", jsonCell.getStringValue()); + rowCount++; + } + + // Try inserting a malformed JSON + QueryParameterValue badJsonParameter = + QueryParameterValue.json("{\"class\" : {\"student\" : [{\"name\" : \"BadBoy\"}}"); + QueryJobConfiguration dmlQueryJobConfiguration2 = + QueryJobConfiguration.newBuilder(dml) + .setDefaultDataset(DatasetId.of(DATASET)) + .setUseLegacySql(false) + .addPositionalParameter(badJsonParameter) + .build(); + BigQueryException exception = + assertThrows( + BigQueryException.class, + () -> bigquery.query(dmlQueryJobConfiguration2), + "Querying with malformed JSON shouldn't work"); + BigQueryError error = exception.getError(); + assertNotNull(error); + assertEquals("invalidQuery", error.getReason()); } finally { - bigquery.delete(tableId); + assertTrue(bigquery.delete(tableId)); } } + /* TODO(prasmish): replicate this test case for executeSelect on the relevant part */ @Test - public void testCreateAndGetTable() { - String tableName = "test_create_and_get_table"; + void testIntervalType() throws InterruptedException { + String tableName = "test_create_table_intervaltype"; TableId tableId = TableId.of(DATASET, tableName); - TimePartitioning partitioning = TimePartitioning.of(Type.DAY); - Clustering clustering = - Clustering.newBuilder().setFields(ImmutableList.of(STRING_FIELD_SCHEMA.getName())).build(); - StandardTableDefinition tableDefinition = - StandardTableDefinition.newBuilder() - .setSchema(TABLE_SCHEMA) + Schema schema = Schema.of(Field.of("intervalField", StandardSQLTypeName.INTERVAL)); + StandardTableDefinition standardTableDefinition = 
StandardTableDefinition.of(schema); + try { + // Create a table with an INTERVAL column + Table createdTable = bigquery.create(TableInfo.of(tableId, standardTableDefinition)); + assertNotNull(createdTable); + + // Insert 2 rows of Interval data into the Interval column + Map<String, Object> intervalRow1 = + Collections.singletonMap("intervalField", "123-7 -19 0:24:12.000006"); + Map<String, Object> intervalRow2 = + Collections.singletonMap("intervalField", "P123Y7M-19DT0H24M12.000006S"); + + InsertAllRequest request = + InsertAllRequest.newBuilder(tableId).addRow(intervalRow1).addRow(intervalRow2).build(); + InsertAllResponse response = bigquery.insertAll(request); + assertFalse(response.hasErrors()); + assertEquals(0, response.getInsertErrors().size()); + + // Insert another Interval row parsed from a String with Interval positional query parameter + String dml = "INSERT INTO " + tableId.getTable() + " (intervalField) VALUES(?)"; + // Parsing from ISO 8601 format String + QueryParameterValue intervalParameter = + QueryParameterValue.interval("P125Y7M-19DT0H24M12.000006S"); + QueryJobConfiguration dmlQueryJobConfiguration = + QueryJobConfiguration.newBuilder(dml) + .setDefaultDataset(DatasetId.of(DATASET)) + .setUseLegacySql(false) + .addPositionalParameter(intervalParameter) + .build(); + bigquery.query(dmlQueryJobConfiguration); + Page<FieldValueList> rows = bigquery.listTableData(tableId); + assertEquals(3, Iterables.size(rows.getValues())); + + // Parsing from threeten-extra PeriodDuration + QueryParameterValue intervalParameter1 = + QueryParameterValue.interval( + PeriodDuration.of(Period.of(1, 2, 25), java.time.Duration.ofHours(8))); + QueryJobConfiguration dmlQueryJobConfiguration1 = + QueryJobConfiguration.newBuilder(dml) + .setDefaultDataset(DatasetId.of(DATASET)) + .setUseLegacySql(false) + .addPositionalParameter(intervalParameter1) + .build(); + bigquery.query(dmlQueryJobConfiguration1); + Page<FieldValueList> rows1 = bigquery.listTableData(tableId); + assertEquals(4, Iterables.size(rows1.getValues())); + + // Query the Interval column with Interval positional query parameter + String sql = "SELECT intervalField FROM " + tableId.getTable() + " WHERE intervalField = ? "; + QueryParameterValue intervalParameter2 = + QueryParameterValue.interval("P125Y7M-19DT0H24M12.000006S"); + QueryJobConfiguration queryJobConfiguration = + QueryJobConfiguration.newBuilder(sql) + .setDefaultDataset(DatasetId.of(DATASET)) + .setUseLegacySql(false) + .addPositionalParameter(intervalParameter2) + .build(); + TableResult result = bigquery.query(queryJobConfiguration); + assertNotNull(result.getJobId()); + PeriodDuration periodDuration = + PeriodDuration.of(Period.of(125, 7, -19), java.time.Duration.parse("PT24M12.000006S")); + for (FieldValueList values : result.iterateAll()) { + assertEquals("125-7 -19 0:24:12.000006", values.get(0).getValue()); + assertEquals(periodDuration, values.get(0).getPeriodDuration()); + } + } finally { + assertTrue(bigquery.delete(tableId)); + } + } + + @Test + void testRangeType() throws InterruptedException { + String tableName = "test_range_type_table"; + TableId tableId = TableId.of(DATASET, tableName); + + StandardTableDefinition standardTableDefinition = StandardTableDefinition.of(RANGE_SCHEMA); + try { + // Create a table with RANGE columns and verify the result.
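+ // Rows are inserted with Range.getValues() (a start/end map), read back via listTableData, and matched against the RANGE_TEST_VALUES_* constants; timestamp ranges compare against the epoch-seconds variants in RANGE_TEST_VALUES_EXPECTED_TIMESTAMP.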
+ Table createdTable = bigquery.create(TableInfo.of(tableId, standardTableDefinition)); + assertNotNull(createdTable); + + Table remoteTable = bigquery.getTable(DATASET, tableName); + Schema remoteSchema = remoteTable.getDefinition().getSchema(); + assertEquals(RANGE_SCHEMA, remoteSchema); + + // Insert range values to the table. + InsertAllRequest.Builder request = InsertAllRequest.newBuilder(tableId); + for (String name : RANGE_TEST_VALUES_DATES.keySet()) { + ImmutableMap.Builder builder = ImmutableMap.builder(); + builder.put("name", name); + builder.put("date", RANGE_TEST_VALUES_DATES.get(name).getValues()); + builder.put("datetime", RANGE_TEST_VALUES_DATETIME.get(name).getValues()); + builder.put("timestamp", RANGE_TEST_VALUES_TIMESTAMP.get(name).getValues()); + request.addRow(builder.build()); + } + bigquery.insertAll(request.build()); + + // Test listTableData + TableResult result = bigquery.listTableData(DATASET, tableName, RANGE_SCHEMA); + assertEquals(RANGE_TEST_VALUES_DATES.size(), Iterables.size(result.getValues())); + for (FieldValueList values : result.iterateAll()) { + String name = values.get("name").getStringValue(); + assertEquals(RANGE_TEST_VALUES_DATES.get(name), values.get("date").getRangeValue()); + assertEquals(RANGE_TEST_VALUES_DATETIME.get(name), values.get("datetime").getRangeValue()); + assertEquals( + RANGE_TEST_VALUES_EXPECTED_TIMESTAMP.get(name), + values.get("timestamp").getRangeValue()); + } + + // Test Query Parameter by selecting for the bounded Range entry only. + String query = + String.format( + "SELECT name, date, datetime, timestamp\n" + + "FROM %s.%s\n" + + "WHERE date = @dateParam\n" + + "AND datetime = @datetimeParam\n" + + "AND timestamp = @timestampParam", + DATASET, tableName); + + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(DATASET)) + .addNamedParameter( + "dateParam", QueryParameterValue.range(RANGE_TEST_VALUES_DATES.get("bounded"))) + .addNamedParameter( + "datetimeParam", + QueryParameterValue.range(RANGE_TEST_VALUES_DATETIME.get("bounded"))) + .addNamedParameter( + "timestampParam", + QueryParameterValue.range(RANGE_TEST_VALUES_TIMESTAMP.get("bounded"))) + .build(); + result = bigquery.query(config); + + assertEquals(1, Iterables.size(result.getValues())); + for (FieldValueList values : result.iterateAll()) { + String name = values.get("name").getStringValue(); + assertEquals(RANGE_TEST_VALUES_DATES.get(name), values.get("date").getRangeValue()); + assertEquals(RANGE_TEST_VALUES_DATETIME.get(name), values.get("datetime").getRangeValue()); + assertEquals( + RANGE_TEST_VALUES_EXPECTED_TIMESTAMP.get(name), + values.get("timestamp").getRangeValue()); + } + } finally { + assertTrue(bigquery.delete(tableId)); + } + } + + @Test + void testCreateTableWithConstraints() { + String tableName = "test_create_table_with_constraints"; + TableId tableId = TableId.of(DATASET, tableName); + Field stringFieldWithConstraint = + Field.newBuilder("stringFieldWithConstraint", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("field has a constraint") + .setMaxLength(10L) + .build(); + Field byteFieldWithConstraint = + Field.newBuilder("byteFieldWithConstraint", StandardSQLTypeName.BYTES) + .setMode(Field.Mode.NULLABLE) + .setDescription("field has a constraint") + .setMaxLength(150L) + .build(); + Field numericFieldWithConstraint = + Field.newBuilder("numericFieldWithConstraint", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.NULLABLE) + .setDescription("field has a 
constraint") + .setPrecision(20L) + .build(); + Field bigNumericFieldWithConstraint = + Field.newBuilder("bigNumericFieldWithConstraint", StandardSQLTypeName.BIGNUMERIC) + .setMode(Field.Mode.NULLABLE) + .setDescription("field has a constraint") + .setPrecision(30L) + .setScale(5L) + .build(); + Schema schema = + Schema.of( + stringFieldWithConstraint, + byteFieldWithConstraint, + numericFieldWithConstraint, + bigNumericFieldWithConstraint); + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder().setSchema(schema).build(); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + assertNotNull(createdTable); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertEquals(schema, remoteTable.getDefinition().getSchema()); + bigquery.delete(tableId); + } + + @Test + void testCreateDatasetWithSpecifiedStorageBillingModel() { + String billingModelDataset = RemoteBigQueryHelper.generateDatasetName(); + DatasetInfo info = + DatasetInfo.newBuilder(billingModelDataset) + .setDescription(DESCRIPTION) + .setStorageBillingModel(STORAGE_BILLING_MODEL) + .setLabels(LABELS) + .build(); + bigquery.create(info); + + Dataset dataset = bigquery.getDataset(DatasetId.of(billingModelDataset)); + assertEquals(STORAGE_BILLING_MODEL, dataset.getStorageBillingModel()); + + RemoteBigQueryHelper.forceDelete(bigquery, billingModelDataset); + } + + @Test + void testCreateDatasetWithSpecificMaxTimeTravelHours() { + String timeTravelDataset = RemoteBigQueryHelper.generateDatasetName(); + DatasetInfo info = + DatasetInfo.newBuilder(timeTravelDataset) + .setDescription(DESCRIPTION) + .setMaxTimeTravelHours(MAX_TIME_TRAVEL_HOURS) + .setLabels(LABELS) + .build(); + bigquery.create(info); + + Dataset dataset = bigquery.getDataset(DatasetId.of(timeTravelDataset)); + assertEquals(MAX_TIME_TRAVEL_HOURS, dataset.getMaxTimeTravelHours()); + + RemoteBigQueryHelper.forceDelete(bigquery, timeTravelDataset); + } + + @Test + void testCreateDatasetWithDefaultMaxTimeTravelHours() { + String timeTravelDataset = RemoteBigQueryHelper.generateDatasetName(); + DatasetInfo info = + DatasetInfo.newBuilder(timeTravelDataset) + .setDescription(DESCRIPTION) + .setLabels(LABELS) + .build(); + bigquery.create(info); + + Dataset dataset = bigquery.getDataset(DatasetId.of(timeTravelDataset)); + // In the backend, BigQuery sets the default Time Travel Window to be 168 hours (7 days). 
+ assertEquals(MAX_TIME_TRAVEL_HOURS_DEFAULT, dataset.getMaxTimeTravelHours()); + + RemoteBigQueryHelper.forceDelete(bigquery, timeTravelDataset); + } + + @Test + void testCreateDatasetWithDefaultCollation() { + String collationDataset = RemoteBigQueryHelper.generateDatasetName(); + DatasetInfo info = + DatasetInfo.newBuilder(collationDataset) + .setDescription(DESCRIPTION) + .setDefaultCollation("und:ci") + .setLabels(LABELS) + .build(); + bigquery.create(info); + + Dataset dataset = bigquery.getDataset(DatasetId.of(collationDataset)); + assertEquals("und:ci", dataset.getDefaultCollation()); + + RemoteBigQueryHelper.forceDelete(bigquery, collationDataset); + } + + @Test + void testCreateDatasetWithAccessPolicyVersion() throws IOException { + String accessPolicyDataset = RemoteBigQueryHelper.generateDatasetName(); + ServiceAccountCredentials credentials = + (ServiceAccountCredentials) GoogleCredentials.getApplicationDefault(); + User user = new User(credentials.getClientEmail()); + Acl.Role role = Acl.Role.OWNER; + Acl.Expr condition = + new Expr( + "request.time > timestamp('2024-01-01T00:00:00Z')", + "test condition", + "requests after the year 2024", + "location"); + Acl acl = Acl.of(user, role, condition); + DatasetInfo info = + DatasetInfo.newBuilder(accessPolicyDataset) + .setDescription(DESCRIPTION) + .setLabels(LABELS) + .setAcl(ImmutableList.of(acl)) + .build(); + DatasetOption datasetOption = DatasetOption.accessPolicyVersion(3); + Dataset dataset = bigquery.create(info, datasetOption); + assertNotNull(dataset); + assertEquals(DESCRIPTION, dataset.getDescription()); + + Acl remoteAclWithCond = null; + for (Acl remoteAcl : dataset.getAcl()) { + if (remoteAcl.getCondition() != null) { + remoteAclWithCond = remoteAcl; + } + } + assertNotNull(remoteAclWithCond); + assertEquals(remoteAclWithCond.getCondition(), condition); + + RemoteBigQueryHelper.forceDelete(bigquery, accessPolicyDataset); + } + + @Test + void testCreateDatasetWithInvalidAccessPolicyVersion() throws IOException { + String accessPolicyDataset = RemoteBigQueryHelper.generateDatasetName(); + ServiceAccountCredentials credentials = + (ServiceAccountCredentials) GoogleCredentials.getApplicationDefault(); + User user = new User(credentials.getClientEmail()); + Acl.Role role = Acl.Role.READER; + Acl.Expr condition = + new Expr( + "request.time > timestamp('2024-01-01T00:00:00Z')", + "test condition", + "requests after the year 2024", + "location"); + Acl acl = Acl.of(user, role, condition); + DatasetInfo info = + DatasetInfo.newBuilder(accessPolicyDataset) + .setDescription(DESCRIPTION) + .setLabels(LABELS) + .setAcl(ImmutableList.of(acl)) + .build(); + DatasetOption datasetOption = DatasetOption.accessPolicyVersion(4); + assertThrows(BigQueryException.class, () -> bigquery.create(info, datasetOption)); + + RemoteBigQueryHelper.forceDelete(bigquery, accessPolicyDataset); + } + + @Test + void testCreateTableWithDefaultCollation() { + String tableName = "test_create_table_with_default_collation"; + TableId tableId = TableId.of(DATASET, tableName); + Field stringFieldWithoutCollation = + Field.newBuilder("stringFieldWithoutDefaultCollation", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("String field") + .setMaxLength(150L) + .build(); + + Schema schema = Schema.of(stringFieldWithoutCollation); + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder().setSchema(schema).build(); + TableInfo tableInfo = + TableInfo.newBuilder(tableId, 
tableDefinition).setDefaultCollation("und:ci").build(); + + // Create table with default collation but fields do not have collation. + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + + // Fetch the created table and its metadata + // to verify default collation is assigned to fields + Table remoteTable = bigquery.getTable(DATASET, tableName); + Schema remoteSchema = remoteTable.getDefinition().getSchema(); + // Schema should not be equal because default collation has been added to the fields. + assertNotEquals(schema, remoteSchema); + assertEquals("und:ci", remoteTable.getDefaultCollation()); + FieldList fieldList = remoteSchema.getFields(); + for (Field field : fieldList) { + if (field.getName().equals("stringFieldWithoutDefaultCollation")) { + assertEquals("und:ci", field.getCollation()); + } + } + + bigquery.delete(tableId); + } + + @Test + void testCreateFieldWithDefaultCollation() { + String tableName = "test_create_field_with_default_collation"; + TableId tableId = TableId.of(DATASET, tableName); + Field stringFieldWithCollation = + Field.newBuilder("stringFieldWithDefaultCollation", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("String field") + .setCollation("und:ci") + .setMaxLength(150L) + .build(); + + Schema schema = Schema.of(stringFieldWithCollation); + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder().setSchema(schema).build(); + TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build(); + + // Create table with not default collation and fields that have collation + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + + // Fetch the created table and its metadata + // to verify collation is assigned to fields + Table remoteTable = bigquery.getTable(DATASET, tableName); + Schema remoteSchema = remoteTable.getDefinition().getSchema(); + // Schema should be equal because collation has been added to the fields. 
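+    // (Illustrative note, not part of the original change: because the collation was set
+    // on the Field itself, e.g.
+    //   Field.newBuilder("f", StandardSQLTypeName.STRING).setCollation("und:ci").build(),
+    // the server returns the schema exactly as sent; in the table-level test above the
+    // backend had to rewrite each STRING field, so the schemas differed.)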
+    assertEquals(schema, remoteSchema);
+    assertNull(remoteTable.getDefaultCollation());
+    FieldList fieldList = remoteSchema.getFields();
+    for (Field field : fieldList) {
+      if (field.getName().equals("stringFieldWithDefaultCollation")) {
+        assertEquals("und:ci", field.getCollation());
+      }
+    }
+    bigquery.delete(tableId);
+  }
+
+  @Test
+  void testCreateTableWithDefaultValueExpression() {
+    String tableName = "test_create_table_with_default_value_expression";
+    TableId tableId = TableId.of(DATASET, tableName);
+    Field stringFieldWithDefaultValueExpression =
+        Field.newBuilder("stringFieldWithDefaultValueExpression", StandardSQLTypeName.STRING)
+            .setMode(Field.Mode.NULLABLE)
+            .setDescription("String field with default value expression")
+            .setDefaultValueExpression("'FOO'")
+            .setMaxLength(150L)
+            .build();
+    Field timestampFieldWithDefaultValueExpression =
+        Field.newBuilder("timestampFieldWithDefaultValueExpression", StandardSQLTypeName.TIMESTAMP)
+            .setMode(Field.Mode.NULLABLE)
+            .setDescription("Timestamp field with default value expression")
+            .setDefaultValueExpression("CURRENT_TIMESTAMP")
+            .build();
+    Schema schema =
+        Schema.of(stringFieldWithDefaultValueExpression, timestampFieldWithDefaultValueExpression);
+    StandardTableDefinition tableDefinition =
+        StandardTableDefinition.newBuilder().setSchema(schema).build();
+
+    // Create table with fields that have default value expressions
+    Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition));
+    assertNotNull(createdTable);
+
+    // Fetch the created table and its metadata
+    // to verify default value expression is assigned to fields
+    Table remoteTable = bigquery.getTable(DATASET, tableName);
+    Schema remoteSchema = remoteTable.getDefinition().getSchema();
+    assertEquals(schema, remoteSchema);
+    FieldList fieldList = remoteSchema.getFields();
+    for (Field field : fieldList) {
+      if (field.getName().equals("timestampFieldWithDefaultValueExpression")) {
+        assertEquals("CURRENT_TIMESTAMP", field.getDefaultValueExpression());
+      }
+      if (field.getName().equals("stringFieldWithDefaultValueExpression")) {
+        assertEquals("'FOO'", field.getDefaultValueExpression());
+      }
+    }
+
+    // Insert rows into the created table
+    // to verify default values are inserted when a value is missing
+    String rowId1 = "rowId1";
+    String rowId2 = "rowId2";
+    List<RowToInsert> rows = new ArrayList<>();
+    Map<String, Object> row1 = new HashMap<>();
+    row1.put("timestampFieldWithDefaultValueExpression", "2022-08-22 00:45:12 UTC");
+    Map<String, Object> row2 = new HashMap<>();
+    row2.put("timestampFieldWithDefaultValueExpression", "2022-08-23 00:44:33 UTC");
+    rows.add(RowToInsert.of(rowId1, row1));
+    rows.add(RowToInsert.of(rowId2, row2));
+    InsertAllResponse response1 = remoteTable.insert(rows);
+    assertFalse(response1.hasErrors());
+
+    TableResult tableData = bigquery.listTableData(DATASET, tableName, schema);
+    String insertedField = "stringFieldWithDefaultValueExpression";
+    for (FieldValueList row : tableData.iterateAll()) {
+      assertEquals("FOO", row.get(insertedField).getValue());
+    }
+    bigquery.delete(tableId);
+  }
+
+  @Test
+  void testCreateAndUpdateTableWithPolicyTags() throws IOException {
+    // Set up policy tags in the datacatalog service
+    try (PolicyTagManagerClient policyTagManagerClient = PolicyTagManagerClient.create()) {
+      CreateTaxonomyRequest createTaxonomyRequest =
+          CreateTaxonomyRequest.newBuilder()
+              .setParent(String.format("projects/%s/locations/%s", PROJECT_ID, "us"))
+              .setTaxonomy(
+                  Taxonomy.newBuilder()
+                      // DisplayName must be unique across org.
Use UUID rather than time to ensure + // no collisions + // from parallel test invocations + .setDisplayName( + String.format( + "testing taxonomy %s", UUID.randomUUID().toString().substring(0, 8))) + .setDescription("taxonomy created for integration tests") + .addActivatedPolicyTypes(PolicyType.FINE_GRAINED_ACCESS_CONTROL) + .build()) + .build(); + Taxonomy taxonomyResponse = policyTagManagerClient.createTaxonomy(createTaxonomyRequest); + String taxonomyId = taxonomyResponse.getName(); + + CreatePolicyTagRequest createPolicyTagRequest = + CreatePolicyTagRequest.newBuilder() + .setParent(taxonomyId) + .setPolicyTag(PolicyTag.newBuilder().setDisplayName("ExamplePolicyTag").build()) + .build(); + PolicyTag policyTagResponse = policyTagManagerClient.createPolicyTag(createPolicyTagRequest); + String policyTagId = policyTagResponse.getName(); + PolicyTags policyTags = + PolicyTags.newBuilder().setNames(ImmutableList.of(policyTagId)).build(); + Field stringFieldWithPolicy = + Field.newBuilder("StringFieldWithPolicy", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("field has a policy") + .setPolicyTags(policyTags) + .build(); + Schema policySchema = + Schema.of(STRING_FIELD_SCHEMA, stringFieldWithPolicy, INTEGER_FIELD_SCHEMA); + + // Test: Amend an existing schema with a policy tag. + String tableNameForUpdate = "test_update_table_policytags"; + TableId tableIdForUpdate = TableId.of(DATASET, tableNameForUpdate); + TableInfo tableInfo = + TableInfo.newBuilder(tableIdForUpdate, StandardTableDefinition.of(TABLE_SCHEMA)) + .setDescription("policy tag update test table") + .build(); + Table createdTableForUpdate = bigquery.create(tableInfo); + assertNotNull(createdTableForUpdate); + Schema schema = createdTableForUpdate.getDefinition().getSchema(); + FieldList fields = schema.getFields(); + // Create a new schema adding the current fields, plus the new policy tag field + List fieldList = new ArrayList<>(); + for (Field field : fields) { + fieldList.add(field); + } + fieldList.add(stringFieldWithPolicy); + Schema updatedSchemaWithPolicyTag = Schema.of(fieldList); + Table updatedTable = + createdTableForUpdate.toBuilder() + .setDefinition(StandardTableDefinition.of(updatedSchemaWithPolicyTag)) + .build(); + updatedTable.update(); + Table remoteUpdatedTable = bigquery.getTable(DATASET, tableNameForUpdate); + assertEquals( + updatedSchemaWithPolicyTag, + remoteUpdatedTable.getDefinition().getSchema()); + bigquery.delete(tableIdForUpdate); + + // Test: Create a new table with a policy tag defined. 
+ String tableName = "test_create_table_policytags"; + TableId tableId = TableId.of(DATASET, tableName); + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder().setSchema(policySchema).build(); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + assertNotNull(createdTable); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertEquals(policySchema, remoteTable.getDefinition().getSchema()); + bigquery.delete(tableId); + + // Clean up policy tags + policyTagManagerClient.deletePolicyTag(policyTagId); + policyTagManagerClient.deleteTaxonomy(taxonomyId); + } + } + + @Test + void testCreateAndGetTable() { + String tableName = "test_create_and_get_table"; + TableId tableId = TableId.of(DATASET, tableName); + TimePartitioning partitioning = TimePartitioning.of(Type.DAY); + Clustering clustering = + Clustering.newBuilder().setFields(ImmutableList.of(STRING_FIELD_SCHEMA.getName())).build(); + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder() + .setSchema(TABLE_SCHEMA) .setTimePartitioning(partitioning) .setClustering(clustering) .build(); @@ -747,6 +2188,10 @@ public void testCreateAndGetTable() { assertNotNull(remoteTable.getLastModifiedTime()); assertNotNull(remoteTable.getDefinition().getNumBytes()); assertNotNull(remoteTable.getDefinition().getNumLongTermBytes()); + assertNotNull(remoteTable.getDefinition().getNumTotalLogicalBytes()); + assertNotNull(remoteTable.getDefinition().getNumActiveLogicalBytes()); + assertNotNull( + remoteTable.getDefinition().getNumLongTermLogicalBytes()); assertNotNull(remoteTable.getDefinition().getNumRows()); assertEquals( partitioning, remoteTable.getDefinition().getTimePartitioning()); @@ -755,52 +2200,226 @@ public void testCreateAndGetTable() { } @Test - public void testCreateAndGetTableWithSelectedField() { - String tableName = "test_create_and_get_selected_fields_table"; + void testCreateAndListTable() { + String tableName = "test_create_and_list_table"; TableId tableId = TableId.of(DATASET, tableName); - StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); - Table createdTable = - bigquery.create( - TableInfo.newBuilder(tableId, tableDefinition) - .setLabels(Collections.singletonMap("a", "b")) - .build()); + TimePartitioning partitioning = TimePartitioning.of(Type.DAY); + Clustering clustering = + Clustering.newBuilder().setFields(ImmutableList.of(STRING_FIELD_SCHEMA.getName())).build(); + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder() + .setSchema(TABLE_SCHEMA) + .setTimePartitioning(partitioning) + .setClustering(clustering) + .build(); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); assertNotNull(createdTable); assertEquals(DATASET, createdTable.getTableId().getDataset()); assertEquals(tableName, createdTable.getTableId().getTable()); - Table remoteTable = - bigquery.getTable( - DATASET, tableName, TableOption.fields(TableField.CREATION_TIME, TableField.LABELS)); + + Page
<Table> tables = bigquery.listTables(DATASET); + boolean found = false; + Iterator<Table>
    tableIterator = tables.getValues().iterator(); + // Find createdTable and validate the table definition. + while (tableIterator.hasNext() && !found) { + Table table = tableIterator.next(); + if (table.getTableId().equals(createdTable.getTableId())) { + StandardTableDefinition definition = table.getDefinition(); + assertThat(definition.getClustering()).isNotNull(); + assertThat(definition.getTimePartitioning()).isNotNull(); + found = true; + } + } + assertTrue(found); + assertTrue(createdTable.delete()); + } + + @Test + void testCreateAndGetTableWithBasicTableMetadataView() { + String tableName = "test_create_and_get_table_with_basic_metadata_view"; + TableId tableId = TableId.of(DATASET, tableName); + TimePartitioning partitioning = TimePartitioning.of(Type.DAY); + Clustering clustering = + Clustering.newBuilder().setFields(ImmutableList.of(STRING_FIELD_SCHEMA.getName())).build(); + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder() + .setSchema(TABLE_SCHEMA) + .setTimePartitioning(partitioning) + .setClustering(clustering) + .build(); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.getTableId().getDataset()); + assertEquals(tableName, createdTable.getTableId().getTable()); + TableOption tableOption = BigQuery.TableOption.tableMetadataView(TableMetadataView.BASIC); + Table remoteTable = bigquery.getTable(DATASET, tableName, tableOption); assertNotNull(remoteTable); assertTrue(remoteTable.getDefinition() instanceof StandardTableDefinition); assertEquals(createdTable.getTableId(), remoteTable.getTableId()); assertEquals(TableDefinition.Type.TABLE, remoteTable.getDefinition().getType()); - assertThat(remoteTable.getLabels()).containsExactly("a", "b"); - assertNotNull(remoteTable.getCreationTime()); - assertNull(remoteTable.getDefinition().getSchema()); + assertEquals(TABLE_SCHEMA, remoteTable.getDefinition().getSchema()); + // Next four values are considered transient fields that should not be calculated assertNull(remoteTable.getLastModifiedTime()); assertNull(remoteTable.getDefinition().getNumBytes()); assertNull(remoteTable.getDefinition().getNumLongTermBytes()); assertNull(remoteTable.getDefinition().getNumRows()); - assertNull(remoteTable.getDefinition().getTimePartitioning()); - assertNull(remoteTable.getDefinition().getClustering()); assertTrue(remoteTable.delete()); } @Test - public void testCreateExternalTable() throws InterruptedException { - String tableName = "test_create_external_table"; + void testCreateAndGetTableWithFullTableMetadataView() { + String tableName = "test_create_and_get_table_with_full_metadata_view"; TableId tableId = TableId.of(DATASET, tableName); - ExternalTableDefinition externalTableDefinition = - ExternalTableDefinition.of( - "gs://" + BUCKET + "/" + JSON_LOAD_FILE, TABLE_SCHEMA, FormatOptions.json()); - TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); - Table createdTable = bigquery.create(tableInfo); + TimePartitioning partitioning = TimePartitioning.of(Type.DAY); + Clustering clustering = + Clustering.newBuilder().setFields(ImmutableList.of(STRING_FIELD_SCHEMA.getName())).build(); + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder() + .setSchema(TABLE_SCHEMA) + .setTimePartitioning(partitioning) + .setClustering(clustering) + .build(); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); assertNotNull(createdTable); assertEquals(DATASET, 
createdTable.getTableId().getDataset()); assertEquals(tableName, createdTable.getTableId().getTable()); - Table remoteTable = bigquery.getTable(DATASET, tableName); + TableOption tableOption = BigQuery.TableOption.tableMetadataView(TableMetadataView.FULL); + Table remoteTable = bigquery.getTable(DATASET, tableName, tableOption); assertNotNull(remoteTable); - assertTrue(remoteTable.getDefinition() instanceof ExternalTableDefinition); + assertTrue(remoteTable.getDefinition() instanceof StandardTableDefinition); + assertEquals(createdTable.getTableId(), remoteTable.getTableId()); + assertEquals(TableDefinition.Type.TABLE, remoteTable.getDefinition().getType()); + assertEquals(TABLE_SCHEMA, remoteTable.getDefinition().getSchema()); + assertNotNull(remoteTable.getLastModifiedTime()); + assertNotNull(remoteTable.getDefinition().getNumBytes()); + assertNotNull(remoteTable.getDefinition().getNumLongTermBytes()); + assertNotNull(remoteTable.getDefinition().getNumRows()); + assertTrue(remoteTable.delete()); + } + + @Test + void testCreateAndGetTableWithStorageStatsTableMetadataView() { + String tableName = "test_create_and_get_table_with_storage_stats_metadata_view"; + TableId tableId = TableId.of(DATASET, tableName); + TimePartitioning partitioning = TimePartitioning.of(Type.DAY); + Clustering clustering = + Clustering.newBuilder().setFields(ImmutableList.of(STRING_FIELD_SCHEMA.getName())).build(); + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder() + .setSchema(TABLE_SCHEMA) + .setTimePartitioning(partitioning) + .setClustering(clustering) + .build(); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.getTableId().getDataset()); + assertEquals(tableName, createdTable.getTableId().getTable()); + TableOption tableOption = + BigQuery.TableOption.tableMetadataView(TableMetadataView.STORAGE_STATS); + Table remoteTable = bigquery.getTable(DATASET, tableName, tableOption); + assertNotNull(remoteTable); + assertTrue(remoteTable.getDefinition() instanceof StandardTableDefinition); + assertEquals(createdTable.getTableId(), remoteTable.getTableId()); + assertEquals(TableDefinition.Type.TABLE, remoteTable.getDefinition().getType()); + assertEquals(TABLE_SCHEMA, remoteTable.getDefinition().getSchema()); + assertNotNull(remoteTable.getLastModifiedTime()); + assertNotNull(remoteTable.getDefinition().getNumBytes()); + assertNotNull(remoteTable.getDefinition().getNumLongTermBytes()); + assertNotNull(remoteTable.getDefinition().getNumRows()); + assertTrue(remoteTable.delete()); + } + + @Test + void testCreateAndGetTableWithUnspecifiedTableMetadataView() { + String tableName = "test_create_and_get_table_with_unspecified_metadata_view"; + TableId tableId = TableId.of(DATASET, tableName); + TimePartitioning partitioning = TimePartitioning.of(Type.DAY); + Clustering clustering = + Clustering.newBuilder().setFields(ImmutableList.of(STRING_FIELD_SCHEMA.getName())).build(); + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder() + .setSchema(TABLE_SCHEMA) + .setTimePartitioning(partitioning) + .setClustering(clustering) + .build(); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.getTableId().getDataset()); + assertEquals(tableName, createdTable.getTableId().getTable()); + TableOption tableOption = + 
BigQuery.TableOption.tableMetadataView(TableMetadataView.TABLE_METADATA_VIEW_UNSPECIFIED); + Table remoteTable = bigquery.getTable(DATASET, tableName, tableOption); + assertNotNull(remoteTable); + assertTrue(remoteTable.getDefinition() instanceof StandardTableDefinition); + assertEquals(createdTable.getTableId(), remoteTable.getTableId()); + assertEquals(TableDefinition.Type.TABLE, remoteTable.getDefinition().getType()); + assertEquals(TABLE_SCHEMA, remoteTable.getDefinition().getSchema()); + assertNotNull(remoteTable.getLastModifiedTime()); + assertNotNull(remoteTable.getDefinition().getNumBytes()); + assertNotNull(remoteTable.getDefinition().getNumLongTermBytes()); + assertNotNull(remoteTable.getDefinition().getNumRows()); + assertTrue(remoteTable.delete()); + } + + @Test + void testCreateAndGetTableWithSelectedField() { + String tableName = "test_create_and_get_selected_fields_table"; + TableId tableId = TableId.of(DATASET, tableName); + StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); + Table createdTable = + bigquery.create( + TableInfo.newBuilder(tableId, tableDefinition) + .setLabels(Collections.singletonMap("a", "b")) + .build()); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.getTableId().getDataset()); + assertEquals(tableName, createdTable.getTableId().getTable()); + Table remoteTable = + bigquery.getTable( + DATASET, tableName, TableOption.fields(TableField.CREATION_TIME, TableField.LABELS)); + assertNotNull(remoteTable); + assertTrue(remoteTable.getDefinition() instanceof StandardTableDefinition); + assertEquals(createdTable.getTableId(), remoteTable.getTableId()); + assertEquals(TableDefinition.Type.TABLE, remoteTable.getDefinition().getType()); + assertThat(remoteTable.getLabels()).containsExactly("a", "b"); + assertNotNull(remoteTable.getCreationTime()); + assertNull(remoteTable.getDefinition().getSchema()); + assertNull(remoteTable.getLastModifiedTime()); + assertNull(remoteTable.getDefinition().getNumBytes()); + assertNull(remoteTable.getDefinition().getNumLongTermBytes()); + assertNull( + remoteTable.getDefinition().getNumTimeTravelPhysicalBytes()); + assertNull(remoteTable.getDefinition().getNumTotalLogicalBytes()); + assertNull(remoteTable.getDefinition().getNumActiveLogicalBytes()); + assertNull(remoteTable.getDefinition().getNumLongTermLogicalBytes()); + assertNull(remoteTable.getDefinition().getNumTotalPhysicalBytes()); + assertNull(remoteTable.getDefinition().getNumActivePhysicalBytes()); + assertNull(remoteTable.getDefinition().getNumLongTermPhysicalBytes()); + assertNull(remoteTable.getDefinition().getNumRows()); + assertNull(remoteTable.getDefinition().getTimePartitioning()); + assertNull(remoteTable.getDefinition().getClustering()); + assertTrue(remoteTable.delete()); + } + + @Test + void testCreateExternalTable() throws InterruptedException { + String tableName = "test_create_external_table"; + TableId tableId = TableId.of(DATASET, tableName); + + ExternalTableDefinition externalTableDefinition = + ExternalTableDefinition.of( + "gs://" + BUCKET + "/" + JSON_LOAD_FILE, TABLE_SCHEMA, FormatOptions.json()) + .toBuilder() + .setMaxStaleness("INTERVAL 15 MINUTE") + .build(); + TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.getTableId().getDataset()); + assertEquals(tableName, createdTable.getTableId().getTable()); + Table remoteTable = bigquery.getTable(DATASET, 
tableName); + assertNotNull(remoteTable); + assertTrue(remoteTable.getDefinition() instanceof ExternalTableDefinition); assertEquals(createdTable.getTableId(), remoteTable.getTableId()); assertEquals(TABLE_SCHEMA, remoteTable.getDefinition().getSchema()); QueryJobConfiguration config = @@ -832,7 +2451,7 @@ public void testCreateExternalTable() throws InterruptedException { assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); assertEquals(integerValue, integerCell.getLongValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); integerValue = ~integerValue & 0x1; rowCount++; } @@ -841,7 +2460,64 @@ public void testCreateExternalTable() throws InterruptedException { } @Test - public void testCreateViewTable() throws InterruptedException { + void testSetPermExternalTableSchema() { + String tableName = "test_create_external_table_perm"; + TableId tableId = TableId.of(DATASET, tableName); + ExternalTableDefinition externalTableDefinition = + ExternalTableDefinition.newBuilder( + "gs://" + BUCKET + "/" + JSON_LOAD_FILE, FormatOptions.json()) + .setSchema(TABLE_SCHEMA) + .setConnectionId( + "projects/java-docs-samples-testing/locations/us/connections/DEVREL_TEST_CONNECTION") + .build(); + TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); + Table createdTable = bigquery.create(tableInfo); + + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.getTableId().getDataset()); + assertEquals(tableName, createdTable.getTableId().getTable()); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTable); + assertTrue(remoteTable.delete()); + } + + @Test + void testUpdatePermExternableTableWithAutodetectSchemaUpdatesSchema() { + String tableName = "test_create_external_table_perm_with_auto_detect"; + TableId tableId = TableId.of(DATASET, tableName); + Schema setSchema = Schema.of(TIMESTAMP_FIELD_SCHEMA, STRING_FIELD_SCHEMA); + + ExternalTableDefinition externalTableDefinition = + ExternalTableDefinition.newBuilder( + "gs://" + BUCKET + "/" + JSON_LOAD_FILE, FormatOptions.json()) + .setSchema(setSchema) + .build(); + TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); + Table createdTable = bigquery.create(tableInfo); + + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.getTableId().getDataset()); + assertEquals(tableName, createdTable.getTableId().getTable()); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTable); + assertEquals(setSchema, remoteTable.getDefinition().getSchema()); + + Table updatedTable = + bigquery.update( + createdTable.toBuilder() + .setDefinition( + ((ExternalTableDefinition) createdTable.getDefinition()) + .toBuilder().setSchema(null).setAutodetect(true).build()) + .build(), + BigQuery.TableOption.autodetectSchema(true)); + // Schema should change. 
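+    // (Illustrative note, not part of the original change: TableOption.autodetectSchema(true)
+    // asks the backend to re-infer the schema from the GCS source, so the two-field schema
+    // set at creation is expected to be replaced by the full detected schema.)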
+ assertTrue(!updatedTable.getDefinition().getSchema().equals(setSchema)); + + assertTrue(remoteTable.delete()); + } + + @Test + void testCreateViewTable() throws InterruptedException { String tableName = "test_create_view_table"; TableId tableId = TableId.of(DATASET, tableName); ViewDefinition viewDefinition = @@ -867,6 +2543,7 @@ public void testCreateViewTable() throws InterruptedException { .setUseLegacySql(true) .build(); TableResult result = bigquery.query(config); + assertNotNull(result.getJobId()); int rowCount = 0; for (FieldValueList row : result.getValues()) { FieldValue timestampCell = row.get(0); @@ -880,7 +2557,7 @@ public void testCreateViewTable() throws InterruptedException { assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); rowCount++; } assertEquals(2, rowCount); @@ -888,7 +2565,7 @@ public void testCreateViewTable() throws InterruptedException { } @Test - public void testCreateMaterializedViewTable() { + void testCreateMaterializedViewTable() { String tableName = "test_materialized_view_table"; TableId tableId = TableId.of(DATASET, tableName); MaterializedViewDefinition viewDefinition = @@ -912,7 +2589,7 @@ public void testCreateMaterializedViewTable() { } @Test - public void testTableIAM() { + void testTableIAM() { String tableName = "test_iam_table"; TableId tableId = TableId.of(DATASET, tableName); StandardTableDefinition tableDefinition = @@ -930,8 +2607,7 @@ public void testTableIAM() { // get and modify policy Policy policy = bigquery.getIamPolicy(tableId); Policy editedPolicy = - policy - .toBuilder() + policy.toBuilder() .addIdentity(Role.of("roles/bigquery.dataViewer"), Identity.allUsers()) .build(); Policy updatedPolicy = bigquery.setIamPolicy(tableId, editedPolicy); @@ -942,7 +2618,7 @@ public void testTableIAM() { } @Test - public void testListTables() { + void testListTables() { String tableName = "test_list_tables"; StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); @@ -961,13 +2637,14 @@ public void testListTables() { } @Test - public void testListTablesWithPartitioning() { + void testListTablesWithPartitioning() { + long expirationMs = 86400000L; + Type partitionType = Type.DAY; String tableName = "test_list_tables_partitioning"; - TimePartitioning timePartitioning = TimePartitioning.of(Type.DAY, EXPIRATION_MS); StandardTableDefinition tableDefinition = StandardTableDefinition.newBuilder() .setSchema(TABLE_SCHEMA) - .setTimePartitioning(timePartitioning) + .setTimePartitioning(TimePartitioning.of(partitionType, expirationMs)) .build(); TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); Table createdPartitioningTable = bigquery.create(tableInfo); @@ -975,16 +2652,25 @@ public void testListTablesWithPartitioning() { try { Page
<Table> tables = bigquery.listTables(DATASET); boolean found = false; - Iterator<Table>
    tableIterator = tables.getValues().iterator(); - while (tableIterator.hasNext() && !found) { - StandardTableDefinition standardTableDefinition = tableIterator.next().getDefinition(); - if (standardTableDefinition.getTimePartitioning() != null - && standardTableDefinition.getTimePartitioning().getType().equals(Type.DAY) - && standardTableDefinition - .getTimePartitioning() - .getExpirationMs() - .equals(EXPIRATION_MS)) { + for (Table table : tables.getValues()) { + // Look for the table that matches the newly partitioned table. Other tables in the + // dataset may not be partitioned or may be partitioned but may not be expiring + // (e.g. `null` expirationMs). + if (!table + .getTableId() + .getTable() + .equals(createdPartitioningTable.getTableId().getTable())) { + continue; + } + + StandardTableDefinition standardTableDefinition = table.getDefinition(); + TimePartitioning timePartitioning = standardTableDefinition.getTimePartitioning(); + assertNotNull(timePartitioning); + assertNotNull(timePartitioning.getExpirationMs()); + if (timePartitioning.getType().equals(partitionType) + && timePartitioning.getExpirationMs().equals(expirationMs)) { found = true; + break; } } assertTrue(found); @@ -994,7 +2680,7 @@ public void testListTablesWithPartitioning() { } @Test - public void testListTablesWithRangePartitioning() { + void testListTablesWithRangePartitioning() { String tableName = "test_list_tables_range_partitioning"; StandardTableDefinition tableDefinition = StandardTableDefinition.newBuilder() @@ -1007,25 +2693,34 @@ public void testListTablesWithRangePartitioning() { try { Page
<Table> tables = bigquery.listTables(DATASET); boolean found = false; - Iterator<Table>
    tableIterator = tables.getValues().iterator(); - while (tableIterator.hasNext() && !found) { - StandardTableDefinition standardTableDefinition = tableIterator.next().getDefinition(); - if (standardTableDefinition.getRangePartitioning() != null) { - assertEquals(RANGE_PARTITIONING, standardTableDefinition.getRangePartitioning()); - assertEquals(RANGE, standardTableDefinition.getRangePartitioning().getRange()); - assertEquals("IntegerField", standardTableDefinition.getRangePartitioning().getField()); - found = true; + for (Table table : tables.getValues()) { + // Look for the table that matches the newly partitioned table. Other tables in the + // dataset may not be partitioned and cannot match to them. + if (!table + .getTableId() + .getTable() + .equals(createdRangePartitioningTable.getTableId().getTable())) { + continue; } + + StandardTableDefinition standardTableDefinition = table.getDefinition(); + RangePartitioning rangePartitioning = standardTableDefinition.getRangePartitioning(); + assertNotNull(rangePartitioning); + assertEquals(RANGE_PARTITIONING, rangePartitioning); + assertEquals(RANGE, rangePartitioning.getRange()); + assertEquals("IntegerField", rangePartitioning.getField()); + found = true; + break; } - assertTrue(found); + assertTrue(found, "Created range partitioned table was not found in the dataset list."); } finally { createdRangePartitioningTable.delete(); } } @Test - public void testListPartitions() throws InterruptedException { - String tableName = "test_table_partitions_" + UUID.randomUUID().toString().substring(0, 8); + void testListPartitions() throws InterruptedException { + String tableName = generateRandomName("test_table_partitions_"); Date date = Date.fromJavaUtilDate(new java.util.Date()); String partitionDate = date.toString().replaceAll("-", ""); TableId tableId = TableId.of(DATASET, tableName + "$" + partitionDate); @@ -1050,7 +2745,7 @@ public void testListPartitions() throws InterruptedException { } @Test - public void testUpdateTable() { + void testUpdateTable() { String tableName = "test_update_table"; StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = @@ -1067,8 +2762,7 @@ public void testUpdateTable() { updateLabels.put("a", null); Table updatedTable = bigquery.update( - createdTable - .toBuilder() + createdTable.toBuilder() .setDescription("Updated Description") .setLabels(updateLabels) .build()); @@ -1081,7 +2775,7 @@ public void testUpdateTable() { } @Test - public void testUpdateTimePartitioning() { + void testUpdateTimePartitioning() { String tableName = "testUpdateTimePartitioning"; TableId tableId = TableId.of(DATASET, tableName); StandardTableDefinition tableDefinition = @@ -1091,50 +2785,39 @@ public void testUpdateTimePartitioning() { .build(); Table table = bigquery.create(TableInfo.of(tableId, tableDefinition)); - assertThat(table.getDefinition()).isInstanceOf(StandardTableDefinition.class); - assertThat( - ((StandardTableDefinition) table.getDefinition()) - .getTimePartitioning() - .getExpirationMs()) + TableDefinition definition = table.getDefinition(); + assertThat(definition).isInstanceOf(StandardTableDefinition.class); + assertThat(((StandardTableDefinition) definition).getTimePartitioning().getExpirationMs()) .isNull(); table = - table - .toBuilder() + table.toBuilder() .setDefinition( - tableDefinition - .toBuilder() + tableDefinition.toBuilder() .setTimePartitioning(TimePartitioning.of(Type.DAY, 42L)) .build()) .build() 
.update(BigQuery.TableOption.fields(BigQuery.TableField.TIME_PARTITIONING)); + TableDefinition updatedDefinition = table.getDefinition(); assertThat( - ((StandardTableDefinition) table.getDefinition()) - .getTimePartitioning() - .getExpirationMs()) + ((StandardTableDefinition) updatedDefinition).getTimePartitioning().getExpirationMs()) .isEqualTo(42L); table = - table - .toBuilder() + table.toBuilder() .setDefinition( - tableDefinition - .toBuilder() + tableDefinition.toBuilder() .setTimePartitioning(TimePartitioning.of(Type.DAY)) .build()) .build() .update(BigQuery.TableOption.fields(BigQuery.TableField.TIME_PARTITIONING)); - assertThat( - ((StandardTableDefinition) table.getDefinition()) - .getTimePartitioning() - .getExpirationMs()) + assertThat(((StandardTableDefinition) definition).getTimePartitioning().getExpirationMs()) .isNull(); table.delete(); } - @Test - public void testUpdateTableWithSelectedFields() { + void testUpdateTableWithSelectedFields() { String tableName = "test_update_with_selected_fields_table"; StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); @@ -1152,34 +2835,56 @@ public void testUpdateTableWithSelectedFields() { assertNull(updatedTable.getLastModifiedTime()); assertNull(updatedTable.getDefinition().getNumBytes()); assertNull(updatedTable.getDefinition().getNumLongTermBytes()); + assertNull( + updatedTable.getDefinition().getNumTimeTravelPhysicalBytes()); + assertNull(updatedTable.getDefinition().getNumTotalLogicalBytes()); + assertNull(updatedTable.getDefinition().getNumActiveLogicalBytes()); + assertNull(updatedTable.getDefinition().getNumLongTermLogicalBytes()); + assertNull(updatedTable.getDefinition().getNumTotalPhysicalBytes()); + assertNull(updatedTable.getDefinition().getNumActivePhysicalBytes()); + assertNull(updatedTable.getDefinition().getNumLongTermPhysicalBytes()); assertNull(updatedTable.getDefinition().getNumRows()); assertTrue(createdTable.delete()); } @Test - public void testUpdateNonExistingTable() { + void testUpdateNonExistingTable() { TableInfo tableInfo = TableInfo.of( TableId.of(DATASET, "test_update_non_existing_table"), StandardTableDefinition.of(SIMPLE_SCHEMA)); - try { - bigquery.update(tableInfo); - fail("BigQueryException was expected"); - } catch (BigQueryException e) { - BigQueryError error = e.getError(); - assertNotNull(error); - assertEquals("notFound", error.getReason()); - assertNotNull(error.getMessage()); - } + BigQueryException exception = + assertThrows( + BigQueryException.class, + () -> bigquery.update(tableInfo), + "BigQueryException was expected"); + BigQueryError error = exception.getError(); + assertNotNull(error); + assertEquals("notFound", error.getReason()); + assertNotNull(error.getMessage()); } @Test - public void testDeleteNonExistingTable() { + void testDeleteNonExistingTable() { assertFalse(bigquery.delete("test_delete_non_existing_table")); } @Test - public void testInsertAll() throws IOException { + void testDeleteJob() { + String query = "SELECT 17 as foo"; + QueryJobConfiguration config = QueryJobConfiguration.of(query); + String jobName = "jobId_" + UUID.randomUUID().toString(); + JobId jobId = + JobId.newBuilder().setLocation("us-east1").setJob(jobName).setProject(PROJECT_ID).build(); + Job createdJob = bigquery.create(JobInfo.of(jobId, config)); + Job remoteJob = bigquery.getJob(createdJob.getJobId()); + assertEquals(createdJob.getJobId(), remoteJob.getJobId()); + 
assertTrue(bigquery.delete(jobId)); + assertNull(bigquery.getJob(jobId)); + } + + @Test + void testInsertAll() throws IOException { String tableName = "test_insert_all_table"; StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); @@ -1238,7 +2943,7 @@ public void testInsertAll() throws IOException { } @Test - public void testInsertAllWithSuffix() throws InterruptedException { + void testInsertAllWithSuffix() throws InterruptedException { String tableName = "test_insert_all_with_suffix_table"; StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); @@ -1306,7 +3011,7 @@ public void testInsertAllWithSuffix() throws InterruptedException { } @Test - public void testInsertAllWithErrors() { + void testInsertAllWithErrors() { String tableName = "test_insert_all_with_errors_table"; StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(TableId.of(DATASET, tableName), tableDefinition); @@ -1374,8 +3079,9 @@ public void testInsertAllWithErrors() { assertTrue(bigquery.delete(TableId.of(DATASET, tableName))); } + /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testListAllTableData() { + void testListAllTableData() { Page rows = bigquery.listTableData(TABLE_ID); int rowCount = 0; for (FieldValueList row : rows.getValues()) { @@ -1403,13 +3109,13 @@ public void testListAllTableData() { assertEquals("stringValue", stringCell.getStringValue()); assertEquals(0, integerArrayCell.getRepeatedValue().get(0).getLongValue()); assertEquals(1, integerArrayCell.getRepeatedValue().get(1).getLongValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); assertArrayEquals(BYTES, bytesCell.getBytesValue()); assertEquals(-14182916000000L, recordCell.getRecordValue().get(0).getTimestampValue()); assertTrue(recordCell.getRecordValue().get(1).isNull()); assertEquals(1, recordCell.getRecordValue().get(2).getRepeatedValue().get(0).getLongValue()); assertEquals(0, recordCell.getRecordValue().get(2).getRepeatedValue().get(1).getLongValue()); - assertEquals(true, recordCell.getRecordValue().get(3).getBooleanValue()); + assertTrue(recordCell.getRecordValue().get(3).getBooleanValue()); assertEquals(3, integerCell.getLongValue()); assertEquals(1.2, floatCell.getDoubleValue(), 0.0001); assertEquals("POINT(-122.35022 47.649154)", geographyCell.getStringValue()); @@ -1420,7 +3126,7 @@ public void testListAllTableData() { } @Test - public void testListPageWithStartIndex() { + void testListPageWithStartIndex() { String tableName = "midyear_population_agespecific"; TableId tableId = TableId.of(PUBLIC_PROJECT, PUBLIC_DATASET, tableName); Table table = bigquery.getTable(tableId); @@ -1439,7 +3145,7 @@ public void testListPageWithStartIndex() { } @Test - public void testModelLifecycle() throws InterruptedException { + void testModelLifecycle() throws InterruptedException { String modelName = RemoteBigQueryHelper.generateModelName(); @@ -1452,7 +3158,7 @@ public void testModelLifecycle() throws InterruptedException { + "`" + "OPTIONS ( " + "model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " @@ -1470,21 +3176,21 @@ public void testModelLifecycle() throws 
InterruptedException { ModelId modelId = ModelId.of(MODEL_DATASET, modelName); Model model = bigquery.getModel(modelId); assertNotNull(model); - assertEquals(model.getModelType(), "LINEAR_REGRESSION"); + assertEquals("LINEAR_REGRESSION", model.getModelType()); // Compare the extended model metadata. - assertEquals(model.getFeatureColumns().get(0).getName(), "f1"); - assertEquals(model.getLabelColumns().get(0).getName(), "predicted_label"); + assertEquals("f1", model.getFeatureColumns().get(0).getName()); + assertEquals("predicted_label", model.getLabelColumns().get(0).getName()); assertEquals( - model.getTrainingRuns().get(0).getTrainingOptions().getLearnRateStrategy(), "CONSTANT"); + "CONSTANT", model.getTrainingRuns().get(0).getTrainingOptions().getLearnRateStrategy()); // Mutate metadata. ModelInfo info = model.toBuilder().setDescription("TEST").build(); Model afterUpdate = bigquery.update(info); - assertEquals(afterUpdate.getDescription(), "TEST"); + assertEquals("TEST", afterUpdate.getDescription()); // Ensure model is present in listModels. Page models = bigquery.listModels(MODEL_DATASET); - Boolean found = false; + boolean found = false; for (Model m : models.getValues()) { if (m.getModelId().getModel().equals(modelName)) { found = true; @@ -1498,18 +3204,18 @@ public void testModelLifecycle() throws InterruptedException { } @Test - public void testEmptyListModels() { + void testEmptyListModels() { String datasetId = "test_empty_dataset_list_models_" + RANDOM_ID; assertNotNull(bigquery.create(DatasetInfo.of(datasetId))); Page models = bigquery.listModels(datasetId, BigQuery.ModelListOption.pageSize(100)); assertEquals(0, Iterables.size(models.getValues())); assertFalse(models.hasNextPage()); assertNull(models.getNextPageToken()); - assertTrue(bigquery.delete(datasetId)); + RemoteBigQueryHelper.forceDelete(bigquery, datasetId); } @Test - public void testEmptyListRoutines() { + void testEmptyListRoutines() { String datasetId = "test_empty_dataset_list_routines_" + RANDOM_ID; assertNotNull(bigquery.create(DatasetInfo.of(datasetId))); Page routines = @@ -1517,11 +3223,11 @@ public void testEmptyListRoutines() { assertEquals(0, Iterables.size(routines.getValues())); assertFalse(routines.hasNextPage()); assertNull(routines.getNextPageToken()); - assertTrue(bigquery.delete(datasetId)); + RemoteBigQueryHelper.forceDelete(bigquery, datasetId); } @Test - public void testRoutineLifecycle() throws InterruptedException { + void testRoutineLifecycle() throws InterruptedException { String routineName = RemoteBigQueryHelper.generateRoutineName(); // Create a routine using SQL. String sql = @@ -1535,23 +3241,22 @@ public void testRoutineLifecycle() throws InterruptedException { RoutineId routineId = RoutineId.of(ROUTINE_DATASET, routineName); Routine routine = bigquery.getRoutine(routineId); assertNotNull(routine); - assertEquals(routine.getRoutineType(), "SCALAR_FUNCTION"); + assertEquals("SCALAR_FUNCTION", routine.getRoutineType()); // Mutate metadata. RoutineInfo newInfo = - routine - .toBuilder() + routine.toBuilder() .setBody("x * 4") .setReturnType(routine.getReturnType()) .setArguments(routine.getArguments()) .setRoutineType(routine.getRoutineType()) .build(); Routine afterUpdate = bigquery.update(newInfo); - assertEquals(afterUpdate.getBody(), "x * 4"); + assertEquals("x * 4", afterUpdate.getBody()); // Ensure routine is present in listRoutines. 
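+    // (Illustrative note, not part of the original change: getValues() only walks the first
+    // page of results; a dataset with many routines would need routines.iterateAll() to be
+    // certain of finding the new routine.)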
Page routines = bigquery.listRoutines(ROUTINE_DATASET); - Boolean found = false; + boolean found = false; for (Routine r : routines.getValues()) { if (r.getRoutineId().getRoutine().equals(routineName)) { found = true; @@ -1565,7 +3270,7 @@ public void testRoutineLifecycle() throws InterruptedException { } @Test - public void testRoutineAPICreation() { + void testRoutineAPICreation() { String routineName = RemoteBigQueryHelper.generateRoutineName(); RoutineId routineId = RoutineId.of(ROUTINE_DATASET, routineName); RoutineInfo routineInfo = @@ -1583,11 +3288,11 @@ public void testRoutineAPICreation() { Routine routine = bigquery.create(routineInfo); assertNotNull(routine); - assertEquals(routine.getRoutineType(), "SCALAR_FUNCTION"); + assertEquals("SCALAR_FUNCTION", routine.getRoutineType()); } @Test - public void testRoutineAPICreationJavascriptUDF() { + void testRoutineAPICreationJavascriptUDF() { String routineName = RemoteBigQueryHelper.generateRoutineName(); RoutineId routineId = RoutineId.of(ROUTINE_DATASET, routineName); RoutineInfo routineInfo = @@ -1608,14 +3313,69 @@ public void testRoutineAPICreationJavascriptUDF() { Routine routine = bigquery.create(routineInfo); assertNotNull(routine); - assertEquals(routine.getLanguage(), "JAVASCRIPT"); - assertEquals(routine.getDeterminismLevel(), "DETERMINISTIC"); - assertEquals(routine.getRoutineType(), "SCALAR_FUNCTION"); - assertEquals(routine.getReturnType(), StandardSQLDataType.newBuilder("STRING").build()); + assertEquals("JAVASCRIPT", routine.getLanguage()); + assertEquals("DETERMINISTIC", routine.getDeterminismLevel()); + assertEquals("SCALAR_FUNCTION", routine.getRoutineType()); + assertEquals(StandardSQLDataType.newBuilder("STRING").build(), routine.getReturnType()); + } + + @Test + void testRoutineAPICreationTVF() { + String routineName = RemoteBigQueryHelper.generateRoutineName(); + RoutineId routineId = RoutineId.of(ROUTINE_DATASET, routineName); + List columns = + ImmutableList.of( + StandardSQLField.newBuilder("x", StandardSQLDataType.newBuilder("INT64").build()) + .build()); + StandardSQLTableType returnTableType = StandardSQLTableType.newBuilder(columns).build(); + RoutineInfo routineInfo = + RoutineInfo.newBuilder(routineId) + .setRoutineType("TABLE_VALUED_FUNCTION") + .setLanguage("SQL") + .setArguments( + ImmutableList.of( + RoutineArgument.newBuilder() + .setName("filter") + .setDataType(StandardSQLDataType.newBuilder("INT64").build()) + .build())) + .setReturnTableType(returnTableType) + .setBody("SELECT x FROM UNNEST([1,2,3]) x WHERE x = filter") + .build(); + Routine routine = bigquery.create(routineInfo); + assertNotNull(routine); + assertEquals("TABLE_VALUED_FUNCTION", routine.getRoutineType()); + assertEquals(returnTableType, routine.getReturnTableType()); + } + + @Test + void testRoutineDataGovernanceType() { + String routineName = RemoteBigQueryHelper.generateRoutineName(); + RoutineId routineId = RoutineId.of(ROUTINE_DATASET, routineName); + RoutineInfo routineInfo = + RoutineInfo.newBuilder(routineId) + .setLanguage("SQL") + .setRoutineType("SCALAR_FUNCTION") + .setBody("x") + .setArguments( + ImmutableList.of( + RoutineArgument.newBuilder() + .setName("x") + .setDataType(StandardSQLDataType.newBuilder("INT64").build()) + .build())) + .setReturnType(StandardSQLDataType.newBuilder("INT64").build()) + .setDataGovernanceType("DATA_MASKING") + .build(); + + Routine routine = bigquery.create(routineInfo); + assertNotNull(routine); + assertEquals("SQL", routine.getLanguage()); + assertEquals("SCALAR_FUNCTION", 
routine.getRoutineType()); + assertEquals(StandardSQLDataType.newBuilder("INT64").build(), routine.getReturnType()); + assertEquals("DATA_MASKING", routine.getDataGovernanceType()); } @Test - public void testAuthorizeRoutine() { + void testAuthorizeRoutine() { String routineName = RemoteBigQueryHelper.generateRoutineName(); RoutineId routineId = RoutineId.of(PROJECT_ID, ROUTINE_DATASET, routineName); RoutineInfo routineInfo = @@ -1632,7 +3392,7 @@ public void testAuthorizeRoutine() { .build(); Routine routine = bigquery.create(routineInfo); assertNotNull(routine); - assertEquals(routine.getRoutineType(), "SCALAR_FUNCTION"); + assertEquals("SCALAR_FUNCTION", routine.getRoutineType()); Dataset routineDataset = bigquery.getDataset(ROUTINE_DATASET); List routineAcl = new ArrayList<>(routineDataset.getAcl()); routineAcl.add(Acl.of(new Acl.Routine(routineId))); @@ -1641,47 +3401,159 @@ public void testAuthorizeRoutine() { } @Test - public void testSingleStatementsQueryException() throws InterruptedException { + void testAuthorizeDataset() { + String datasetName = RemoteBigQueryHelper.generateDatasetName(); + DatasetId datasetId = DatasetId.of(PROJECT_ID, datasetName); + List targetTypes = ImmutableList.of("VIEWS"); + // Specify the acl which will be shared to the authorized dataset + List acl = + ImmutableList.of( + Acl.of(new Acl.Group("projectOwners"), Acl.Role.OWNER), + Acl.of(new Acl.IamMember("allUsers"), Acl.Role.READER)); + DatasetInfo datasetInfo = + DatasetInfo.newBuilder(datasetId).setAcl(acl).setDescription("shared Dataset").build(); + Dataset sharedDataset = bigquery.create(datasetInfo); + assertNotNull(sharedDataset); + assertEquals("shared Dataset", sharedDataset.getDescription()); + // Get the current metadata for the dataset you want to share by calling the datasets.get method + List sharedDatasetAcl = new ArrayList<>(sharedDataset.getAcl()); + + // Create a new dataset to be authorized + String authorizedDatasetName = RemoteBigQueryHelper.generateDatasetName(); + DatasetId authorizedDatasetId = DatasetId.of(PROJECT_ID, authorizedDatasetName); + DatasetInfo authorizedDatasetInfo = + DatasetInfo.newBuilder(authorizedDatasetId) + .setDescription("new Dataset to be authorized by the sharedDataset") + .build(); + Dataset authorizedDataset = bigquery.create(authorizedDatasetInfo); + assertNotNull(authorizedDataset); + assertEquals( + "new Dataset to be authorized by the sharedDataset", authorizedDataset.getDescription()); + + // Add the new DatasetAccessEntry object to the existing sharedDatasetAcl list + DatasetAclEntity datasetEntity = new DatasetAclEntity(authorizedDatasetId, targetTypes); + sharedDatasetAcl.add(Acl.of(datasetEntity)); + + // Update the dataset with the added authorization + Dataset updatedDataset = sharedDataset.toBuilder().setAcl(sharedDatasetAcl).build().update(); + + // Verify that the authorized dataset has been added + assertEquals(sharedDatasetAcl, updatedDataset.getAcl()); + + RemoteBigQueryHelper.forceDelete(bigquery, datasetName); + RemoteBigQueryHelper.forceDelete(bigquery, authorizedDatasetName); + } + + /* TODO(prasmish): replicate the entire test case for executeSelect */ + @Test + void testSingleStatementsQueryException() throws InterruptedException { String invalidQuery = String.format("INSERT %s.%s VALUES('3', 10);", DATASET, TABLE_ID.getTable()); - try { - bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor(); - fail("BigQueryException was expected"); - } catch (BigQueryException ex) { - assertEquals("invalidQuery", 
ex.getReason()); - assertNotNull(ex.getMessage()); - BigQueryError error = ex.getError(); - assertEquals("invalidQuery", error.getReason()); - assertNotNull(error.getMessage()); - } + BigQueryException exception = + assertThrows( + BigQueryException.class, + () -> bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor(), + "BigQueryException was expected"); + assertEquals("invalidQuery", exception.getReason()); + assertNotNull(exception.getMessage()); + BigQueryError error = exception.getError(); + assertEquals("invalidQuery", error.getReason()); + assertNotNull(error.getMessage()); } + /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testMultipleStatementsQueryException() throws InterruptedException { + void testMultipleStatementsQueryException() throws InterruptedException { String invalidQuery = String.format( "INSERT %s.%s VALUES('3', 10); DELETE %s.%s where c2=3;", DATASET, TABLE_ID.getTable(), DATASET, TABLE_ID.getTable()); - try { - bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor(); - fail("BigQueryException was expected"); - } catch (BigQueryException ex) { - assertEquals("invalidQuery", ex.getReason()); - assertNotNull(ex.getMessage()); - BigQueryError error = ex.getError(); - assertEquals("invalidQuery", error.getReason()); - assertNotNull(error.getMessage()); + BigQueryException exception = + assertThrows( + BigQueryException.class, + () -> bigquery.create(JobInfo.of(QueryJobConfiguration.of(invalidQuery))).waitFor(), + "BigQueryException was expected"); + assertEquals("invalidQuery", exception.getReason()); + assertNotNull(exception.getMessage()); + BigQueryError error = exception.getError(); + assertEquals("invalidQuery", error.getReason()); + assertNotNull(error.getMessage()); + } + + @Test + void testTimestamp() throws InterruptedException { + String query = "SELECT TIMESTAMP '2022-01-24T23:54:25.095574Z'"; + String timestampStringValueExpected = "2022-01-24T23:54:25.095574Z"; + + TableResult resultInteractive = + bigquery.query( + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(DATASET)) + .build()); + assertNotNull(resultInteractive.getJobId()); + for (FieldValueList row : resultInteractive.getValues()) { + FieldValue timeStampCell = row.get(0); + Instant timestampStringValueActual = timeStampCell.getTimestampInstant(); + assertEquals(timestampStringValueExpected, timestampStringValueActual.toString()); + } + } + + @Test + void testLosslessTimestamp() throws InterruptedException { + String query = "SELECT TIMESTAMP '2022-01-24T23:54:25.095574Z'"; + long expectedTimestamp = 1643068465095574L; + + TableResult result = + bigquery.query( + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(DATASET)) + .build()); + assertNotNull(result.getJobId()); + for (FieldValueList row : result.getValues()) { + FieldValue timeStampCell = row.get(0); + assertFalse(timeStampCell.getUseInt64Timestamps()); + assertEquals(expectedTimestamp, timeStampCell.getTimestampValue()); + } + + // Create new BQ object to toggle lossless timestamps without affecting + // other tests. 
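+    // (Illustrative note, not part of the original change: useInt64Timestamp keeps the
+    // value as exact microseconds since epoch rather than a floating-point representation,
+    // so sub-millisecond timestamps like ...095574 survive the round trip unchanged.)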
+ RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + DataFormatOptions dataFormatOptions = + DataFormatOptions.newBuilder().useInt64Timestamp(true).build(); + BigQueryOptions options = + bigqueryHelper.getOptions().toBuilder().setDataFormatOptions(dataFormatOptions).build(); + BigQuery bigqueryLossless = options.getService(); + + TableResult resultLossless = + bigqueryLossless.query( + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(DATASET)) + .build()); + assertNotNull(resultLossless.getJobId()); + for (FieldValueList row : resultLossless.getValues()) { + FieldValue timeStampCellLossless = row.get(0); + assertTrue(timeStampCellLossless.getUseInt64Timestamps()); + assertEquals(expectedTimestamp, timeStampCellLossless.getTimestampValue()); } } + /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testQuery() throws InterruptedException { + void testQuery() throws InterruptedException { String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); QueryJobConfiguration config = - QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); + QueryJobConfiguration.newBuilder(query) + // Disable the cache as query plans do not exist from cached results + // This will force generation of execution plan + .setUseQueryCache(false) + .setDefaultDataset(DatasetId.of(DATASET)) + .build(); Job job = bigquery.create(JobInfo.of(JobId.of(), config)); + job = job.waitFor(); + assertNotNull(job); TableResult result = job.getQueryResults(); + assertNotNull(result.getJobId()); assertEquals(QUERY_RESULT_SCHEMA, result.getSchema()); int rowCount = 0; for (FieldValueList row : result.getValues()) { @@ -1696,25 +3568,210 @@ public void testQuery() throws InterruptedException { assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); rowCount++; } assertEquals(2, rowCount); + // Query Plan will exist for a completed job Job job2 = bigquery.getJob(job.getJobId()); JobStatistics.QueryStatistics statistics = job2.getStatistics(); assertNotNull(statistics.getQueryPlan()); } @Test - public void testQueryCaseInsensitiveSchemaFieldByGetName() throws InterruptedException { - String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); + void testQueryStatistics() throws InterruptedException { + // Use CURRENT_TIMESTAMP to avoid potential caching. 
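+ // (A cache hit may report no query plan or slot usage; the statistics assertions below + // rely on the job actually executing, hence the nondeterministic expression and + // setUseQueryCache(false).)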
+ String query = "SELECT CURRENT_TIMESTAMP() AS ts"; QueryJobConfiguration config = - QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(DATASET)) + .setUseQueryCache(false) + .build(); Job job = bigquery.create(JobInfo.of(JobId.of(), config)); + job = job.waitFor(); - TableResult result = job.getQueryResults(); - assertEquals(QUERY_RESULT_SCHEMA, result.getSchema()); + JobStatistics.QueryStatistics statistics = job.getStatistics(); + assertNotNull(statistics.getQueryPlan()); + assertThat(statistics.getTotalSlotMs()).isGreaterThan(0L); + } + + @Test + void testExecuteSelectDefaultConnectionSettings() throws SQLException { + // Use the default connection settings + Connection connection = bigquery.createConnection(); + String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;"; + BigQueryResult bigQueryResult = connection.executeSelect(query); + assertEquals(42, bigQueryResult.getTotalRows()); + assertFalse(bigQueryResult.getBigQueryResultStats().getQueryStatistics().getUseReadApi()); + } + + @Test + void testExecuteSelectWithReadApi() throws SQLException { + final int rowLimit = 5000; + final String QUERY = + "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 LIMIT %s"; + bigquery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_REQUIRED); + // Job timeout is somewhat arbitrary - just ensures that fast query is not used. + // min result size and page row count ratio ensure that the ReadAPI is used. + ConnectionSettings connectionSettingsReadAPIEnabledFastQueryDisabled = + ConnectionSettings.newBuilder() + .setUseReadAPI(true) + .setJobTimeoutMs(Long.MAX_VALUE) + .setMinResultSize(500) + .setTotalToPageRowCountRatio(1) + .build(); + + Connection connectionReadAPIEnabled = + bigquery.createConnection(connectionSettingsReadAPIEnabledFastQueryDisabled); + + String selectQuery = String.format(QUERY, rowLimit); + + BigQueryResult bigQueryResultSet = connectionReadAPIEnabled.executeSelect(selectQuery); + ResultSet rs = bigQueryResultSet.getResultSet(); + // Paginate results to avoid an InterruptedException + while (rs.next()) {} + + assertTrue(bigQueryResultSet.getBigQueryResultStats().getQueryStatistics().getUseReadApi()); + connectionReadAPIEnabled.close(); + } + + @Test + void testExecuteSelectWithFastQueryReadApi() throws SQLException { + final int rowLimit = 5000; + final String QUERY = + "SELECT * FROM bigquery-public-data.new_york_taxi_trips.tlc_yellow_trips_2017 LIMIT %s"; + // min result size and page row count ratio ensure that the ReadAPI is used. 
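+ // (With setMinResultSize(500) and setTotalToPageRowCountRatio(1), a result of at least 500 + // rows whose total-to-page row ratio reaches 1 is expected to go through the Storage Read + // API; the 5000-row query above crosses both thresholds, which the getUseReadApi() + // assertion at the end verifies.)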
+ ConnectionSettings connectionSettingsReadAPIEnabled = + ConnectionSettings.newBuilder() + .setUseReadAPI(true) + .setMinResultSize(500) + .setTotalToPageRowCountRatio(1) + .build(); + + Connection connectionReadAPIEnabled = + bigquery.createConnection(connectionSettingsReadAPIEnabled); + + String selectQuery = String.format(QUERY, rowLimit); + + BigQueryResult bigQueryResultSet = connectionReadAPIEnabled.executeSelect(selectQuery); + ResultSet rs = bigQueryResultSet.getResultSet(); + // Paginate results to avoid an InterruptedException + while (rs.next()) {} + + assertTrue(bigQueryResultSet.getBigQueryResultStats().getQueryStatistics().getUseReadApi()); + connectionReadAPIEnabled.close(); + } + + @Test + void testExecuteSelectReadApiEmptyResultSet() throws SQLException { + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setJobTimeoutMs( + Long.MAX_VALUE) // Force executeSelect to use ReadAPI instead of fast query. + .setUseReadAPI(true) + .setUseQueryCache(false) + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + String query = "SELECT TIMESTAMP '2022-01-24T23:54:25.095574Z' LIMIT 0"; + BigQueryResult bigQueryResult = connection.executeSelect(query); + + ResultSet rs = bigQueryResult.getResultSet(); + assertThat(rs.next()).isFalse(); + assertThat(bigQueryResult.getTotalRows()).isEqualTo(0); + } + + @Test + void testExecuteSelectWithCredentials() throws SQLException { + // This test validates that executeSelect uses the same credential provided by the BigQuery + // object used to create the Connection client. + // This is done in the following scenarios: + // 1. Validate that setting a valid credential executes the query. + // 2. Validate that setting an invalid credential causes failure. + + // Scenario 1. + // Create a new bigQuery object but explicitly set the credentials. + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + BigQueryOptions bigQueryOptions = + bigqueryHelper.getOptions().toBuilder() + .setCredentials(bigquery.getOptions().getCredentials()) + .build(); + BigQuery bigQueryGoodCredentials = bigQueryOptions.getService(); + + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setPriority(Priority.INTERACTIVE) // Force non-fast query to use BigQueryReadClient. + .setDefaultDataset(DatasetId.of(DATASET)) + .build(); + Connection connectionGoodCredentials = + bigQueryGoodCredentials.createConnection(connectionSettings); + String query = + "SELECT * FROM " + + TABLE_ID_LARGE.getTable(); // Large query result is needed to use BigQueryReadClient. + BigQueryResult bigQueryResult = connectionGoodCredentials.executeSelect(query); + assertEquals(313348, bigQueryResult.getTotalRows()); + assertTrue(bigQueryResult.getBigQueryResultStats().getQueryStatistics().getUseReadApi()); + + // Scenario 2. + // Create a new bigQuery object but explicitly set an invalid credential.
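+ // (loadCredentials and FAKE_JSON_CRED_WITH_GOOGLE_DOMAIN are helpers defined elsewhere in + // this test class; the fake key parses but cannot authenticate, so the executeSelect below + // is expected to throw BigQuerySQLException.)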
+ BigQueryOptions bigQueryOptionsBadCredentials = + bigqueryHelper.getOptions().toBuilder() + .setCredentials(loadCredentials(FAKE_JSON_CRED_WITH_GOOGLE_DOMAIN)) + .build(); + BigQuery bigQueryBadCredentials = bigQueryOptionsBadCredentials.getService(); + Connection connectionBadCredentials = + bigQueryBadCredentials.createConnection(connectionSettings); + assertThrows(BigQuerySQLException.class, () -> connectionBadCredentials.executeSelect(query)); + } + + /* TODO(prasmish): replicate the entire test case for executeSelect */ + @Test + void testQueryTimeStamp() throws InterruptedException { + String query = "SELECT TIMESTAMP '2022-01-24T23:54:25.095574Z'"; + Instant beforeQueryInstant = Instant.parse("2022-01-24T23:54:25.095574Z"); + long microsBeforeQuery = + TimeUnit.SECONDS.toMicros(beforeQueryInstant.getEpochSecond()) + + TimeUnit.NANOSECONDS.toMicros(beforeQueryInstant.getNano()); + + // Verify that timestamp remains the same when priority is set to INTERACTIVE + TableResult result = + bigquery.query( + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(DATASET)) + .setPriority(QueryJobConfiguration.Priority.INTERACTIVE) + .build()); + assertNotNull(result.getJobId()); + for (FieldValueList row : result.getValues()) { + FieldValue timeStampCell = row.get(0); + long microsAfterQuery = timeStampCell.getTimestampValue(); + assertEquals(microsBeforeQuery, microsAfterQuery); + } + + // Verify that timestamp remains the same without priority set to INTERACTIVE + TableResult resultDefaultPriority = + bigquery.query( + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(DATASET)) + .build()); + assertNotNull(resultDefaultPriority.getJobId()); + for (FieldValueList row : resultDefaultPriority.getValues()) { + FieldValue timeStampCell = row.get(0); + long microsAfterQuery = timeStampCell.getTimestampValue(); + assertEquals(microsBeforeQuery, microsAfterQuery); + } + } + + /* TODO(prasmish): replicate the entire test case for executeSelect */ + @Test + void testQueryCaseInsensitiveSchemaFieldByGetName() throws InterruptedException { + String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); + Job job = bigquery.create(JobInfo.of(JobId.of(), config)); + + TableResult result = job.getQueryResults(); + assertNotNull(result.getJobId()); + assertEquals(QUERY_RESULT_SCHEMA, result.getSchema()); int rowCount = 0; for (FieldValueList row : result.getValues()) { FieldValue timestampCell = row.get(0); @@ -1734,8 +3791,9 @@ public void testQueryCaseInsensitiveSchemaFieldByGetName() throws InterruptedExc assertEquals(2, rowCount); } + /* TODO(prasmish): replicate bigquery.query part of the test case for executeSelect - modify this test case */ @Test - public void testQueryExternalHivePartitioningOptionAutoLayout() throws InterruptedException { + void testQueryExternalHivePartitioningOptionAutoLayout() throws InterruptedException { String tableName = "test_queryexternalhivepartition_autolayout_table"; String sourceUri = "gs://" + CLOUD_SAMPLES_DATA + "/bigquery/hive-partitioning-samples/autolayout/*"; @@ -1748,16 +3806,20 @@ public void testQueryExternalHivePartitioningOptionAutoLayout() throws Interrupt .setSourceUriPrefix(sourceUriPrefix) .build(); TableId tableId = TableId.of(DATASET, tableName); + ParquetOptions parquetOptions = +
ParquetOptions.newBuilder().setEnableListInference(true).setEnumAsString(true).build(); ExternalTableDefinition externalTable = ExternalTableDefinition.newBuilder(sourceUri, FormatOptions.parquet()) .setAutodetect(true) .setHivePartitioningOptions(hivePartitioningOptions) + .setFormatOptions(parquetOptions) .build(); assertNotNull(bigquery.create(TableInfo.of(tableId, externalTable))); String query = String.format( "SELECT COUNT(*) as ct FROM %s.%s WHERE dt=\"2020-11-15\"", DATASET, tableName); TableResult result = bigquery.query(QueryJobConfiguration.of(query)); + assertNotNull(result.getJobId()); for (FieldValueList fieldValues : result.iterateAll()) { assertEquals(50, fieldValues.get("ct").getLongValue()); } @@ -1765,8 +3827,9 @@ public void testQueryExternalHivePartitioningOptionAutoLayout() throws Interrupt assertTrue(bigquery.delete(tableId)); } + /* TODO(prasmish): replicate bigquery.query part of the test case for executeSelect - modify this test case */ @Test - public void testQueryExternalHivePartitioningOptionCustomLayout() throws InterruptedException { + void testQueryExternalHivePartitioningOptionCustomLayout() throws InterruptedException { String tableName = "test_queryexternalhivepartition_customlayout_table"; String sourceUri = "gs://" + CLOUD_SAMPLES_DATA + "/bigquery/hive-partitioning-samples/customlayout/*"; @@ -1774,6 +3837,8 @@ public void testQueryExternalHivePartitioningOptionCustomLayout() throws Interru "gs://" + CLOUD_SAMPLES_DATA + "/bigquery/hive-partitioning-samples/customlayout/{pkey:STRING}/"; + ParquetOptions parquetOptions = + ParquetOptions.newBuilder().setEnableListInference(true).setEnumAsString(true).build(); HivePartitioningOptions hivePartitioningOptions = HivePartitioningOptions.newBuilder() .setMode("CUSTOM") @@ -1785,11 +3850,13 @@ public void testQueryExternalHivePartitioningOptionCustomLayout() throws Interru ExternalTableDefinition.newBuilder(sourceUri, FormatOptions.parquet()) .setAutodetect(true) .setHivePartitioningOptions(hivePartitioningOptions) + .setFormatOptions(parquetOptions) .build(); assertNotNull(bigquery.create(TableInfo.of(tableId, externalTable))); String query = String.format("SELECT COUNT(*) as ct FROM %s.%s WHERE pkey=\"foo\"", DATASET, tableName); TableResult result = bigquery.query(QueryJobConfiguration.of(query)); + assertNotNull(result.getJobId()); for (FieldValueList fieldValues : result.iterateAll()) { assertEquals(50, fieldValues.get("ct").getLongValue()); } @@ -1798,12 +3865,736 @@ public void testQueryExternalHivePartitioningOptionCustomLayout() throws Interru } @Test - public void testFastQueryMultipleRuns() throws InterruptedException { + void testConnectionImplDryRun() throws SQLException { + String query = + String.format( + "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, TimestampField, FloatField, NumericField, TimeField, DateField, DateTimeField , GeographyField, RecordField.BytesField, RecordField.BooleanField, IntegerArrayField from %s where StringField = ? 
order by TimestampField", + TABLE_ID_FAST_QUERY_BQ_RESULTSET.getTable()); + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setDefaultDataset(DatasetId.of(DATASET)) + .setCreateSession(true) + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryDryRunResult bigQueryDryRunResultSet = connection.dryRun(query); + assertNotNull(bigQueryDryRunResultSet.getSchema()); + assertEquals( + BQ_RESULTSET_EXPECTED_SCHEMA, bigQueryDryRunResultSet.getSchema()); // match the schema + List queryParameters = bigQueryDryRunResultSet.getQueryParameters(); + assertEquals(StandardSQLTypeName.STRING, queryParameters.get(0).getValue().getType()); + QueryStatistics queryStatistics = bigQueryDryRunResultSet.getStatistics().getQueryStatistics(); + assertNotNull(queryStatistics); + SessionInfo sessionInfo = bigQueryDryRunResultSet.getStatistics().getSessionInfo(); + assertNotNull(sessionInfo.getSessionId()); + assertEquals(StatementType.SELECT, queryStatistics.getStatementType()); + } + + @Test + void testConnectionImplDryRunNoQueryParameters() throws SQLException { + String query = + String.format( + "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, " + + "TimestampField, FloatField, NumericField, TimeField, DateField, DateTimeField, " + + "GeographyField, RecordField.BytesField, RecordField.BooleanField, " + + "IntegerArrayField from %s order by TimestampField", + TABLE_ID_FAST_QUERY_BQ_RESULTSET.getTable()); + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setDefaultDataset(DatasetId.of(DATASET)) + .setCreateSession(true) + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryDryRunResult bigQueryDryRunResultSet = connection.dryRun(query); + assertNotNull(bigQueryDryRunResultSet.getSchema()); + assertEquals( + BQ_RESULTSET_EXPECTED_SCHEMA, bigQueryDryRunResultSet.getSchema()); // match the schema + List queryParameters = bigQueryDryRunResultSet.getQueryParameters(); + assertEquals(0, queryParameters.size()); + QueryStatistics queryStatistics = bigQueryDryRunResultSet.getStatistics().getQueryStatistics(); + assertNotNull(queryStatistics); + SessionInfo sessionInfo = bigQueryDryRunResultSet.getStatistics().getSessionInfo(); + assertNotNull(sessionInfo.getSessionId()); + assertEquals(StatementType.SELECT, queryStatistics.getStatementType()); + } + + @Test + // This test case test the order of the records, making sure that the result is not jumbled up due + // to the multithreaded BigQueryResult implementation + void testBQResultSetMultiThreadedOrder() throws SQLException { + String query = + "SELECT date FROM " + + TABLE_ID_LARGE.getTable() + + " where date is not null order by date asc limit 300000"; + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setDefaultDataset(DatasetId.of(DATASET)) + .setNumBufferedRows(10000) // page size + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryResult bigQueryResult = connection.executeSelect(query); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + assertTrue(rs.next()); + ++cnt; + java.sql.Date lastDate = rs.getDate(0); + while (rs.next()) { + assertNotNull(rs.getDate(0)); + assertTrue(rs.getDate(0).getTime() >= lastDate.getTime()); // sorted order is maintained + lastDate = rs.getDate(0); + ++cnt; + } + assertEquals(300000, cnt); // total 300000 rows should be read + } + + @Test + void testBQResultSetPaginationSlowQuery() 
throws SQLException { + String query = + "SELECT date, county, state_name, confirmed_cases, deaths FROM " + + TABLE_ID_LARGE.getTable() + + " where date is not null and county is not null and state_name is not null order by date limit 300000"; + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setDefaultDataset(DatasetId.of(DATASET)) + .setNumBufferedRows(10000) // page size + .setJobTimeoutMs( + 15000L) // So that ConnectionImpl.isFastQuerySupported returns false, and the slow + // query route gets executed + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryResult bigQueryResult = connection.executeSelect(query); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + while (rs.next()) { // pagination starts after approx 120,000 records + assertNotNull(rs.getDate(0)); + assertNotNull(rs.getString(1)); + assertNotNull(rs.getString(2)); + assertTrue(rs.getInt(3) >= 0); + assertTrue(rs.getInt(4) >= 0); + ++cnt; + } + assertEquals(300000, cnt); // total 300000 rows should be read + } + + @Test + void testExecuteSelectSinglePageTableRow() throws SQLException { + String query = + "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, TimestampField, FloatField, " + + "NumericField, TimeField, DateField, DateTimeField , GeographyField, RecordField.BytesField, RecordField.BooleanField, IntegerArrayField from " + + TABLE_ID_FAST_QUERY_BQ_RESULTSET.getTable() + + " order by TimestampField"; + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder().setDefaultDataset(DatasetId.of(DATASET)).build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryResult bigQueryResult = connection.executeSelect(query); + ResultSet rs = bigQueryResult.getResultSet(); + Schema sc = bigQueryResult.getSchema(); + + assertEquals(BQ_RESULTSET_EXPECTED_SCHEMA, sc); // match the schema + assertEquals(2, bigQueryResult.getTotalRows()); // Expecting 2 rows + + assertTrue(rs.next()); // first row + // checking for the null or 0 column values + assertNull(rs.getString("StringField")); + assertEquals(0.0d, rs.getDouble("BigNumericField"), 1e-9); + assertFalse(rs.getBoolean("BooleanField")); + assertNull(rs.getBytes("BytesField")); + assertEquals(0, rs.getInt("IntegerField")); + assertNull(rs.getTimestamp("TimestampField")); + assertNull(rs.getDate("DateField")); + assertEquals(0.0d, rs.getDouble("FloatField"), 1e-9); + assertEquals(0.0d, rs.getDouble("NumericField"), 1e-9); + assertNull(rs.getTime("TimeField")); + assertNull(rs.getString("DateTimeField")); + assertNull(rs.getString("GeographyField")); + assertNull(rs.getBytes("BytesField_1")); + assertFalse(rs.getBoolean("BooleanField_1")); + + assertTrue(rs.next()); // second row + // second row is non null, comparing the values + assertEquals("StringValue1", rs.getString("StringField")); + assertEquals(0.3333333333333333d, rs.getDouble("BigNumericField"), 1e-9); + assertFalse(rs.getBoolean("BooleanField")); + assertNotNull(rs.getBytes("BytesField")); + assertEquals(1, rs.getInt("IntegerField")); + assertEquals(1534680695123L, rs.getTimestamp("TimestampField").getTime()); + assertEquals(java.sql.Date.valueOf("2018-08-19"), rs.getDate("DateField")); + assertEquals(10.1d, rs.getDouble("FloatField"), 1e-9); + assertEquals(100.0d, rs.getDouble("NumericField"), 1e-9); + assertEquals(Time.valueOf(LocalTime.of(12, 11, 35, 123456)), rs.getTime("TimeField")); + assertEquals("2018-08-19T12:11:35.123456", rs.getString("DateTimeField")); 
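+ // (Note: DATETIME and GEOGRAPHY are read back via getString() here; java.sql offers no + // dedicated getter for BigQuery's zone-less civil DATETIME or for WKT GEOGRAPHY values.)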
+ assertEquals("POINT(-122.35022 47.649154)", rs.getString("GeographyField")); + assertNotNull(rs.getBytes("BytesField_1")); + assertTrue(rs.getBoolean("BooleanField_1")); + assertTrue( + rs.getObject("IntegerArrayField") instanceof com.google.cloud.bigquery.FieldValueList); + FieldValueList integerArrayFieldValue = + (com.google.cloud.bigquery.FieldValueList) rs.getObject("IntegerArrayField"); + assertEquals(4, integerArrayFieldValue.size()); // Array has 4 elements + assertEquals(3, (integerArrayFieldValue.get(2).getNumericValue()).intValue()); + List integerArrayFieldValueList = + (List) rs.getArray("IntegerArrayField").getArray(); + assertEquals(4, integerArrayFieldValueList.size()); + assertEquals(3, integerArrayFieldValueList.get(2).getNumericValue().intValue()); + + assertFalse(rs.next()); // no 3rd row in the table + } + + @Test + void testExecuteSelectSinglePageTableRowWithReadAPI() throws SQLException { + String query = + "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, TimestampField, FloatField, " + + "NumericField, TimeField, DateField, DateTimeField , GeographyField, RecordField.BytesField, RecordField.BooleanField, IntegerArrayField from " + + TABLE_ID_FAST_QUERY_BQ_RESULTSET.getTable() + + " order by TimestampField"; + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setDefaultDataset(DatasetId.of(DATASET)) + .setUseReadAPI(true) + .setMinResultSize(1) + .setTotalToPageRowCountRatio(1) + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryResult bigQueryResult = connection.executeSelect(query); + assertTrue(bigQueryResult.getBigQueryResultStats().getQueryStatistics().getUseReadApi()); + ResultSet rs = bigQueryResult.getResultSet(); + Schema sc = bigQueryResult.getSchema(); + + assertEquals(BQ_RESULTSET_EXPECTED_SCHEMA, sc); // match the schema + assertEquals(2, bigQueryResult.getTotalRows()); // Expecting 2 rows + + assertTrue(rs.next()); // first row + // checking for the null or 0 column values + assertNull(rs.getString("StringField")); + assertEquals(0.0d, rs.getDouble("BigNumericField"), 1e-9); + assertFalse(rs.getBoolean("BooleanField")); + assertNull(rs.getBytes("BytesField")); + assertEquals(0, rs.getInt("IntegerField")); + assertNull(rs.getTimestamp("TimestampField")); + assertNull(rs.getDate("DateField")); + assertEquals(0.0d, rs.getDouble("FloatField"), 1e-9); + assertEquals(0.0d, rs.getDouble("NumericField"), 1e-9); + assertNull(rs.getTime("TimeField")); + assertNull(rs.getString("DateTimeField")); + assertNull(rs.getString("GeographyField")); + assertNull(rs.getBytes("BytesField_1")); + assertFalse(rs.getBoolean("BooleanField_1")); + + assertTrue(rs.next()); // second row + // second row is non null, comparing the values + assertEquals("StringValue1", rs.getString("StringField")); + assertEquals(0.3333333333333333d, rs.getDouble("BigNumericField"), 1e-9); + assertFalse(rs.getBoolean("BooleanField")); + assertNotNull(rs.getBytes("BytesField")); + assertEquals(1, rs.getInt("IntegerField")); + assertEquals(1534680695123L, rs.getTimestamp("TimestampField").getTime()); + assertEquals(java.sql.Date.valueOf("2018-08-19"), rs.getDate("DateField")); + assertEquals(10.1d, rs.getDouble("FloatField"), 1e-9); + assertEquals(100.0d, rs.getDouble("NumericField"), 1e-9); + assertEquals( + Time.valueOf(LocalTime.of(12, 11, 35, 123456)).toString(), + rs.getTime("TimeField").toString()); + assertEquals("2018-08-19T12:11:35.123456", rs.getString("DateTimeField")); + 
assertEquals("POINT(-122.35022 47.649154)", rs.getString("GeographyField")); + assertNotNull(rs.getBytes("BytesField_1")); + assertTrue(rs.getBoolean("BooleanField_1")); + List integerArray = (List) rs.getArray("IntegerArrayField").getArray(); + assertEquals(4, integerArray.size()); + assertEquals(3, integerArray.get(2).intValue()); + + assertFalse(rs.next()); // no 3rd row in the table + } + + @Test + void testConnectionClose() throws SQLException { + String query = + "SELECT date, county, state_name, confirmed_cases, deaths FROM " + + TABLE_ID_LARGE.getTable() + + " where date is not null and county is not null and state_name is not null order by date limit 300000"; + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setDefaultDataset(DatasetId.of(DATASET)) + .setNumBufferedRows(10000) // page size + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryResult bigQueryResult = connection.executeSelect(query); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + while (rs.next()) { + ++cnt; + if (cnt == 57000) { // breaking at 57000th record, query reads 300K + assertTrue(connection.close()); // we should be able to cancel the connection + } + } + assertTrue(cnt < 100000); // Extra records are still read even after canceling, as + // the backgrounds threads are still active while the interrupt occurs and the + // buffer and pageCache are cleared + } + + @Test + void testBQResultSetPagination() throws SQLException { + String query = + "SELECT date, county, state_name, confirmed_cases, deaths FROM " + + TABLE_ID_LARGE.getTable() + + " where date is not null and county is not null and state_name is not null order by date limit 300000"; + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setDefaultDataset(DatasetId.of(DATASET)) + .setNumBufferedRows(10000) // page size + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryResult bigQueryResult = connection.executeSelect(query); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + while (rs.next()) { // pagination starts after approx 120,000 records + assertNotNull(rs.getDate(0)); + assertNotNull(rs.getString(1)); + assertNotNull(rs.getString(2)); + assertTrue(rs.getInt(3) >= 0); + assertTrue(rs.getInt(4) >= 0); + ++cnt; + } + assertEquals(300000, cnt); // total 300000 rows should be read + } + + @Test + void testReadAPIIterationAndOrder() + throws SQLException { // use read API to read 300K records and check the order + String query = + "SELECT date, county, state_name, confirmed_cases, deaths FROM " + + TABLE_ID_LARGE.getTable() + + " where date is not null and county is not null and state_name is not null order by confirmed_cases asc limit 300000"; + + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setDefaultDataset(DatasetId.of(DATASET)) + .setPriority( + QueryJobConfiguration.Priority + .INTERACTIVE) // required for this integration test so that isFastQuerySupported + // returns false + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryResult bigQueryResult = connection.executeSelect(query); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + int lasConfirmedCases = Integer.MIN_VALUE; + while (rs.next()) { // pagination starts after approx 120,000 records + assertNotNull(rs.getDate(0)); + assertNotNull(rs.getString(1)); + assertNotNull(rs.getString(2)); + assertTrue(rs.getInt(3) >= 0); + 
assertTrue(rs.getInt(4) >= 0); + + // check if the records are sorted + assertTrue(rs.getInt(3) >= lastConfirmedCases); + lastConfirmedCases = rs.getInt(3); + ++cnt; + } + assertEquals(300000, cnt); // total 300000 rows should be read + connection.close(); + } + + @Test + void testReadAPIIterationAndOrderAsync() + throws SQLException, + ExecutionException, + InterruptedException { // use read API to read 300K records and check the order + String query = + "SELECT date, county, state_name, confirmed_cases, deaths / 10 FROM " + + TABLE_ID_LARGE.getTable() + + " where date is not null and county is not null and state_name is not null order by confirmed_cases asc limit 300000"; + + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setDefaultDataset(DatasetId.of(DATASET)) + .setPriority( + QueryJobConfiguration.Priority + .INTERACTIVE) // required for this integration test so that isFastQuerySupported + // returns false + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + + ListenableFuture<ExecuteSelectResponse> executeSelectFut = connection.executeSelectAsync(query); + ExecuteSelectResponse exSelRes = executeSelectFut.get(); + BigQueryResult bigQueryResult = exSelRes.getResultSet(); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + int lastConfirmedCases = Integer.MIN_VALUE; + while (rs.next()) { // pagination starts after approx 120,000 records + assertNotNull(rs.getDate(0)); + assertNotNull(rs.getString(1)); + assertNotNull(rs.getString(2)); + assertTrue(rs.getInt(3) >= 0); + assertTrue(rs.getDouble(3) >= 0); + assertTrue(rs.getDouble(4) >= 0); + + // check if the records are sorted + assertTrue(rs.getInt(3) >= lastConfirmedCases); + lastConfirmedCases = rs.getInt(3); + ++cnt; + } + assertEquals(300000, cnt); // total 300000 rows should be read + connection.close(); + } + + @Test + // Cancels the future and checks whether the operation gets cancelled. Tests the wiring of future + // callback. + // TODO(prasmish): Remove this test case if it turns out to be flaky, as expecting the process to + // be uncompleted in 1000ms is nondeterministic! Though very likely it won't be complete in the + // specified amount of time + void testExecuteSelectAsyncCancel() + throws SQLException, + ExecutionException, + InterruptedException { // use read API to read 300K records and check the order + String query = + "SELECT date, county, state_name, confirmed_cases, deaths FROM " + + TABLE_ID_LARGE.getTable() + + " where date is not null and county is not null and state_name is not null order by confirmed_cases asc limit 300000"; + + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setDefaultDataset(DatasetId.of(DATASET)) + .setPriority( + QueryJobConfiguration.Priority + .INTERACTIVE) // required for this integration test so that isFastQuerySupported + // returns false + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + + ListenableFuture<ExecuteSelectResponse> executeSelectFut = connection.executeSelectAsync(query); + + // Cancel the future with 1000ms delay + Thread testCloseAsync = + new Thread( + () -> { + try { + Thread.sleep(1000); + executeSelectFut.cancel(true); + } catch (InterruptedException e) { + assertNotNull(e); + } + }); + testCloseAsync.start(); + + assertThrows(CancellationException.class, executeSelectFut::get); + } + + @Test + // Times out the future and checks whether the operation gets cancelled.
+ // TODO(prasmish): Remove this test case if it turns out to be flaky, as expecting the process to + // be uncompleted in 1000ms is nondeterministic! Though very likely it won't be complete in the + // specified amount of time + void testExecuteSelectAsyncTimeout() + throws SQLException, + ExecutionException, + InterruptedException { // use read API to read 300K records and check the order + String query = + "SELECT date, county, state_name, confirmed_cases, deaths FROM " + + TABLE_ID_LARGE.getTable() + + " where date is not null and county is not null and state_name is not null order by confirmed_cases asc limit 300000"; + + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setDefaultDataset(DatasetId.of(DATASET)) + .setPriority( + QueryJobConfiguration.Priority + .INTERACTIVE) // required for this integration test so that isFastQuerySupported + // returns false + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + + ListenableFuture executeSelectFut = connection.executeSelectAsync(query); + + try { + executeSelectFut.get(1000, TimeUnit.MILLISECONDS); + fail(); // this line should not be reached + } catch (CancellationException | TimeoutException e) { + assertNotNull(e); + } + } + + @Test + void testExecuteSelectWithNamedQueryParametersAsync() + throws BigQuerySQLException, ExecutionException, InterruptedException { + String query = + "SELECT TimestampField, StringField, BooleanField FROM " + + TABLE_ID.getTable() + + " WHERE StringField = @stringParam" + + " AND IntegerField IN UNNEST(@integerList)"; + QueryParameterValue stringParameter = QueryParameterValue.string("stringValue"); + QueryParameterValue intArrayParameter = + QueryParameterValue.array(new Integer[] {3, 4}, Integer.class); + Parameter stringParam = + Parameter.newBuilder().setName("stringParam").setValue(stringParameter).build(); + Parameter intArrayParam = + Parameter.newBuilder().setName("integerList").setValue(intArrayParameter).build(); + + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder().setDefaultDataset(DatasetId.of(DATASET)).build(); + Connection connection = bigquery.createConnection(connectionSettings); + List parameters = ImmutableList.of(stringParam, intArrayParam); + + ListenableFuture executeSelectFut = + connection.executeSelectAsync(query, parameters); + ExecuteSelectResponse exSelRes = executeSelectFut.get(); + BigQueryResult rs = exSelRes.getResultSet(); + assertEquals(2, rs.getTotalRows()); + } + + // Ref: https://github.com/googleapis/java-bigquery/issues/2070. Adding a pre-submit test to see + // if bigquery.createConnection() returns null + @Test + void testCreateDefaultConnection() throws BigQuerySQLException { + Connection connection = bigquery.createConnection(); + assertNotNull(connection, "bigquery.createConnection() returned null"); + assertTrue(connection.close()); + } + + @Test + void testReadAPIConnectionMultiClose() + throws + SQLException { // use read API to read 300K records, then closes the connection. 
This test + // repeats it multiple times and asserts that the connection was closed + String query = + "SELECT date, county, state_name, confirmed_cases, deaths FROM " + + TABLE_ID_LARGE.getTable() + + " where date is not null and county is not null and state_name is not null order by confirmed_cases asc limit 300000"; + + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setDefaultDataset(DatasetId.of(DATASET)) + .setPriority( + QueryJobConfiguration.Priority + .INTERACTIVE) // required for this integration test so that isFastQuerySupported + // returns false + .build(); + int closeCnt = 0, runCnt = 3; + for (int run = 0; run < runCnt; run++) { + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryResult bigQueryResult = connection.executeSelect(query); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + while (rs.next()) { // pagination starts after approx 120,000 records + assertNotNull(rs.getDate(0)); + ++cnt; + } + assertEquals(300000, cnt); // total 300000 rows should be read + assertTrue(connection.close()); // check if connection closed + closeCnt++; + } + assertEquals( + closeCnt, runCnt); // check if the connection closed for the required number of times + } + + @Test + void testExecuteSelectSinglePageTableRowColInd() throws SQLException { + String query = + "select StringField, BigNumericField, BooleanField, BytesField, IntegerField, TimestampField, FloatField, " + + "NumericField, TimeField, DateField, DateTimeField , GeographyField, RecordField.BytesField, RecordField.BooleanField, IntegerArrayField from " + + TABLE_ID_FAST_QUERY_BQ_RESULTSET.getTable() + + " order by TimestampField"; + /* + Column Index mapping for ref: + StringField, 0 BigNumericField, 1 BooleanField, 2 BytesField, 3 IntegerField, 4 TimestampField, 5 FloatField, 6 + NumericField, 7 TimeField, 8 DateField, 9 DateTimeField , 10 GeographyField, 11 RecordField.BytesField, 12 RecordField.BooleanField, 13 IntegerArrayField 14 + */ + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder().setDefaultDataset(DatasetId.of(DATASET)).build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryResult bigQueryResult = connection.executeSelect(query); + ResultSet rs = bigQueryResult.getResultSet(); + Schema sc = bigQueryResult.getSchema(); + + assertEquals(BQ_RESULTSET_EXPECTED_SCHEMA, sc); // match the schema + assertEquals(2, bigQueryResult.getTotalRows()); // Expecting 2 rows + while (rs.next()) { + assertEquals(rs.getString(0), rs.getString("StringField")); + assertEquals(rs.getDouble(1), rs.getDouble("BigNumericField"), 1e-9); + assertEquals(rs.getBoolean(2), rs.getBoolean("BooleanField")); + if (rs.getBytes(3) == null) { // both overloads should be null + assertEquals(rs.getBytes(3), rs.getBytes("BytesField")); + } else { // value in String representation should be the same + assertEquals( + new String(rs.getBytes(3), StandardCharsets.UTF_8), + new String(rs.getBytes("BytesField"), StandardCharsets.UTF_8)); + } + assertEquals(rs.getInt(4), rs.getInt("IntegerField")); + assertEquals(rs.getTimestamp(5), rs.getTimestamp("TimestampField")); + assertEquals(rs.getDate(9), rs.getDate("DateField")); + assertEquals(rs.getDouble("FloatField"), rs.getDouble(6), 1e-9); + assertEquals(rs.getDouble("NumericField"), rs.getDouble(7), 1e-9); + assertEquals(rs.getTime(8), rs.getTime("TimeField")); + assertEquals(rs.getString(10), rs.getString("DateTimeField")); + assertEquals(rs.getString(11),
rs.getString("GeographyField")); + if (rs.getBytes(12) == null) { // both overloads should be null + assertEquals(rs.getBytes(12), rs.getBytes("BytesField_1")); + } else { // value in String representation should be the same + assertEquals( + new String(rs.getBytes(12), StandardCharsets.UTF_8), + new String(rs.getBytes("BytesField_1"), StandardCharsets.UTF_8)); + } + assertEquals(rs.getBoolean(13), rs.getBoolean("BooleanField_1")); + assertTrue( + rs.getObject("IntegerArrayField") instanceof com.google.cloud.bigquery.FieldValueList); + FieldValueList integerArrayFieldValue = + (com.google.cloud.bigquery.FieldValueList) rs.getObject("IntegerArrayField"); + assertTrue(rs.getObject(14) instanceof com.google.cloud.bigquery.FieldValueList); + FieldValueList integerArrayFieldValueColInd = + (com.google.cloud.bigquery.FieldValueList) rs.getObject(14); + assertEquals( + integerArrayFieldValue.size(), + integerArrayFieldValueColInd.size()); // Array has 4 elements + if (integerArrayFieldValue.size() == 4) { // as we are picking the third index + assertEquals( + (integerArrayFieldValue.get(2).getNumericValue()).intValue(), + (integerArrayFieldValueColInd.get(2).getNumericValue()).intValue()); + } + + List integerArrayFieldValueList = + (List) rs.getArray("IntegerArrayField").getArray(); + List integerArrayFieldValueListColInd = + (List) rs.getArray(14).getArray(); + assertEquals( + integerArrayFieldValueList.size(), + integerArrayFieldValueListColInd.size()); // Array has 4 elements + if (integerArrayFieldValueList.size() == 4) { // as we are picking the third index + assertEquals( + (integerArrayFieldValueList.get(2).getNumericValue()).intValue(), + (integerArrayFieldValueListColInd.get(2).getNumericValue()).intValue()); + } + } + } + + @Test + void testExecuteSelectStruct() throws SQLException { + String query = "select (STRUCT(\"Vancouver\" as city, 5 as years)) as address"; + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder().setDefaultDataset(DatasetId.of(DATASET)).build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryResult bigQueryResult = connection.executeSelect(query); + assertEquals(1, bigQueryResult.getTotalRows()); + + Schema schema = bigQueryResult.getSchema(); + assertEquals("address", schema.getFields().get(0).getName()); + assertEquals(Field.Mode.NULLABLE, schema.getFields().get(0).getMode()); + // Backend is currently returning LegacySQLTypeName. 
Tracking bug: b/202977620 + assertEquals(LegacySQLTypeName.RECORD, schema.getFields().get(0).getType()); + assertEquals("city", schema.getFields().get(0).getSubFields().get(0).getName()); + assertEquals( + LegacySQLTypeName.STRING, schema.getFields().get(0).getSubFields().get(0).getType()); + assertEquals(Field.Mode.NULLABLE, schema.getFields().get(0).getSubFields().get(0).getMode()); + assertEquals("years", schema.getFields().get(0).getSubFields().get(1).getName()); + assertEquals( + LegacySQLTypeName.INTEGER, schema.getFields().get(0).getSubFields().get(1).getType()); + assertEquals(Field.Mode.NULLABLE, schema.getFields().get(0).getSubFields().get(1).getMode()); + + ResultSet rs = bigQueryResult.getResultSet(); + assertTrue(rs.next()); + FieldValueList addressFieldValue = + (com.google.cloud.bigquery.FieldValueList) rs.getObject("address"); + assertEquals(rs.getObject("address"), rs.getObject(0)); + assertEquals("Vancouver", addressFieldValue.get(0).getStringValue()); + assertEquals(5, addressFieldValue.get(1).getLongValue()); + assertFalse(rs.next()); // only 1 row of data + } + + @Test + void testExecuteSelectStructSubField() throws SQLException { + String query = + "select address.city from (select (STRUCT(\"Vancouver\" as city, 5 as years)) as address)"; + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder().setDefaultDataset(DatasetId.of(DATASET)).build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryResult bigQueryResult = connection.executeSelect(query); + assertEquals(1, bigQueryResult.getTotalRows()); + + Schema schema = bigQueryResult.getSchema(); + assertEquals("city", schema.getFields().get(0).getName()); + assertEquals(Field.Mode.NULLABLE, schema.getFields().get(0).getMode()); + // Backend is currently returning LegacySQLTypeName. 
Tracking bug: b/202977620 + assertEquals(LegacySQLTypeName.STRING, schema.getFields().get(0).getType()); + assertNull( + schema.getFields().get(0).getSubFields()); // this is a String field without any subfields + + ResultSet rs = bigQueryResult.getResultSet(); + assertTrue(rs.next()); + String cityFieldValue = rs.getString("city"); + assertEquals(rs.getString("city"), rs.getObject(0)); + assertEquals("Vancouver", cityFieldValue); + assertFalse(rs.next()); // only 1 row of data + } + + @Test + void testExecuteSelectArray() throws SQLException { + String query = "SELECT [1,2,3]"; + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder().setDefaultDataset(DatasetId.of(DATASET)).build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryResult bigQueryResult = connection.executeSelect(query); + assertEquals(1, bigQueryResult.getTotalRows()); + + Schema schema = bigQueryResult.getSchema(); + assertEquals("f0_", schema.getFields().get(0).getName()); + assertEquals(Field.Mode.REPEATED, schema.getFields().get(0).getMode()); + assertEquals(LegacySQLTypeName.INTEGER, schema.getFields().get(0).getType()); + assertNull(schema.getFields().get(0).getSubFields()); // no subfields for Integers + + ResultSet rs = bigQueryResult.getResultSet(); + assertTrue(rs.next()); + FieldValueList arrayFieldValue = (com.google.cloud.bigquery.FieldValueList) rs.getObject(0); + assertEquals(1, arrayFieldValue.get(0).getLongValue()); + assertEquals(2, arrayFieldValue.get(1).getLongValue()); + assertEquals(3, arrayFieldValue.get(2).getLongValue()); + } + + @Test + void testExecuteSelectArrayOfStruct() throws SQLException { + String query = + "SELECT [STRUCT(\"Vancouver\" as city, 5 as years), STRUCT(\"Boston\" as city, 10 as years)]"; + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder().setDefaultDataset(DatasetId.of(DATASET)).build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryResult bigQueryResult = connection.executeSelect(query); + assertEquals(1, bigQueryResult.getTotalRows()); + + Schema schema = bigQueryResult.getSchema(); + assertEquals("f0_", schema.getFields().get(0).getName()); + assertEquals(Field.Mode.REPEATED, schema.getFields().get(0).getMode()); + // Backend is currently returning LegacySQLTypeName. 
Tracking bug: b/202977620 + // Verify the field metadata of the two subfields of the struct + assertEquals(LegacySQLTypeName.RECORD, schema.getFields().get(0).getType()); + assertEquals("city", schema.getFields().get(0).getSubFields().get(0).getName()); + assertEquals( + LegacySQLTypeName.STRING, schema.getFields().get(0).getSubFields().get(0).getType()); + assertEquals(Field.Mode.NULLABLE, schema.getFields().get(0).getSubFields().get(0).getMode()); + assertEquals("years", schema.getFields().get(0).getSubFields().get(1).getName()); + assertEquals( + LegacySQLTypeName.INTEGER, schema.getFields().get(0).getSubFields().get(1).getType()); + assertEquals(Field.Mode.NULLABLE, schema.getFields().get(0).getSubFields().get(1).getMode()); + + ResultSet rs = bigQueryResult.getResultSet(); + assertTrue(rs.next()); + FieldValueList arrayOfStructFieldValue = + (com.google.cloud.bigquery.FieldValueList) rs.getObject(0); + // Verify the values of the two structs in the array + assertEquals(Attribute.RECORD, arrayOfStructFieldValue.get(0).getAttribute()); + assertEquals( + "Vancouver", arrayOfStructFieldValue.get(0).getRecordValue().get(0).getStringValue()); + assertEquals(5, arrayOfStructFieldValue.get(0).getRecordValue().get(1).getLongValue()); + assertEquals(Attribute.RECORD, arrayOfStructFieldValue.get(1).getAttribute()); + assertEquals("Boston", arrayOfStructFieldValue.get(1).getRecordValue().get(0).getStringValue()); + assertEquals(10, arrayOfStructFieldValue.get(1).getRecordValue().get(1).getLongValue()); + } + + /* TODO(prasmish): replicate the entire test case for executeSelect */ + @Test + void testFastQueryMultipleRuns() throws InterruptedException { String query = - "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FASTQUERY.getTable(); + "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FAST_QUERY.getTable(); QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); TableResult result = bigquery.query(config); + assertNotNull(result.getJobId()); assertEquals(QUERY_RESULT_SCHEMA, result.getSchema()); assertEquals(2, result.getTotalRows()); assertNull(result.getNextPage()); @@ -1812,6 +4603,8 @@ public void testFastQueryMultipleRuns() throws InterruptedException { // running the same QueryJobConfiguration with the same query again TableResult result1Duplicate = bigquery.query(config); + assertNotNull(result1Duplicate.getJobId()); + assertNotEquals(result.getJobId(), result1Duplicate.getJobId()); assertEquals(QUERY_RESULT_SCHEMA, result1Duplicate.getSchema()); assertEquals(2, result.getTotalRows()); assertNull(result1Duplicate.getNextPage()); @@ -1822,6 +4615,7 @@ public void testFastQueryMultipleRuns() throws InterruptedException { QueryJobConfiguration config2 = QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); TableResult result2 = bigquery.query(config2); + assertNotNull(result2.getJobId()); assertEquals(QUERY_RESULT_SCHEMA, result2.getSchema()); assertEquals(2, result2.getTotalRows()); assertNull(result2.getNextPage()); @@ -1829,13 +4623,15 @@ public void testFastQueryMultipleRuns() throws InterruptedException { assertFalse(result2.hasNextPage()); } + /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testFastQuerySinglePageDuplicateRequestIds() throws InterruptedException { + void testFastQuerySinglePageDuplicateRequestIds() throws InterruptedException { String query = - "SELECT TimestampField, StringField, 
BooleanField FROM " + TABLE_ID_FASTQUERY.getTable(); + "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FAST_QUERY.getTable(); QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); TableResult result = bigquery.query(config); + assertNotNull(result.getJobId()); assertEquals(QUERY_RESULT_SCHEMA, result.getSchema()); assertEquals(2, result.getTotalRows()); assertNull(result.getNextPage()); @@ -1843,6 +4639,7 @@ public void testFastQuerySinglePageDuplicateRequestIds() throws InterruptedExcep assertFalse(result.hasNextPage()); TableResult result1 = bigquery.query(config); + assertNotNull(result1.getJobId()); assertEquals(QUERY_RESULT_SCHEMA, result1.getSchema()); assertEquals(2, result1.getTotalRows()); assertNull(result1.getNextPage()); @@ -1851,6 +4648,7 @@ public void testFastQuerySinglePageDuplicateRequestIds() throws InterruptedExcep config.toBuilder().setQuery(query).build(); TableResult result2 = bigquery.query(config); + assertNotNull(result2.getJobId()); assertEquals(QUERY_RESULT_SCHEMA, result2.getSchema()); assertEquals(2, result2.getTotalRows()); assertNull(result2.getNextPage()); @@ -1858,13 +4656,15 @@ public void testFastQuerySinglePageDuplicateRequestIds() throws InterruptedExcep assertFalse(result2.hasNextPage()); } + /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testFastSQLQuery() throws InterruptedException { + void testFastSQLQuery() throws InterruptedException { String query = - "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FASTQUERY.getTable(); + "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FAST_QUERY.getTable(); QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); TableResult result = bigquery.query(config); + assertNotNull(result.getJobId()); assertEquals(QUERY_RESULT_SCHEMA, result.getSchema()); assertEquals(2, result.getTotalRows()); assertNull(result.getNextPage()); @@ -1883,18 +4683,103 @@ public void testFastSQLQuery() throws InterruptedException { assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); + } + } + + @Test + void testProjectIDFastSQLQueryWithJobId() { + String invalidProjectId = generateRandomName("RANDOM_PROJECT_").replace('-', '_'); + String query = + "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID_FAST_QUERY.getTable(); + // With incorrect projectID in jobid + // The job will be created with the specified(incorrect) projectID + // hence failing the operation + JobId jobIdWithProjectId = JobId.newBuilder().setProject(invalidProjectId).build(); + QueryJobConfiguration configSelect = + QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); + try { + bigquery.query(configSelect, jobIdWithProjectId); + } catch (Exception exception) { + // error message for non-existent project + assertEquals("Cannot parse as CloudRegion.", exception.getMessage()); + assertEquals(BigQueryException.class, exception.getClass()); + } + } + + @Test + void testLocationFastSQLQueryWithJobId() throws InterruptedException { + TableId tableIdFastQueryUk = TableId.of(UK_DATASET, "fastquery_testing_table"); + DatasetInfo infoUK = + 
DatasetInfo.newBuilder(UK_DATASET) + .setDescription(DESCRIPTION) + .setLocation("europe-west1") + .setLabels(LABELS) + .build(); + bigquery.create(infoUK); + + TableDefinition tableDefinition = StandardTableDefinition.of(SIMPLE_SCHEMA); + TableInfo tableInfo = TableInfo.newBuilder(tableIdFastQueryUk, tableDefinition).build(); + bigquery.create(tableInfo); + + String insert = + "INSERT " + UK_DATASET + "." + tableIdFastQueryUk.getTable() + " VALUES('Anna');"; + + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder(insert) + .setDefaultDataset(DatasetId.of(UK_DATASET)) + .build(); + TableResult result = bigquery.query(config); + assertNotNull(result.getJobId()); + assertEquals(SIMPLE_SCHEMA, result.getSchema()); + // Use `getNumDmlAffectedRows()` for DML operations + Job queryJob = bigquery.getJob(result.getJobId()); + queryJob = queryJob.waitFor(); + JobStatistics.QueryStatistics statistics = queryJob.getStatistics(); + assertEquals(1L, statistics.getNumDmlAffectedRows().longValue()); + + // Verify correctness of table content + for (FieldValueList row : result.getValues()) { + FieldValue stringCell = row.get(0); + assertEquals(stringCell, row.get("StringField")); + assertEquals("Anna", stringCell.getStringValue()); + } + // With an incorrect location in the JobId, the job will be created with the + // specified (incorrect) location, hence failing the operation + String query = "SELECT StringField FROM " + tableIdFastQueryUk.getTable(); + JobId jobIdWithLocation = JobId.newBuilder().setLocation("us-west1").build(); + QueryJobConfiguration configSelect = + QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(UK_DATASET)).build(); + BigQueryException exception = + assertThrows( + BigQueryException.class, + () -> bigquery.query(configSelect, jobIdWithLocation), + "BigQueryException was expected"); + assertTrue(exception.getMessage().contains("Not found")); + + // Without location in jobID, the query job defaults to the location of the dataset + JobId jobIdNoLocation = JobId.newBuilder().build(); + QueryJobConfiguration configNoLocation = + QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(UK_DATASET)).build(); + TableResult resultNoLocation = bigquery.query(configNoLocation, jobIdNoLocation); + for (FieldValueList row : resultNoLocation.getValues()) { + FieldValue stringCell = row.get(0); + assertEquals(stringCell, row.get("StringField")); + assertEquals("Anna", stringCell.getStringValue()); } } + /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testFastSQLQueryMultiPage() throws InterruptedException { + void testFastSQLQueryMultiPage() throws InterruptedException { String query = "SELECT date, county, state_name, county_fips_code, confirmed_cases, deaths FROM " + TABLE_ID_LARGE.getTable(); QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); TableResult result = bigquery.query(config); + assertNotNull(result.getJobId()); assertEquals(LARGE_TABLE_SCHEMA, result.getSchema()); assertEquals(313348, result.getTotalRows()); assertNotNull(result.getNextPage()); @@ -1902,6 +4787,7 @@ public void testFastSQLQueryMultiPage() throws InterruptedException { assertTrue(result.hasNextPage()); TableResult result1 = bigquery.query(config); + assertNotNull(result1.getJobId()); assertEquals(LARGE_TABLE_SCHEMA, result.getSchema()); assertEquals(313348, result.getTotalRows()); assertNotNull(result1.getNextPage()); @@ -1910,6 +4796,7 @@ public void
testFastSQLQueryMultiPage() throws InterruptedException { config.toBuilder().setQuery(query).build(); TableResult result2 = bigquery.query(config); + assertNotNull(result2.getJobId()); assertEquals(LARGE_TABLE_SCHEMA, result2.getSchema()); assertEquals(313348, result2.getTotalRows()); assertNotNull(result2.getNextPage()); @@ -1918,55 +4805,61 @@ public void testFastSQLQueryMultiPage() throws InterruptedException { } @Test - public void testFastDMLQuery() throws InterruptedException { - String tableName = TABLE_ID_FASTQUERY.getTable(); - String dmlQuery = - String.format("UPDATE %s.%s SET StringField = 'hello' WHERE TRUE", DATASET, tableName); - QueryJobConfiguration dmlConfig = QueryJobConfiguration.newBuilder(dmlQuery).build(); - TableResult result = bigquery.query(dmlConfig); - assertEquals(TABLE_SCHEMA, result.getSchema()); - assertEquals(2, result.getTotalRows()); - // Verify correctness of table content - String sqlQuery = String.format("SELECT * FROM %s.%s", DATASET, tableName); - QueryJobConfiguration sqlConfig = QueryJobConfiguration.newBuilder(sqlQuery).build(); - TableResult resultAfterDML = bigquery.query(sqlConfig); - for (FieldValueList row : resultAfterDML.getValues()) { - FieldValue timestampCell = row.get(0); - assertEquals(timestampCell, row.get("TimestampField")); - FieldValue stringCell = row.get(1); - assertEquals(stringCell, row.get("StringField")); - FieldValue booleanCell = row.get(3); - assertEquals(booleanCell, row.get("BooleanField")); - assertEquals(FieldValue.Attribute.PRIMITIVE, timestampCell.getAttribute()); - assertEquals(FieldValue.Attribute.PRIMITIVE, stringCell.getAttribute()); - assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); - assertEquals(1408452095220000L, timestampCell.getTimestampValue()); - assertEquals("hello", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); - } - } - - @Test - public void testFastDDLQuery() throws InterruptedException { - String tableName = "test_table_fast_query_ddl"; - String tableNameFastQuery = TABLE_ID_DDL.getTable(); + void testFastDMLQuery() throws InterruptedException { + // The test runs an update query. Clone the table to ensure that this doesn't impact + // other tests. 
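+ // (The "clone" below is a CREATE OR REPLACE TABLE ... AS SELECT into a randomly named + // table, so the UPDATE only mutates this test's private copy of TABLE_ID_SIMPLE.)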
+ String tableName = generateRandomName("test_table_fast_query_dml"); + String tableNameFastQuery = TABLE_ID_SIMPLE.getTable(); String ddlQuery = String.format( "CREATE OR REPLACE TABLE %s (" + "TimestampField TIMESTAMP OPTIONS(description='TimestampDescription'), " + "StringField STRING OPTIONS(description='StringDescription'), " + "BooleanField BOOLEAN OPTIONS(description='BooleanDescription') " - + ") AS SELECT * FROM %s", + + ") AS SELECT DISTINCT * FROM %s", tableName, tableNameFastQuery); QueryJobConfiguration ddlConfig = QueryJobConfiguration.newBuilder(ddlQuery).setDefaultDataset(DatasetId.of(DATASET)).build(); TableResult result = bigquery.query(ddlConfig); - assertEquals(DDL_TABLE_SCHEMA, result.getSchema()); - assertEquals(0, result.getTotalRows()); - // Verify correctness of table content - String sqlQuery = String.format("SELECT * FROM %s.%s", DATASET, tableName); - QueryJobConfiguration sqlConfig = QueryJobConfiguration.newBuilder(sqlQuery).build(); + assertNotNull(result.getJobId()); + + String dmlQuery = + String.format("UPDATE %s.%s SET StringField = 'hello' WHERE TRUE", DATASET, tableName); + QueryJobConfiguration dmlConfig = QueryJobConfiguration.newBuilder(dmlQuery).build(); + TableResult resultAfterDML = bigquery.query(dmlConfig); + assertNotNull(resultAfterDML.getJobId()); + assertEquals(SIMPLE_TABLE_SCHEMA, resultAfterDML.getSchema()); + // Using the job reference on the TableResult, lookup and verify DML statistics. + Job queryJob = bigquery.getJob(resultAfterDML.getJobId()); + queryJob = queryJob.waitFor(); + JobStatistics.QueryStatistics statistics = queryJob.getStatistics(); + assertEquals(1L, statistics.getNumDmlAffectedRows().longValue()); + assertEquals(1L, statistics.getDmlStats().getUpdatedRowCount().longValue()); + } + + @Test + void testFastDDLQuery() throws InterruptedException { + String tableName = generateRandomName("test_table_fast_query_ddl"); + String tableNameFastQuery = TABLE_ID_SIMPLE.getTable(); + String ddlQuery = + String.format( + "CREATE OR REPLACE TABLE %s (" + + "TimestampField TIMESTAMP OPTIONS(description='TimestampDescription'), " + + "StringField STRING OPTIONS(description='StringDescription'), " + + "BooleanField BOOLEAN OPTIONS(description='BooleanDescription') " + + ") AS SELECT * FROM %s", + tableName, tableNameFastQuery); + QueryJobConfiguration ddlConfig = + QueryJobConfiguration.newBuilder(ddlQuery).setDefaultDataset(DatasetId.of(DATASET)).build(); + TableResult result = bigquery.query(ddlConfig); + assertNotNull(result.getJobId()); + assertEquals(SIMPLE_TABLE_SCHEMA, result.getSchema()); + assertEquals(0, result.getTotalRows()); + // Verify correctness of table content + String sqlQuery = String.format("SELECT * FROM %s.%s", DATASET, tableName); + QueryJobConfiguration sqlConfig = QueryJobConfiguration.newBuilder(sqlQuery).build(); TableResult resultAfterDDL = bigquery.query(sqlConfig); + assertNotNull(resultAfterDDL.getJobId()); for (FieldValueList row : resultAfterDDL.getValues()) { FieldValue timestampCell = row.get(0); assertEquals(timestampCell, row.get("TimestampField")); @@ -1979,14 +4872,13 @@ public void testFastDDLQuery() throws InterruptedException { assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); } } @Test - public void testFastQuerySlowDDL() throws 
InterruptedException { - String tableName = - "test_table_fast_query_ddl_slow_" + UUID.randomUUID().toString().substring(0, 8); + void testFastQuerySlowDDL() throws InterruptedException { + String tableName = generateRandomName("test_table_fast_query_ddl_slow_"); // This query takes more than 10s to run and should fall back on the old query path String slowDdlQuery = String.format( @@ -1997,12 +4889,14 @@ public void testFastQuerySlowDDL() throws InterruptedException { .setDefaultDataset(DatasetId.of(DATASET)) .build(); TableResult result = bigquery.query(ddlConfig); + assertNotNull(result.getJobId()); assertEquals(0, result.getTotalRows()); assertNotNull(result.getSchema()); // Verify correctness of table content String sqlQuery = String.format("SELECT * FROM %s.%s", DATASET, tableName); QueryJobConfiguration sqlConfig = QueryJobConfiguration.newBuilder(sqlQuery).build(); TableResult resultAfterDDL = bigquery.query(sqlConfig); + assertNotNull(resultAfterDDL.getJobId()); for (FieldValueList row : resultAfterDDL.getValues()) { FieldValue unique_key = row.get(0); assertEquals(unique_key, row.get("unique_key")); @@ -2013,22 +4907,23 @@ public void testFastQuerySlowDDL() throws InterruptedException { } } + /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testFastQueryHTTPException() throws InterruptedException { + void testFastQueryHTTPException() throws InterruptedException { String queryInvalid = - "CREATE OR REPLACE SELECT * FROM UPDATE TABLE SET " + TABLE_ID_FASTQUERY.getTable(); + "CREATE OR REPLACE SELECT * FROM UPDATE TABLE SET " + TABLE_ID_FAST_QUERY.getTable(); QueryJobConfiguration configInvalidQuery = QueryJobConfiguration.newBuilder(queryInvalid) .setDefaultDataset(DatasetId.of(DATASET)) .build(); - try { - bigquery.query(configInvalidQuery); - fail("\"BigQueryException was expected\""); - } catch (BigQueryException e) { - BigQueryError error = e.getError(); - assertNotNull(error.getMessage()); - assertEquals("invalidQuery", error.getReason()); - } + BigQueryException exception = + assertThrows( + BigQueryException.class, + () -> bigquery.query(configInvalidQuery), + "BigQueryException was expected"); + BigQueryError error = exception.getError(); + assertNotNull(error.getMessage()); + assertEquals("invalidQuery", error.getReason()); String queryMissingTable = "SELECT * FROM " + TableId.of(DATASET, "non_existing_table").getTable(); @@ -2036,18 +4931,279 @@ public void testFastQueryHTTPException() throws InterruptedException { QueryJobConfiguration configMissingTable = QueryJobConfiguration.newBuilder(queryMissingTable) .setDefaultDataset(DatasetId.of(DATASET)) .build(); - try { - bigquery.query(configMissingTable); - fail("\"BigQueryException was expected\""); - } catch (BigQueryException e) { - BigQueryError error = e.getError(); - assertNotNull(error.getMessage()); - assertEquals("notFound", error.getReason()); + + BigQueryException exception1 = + assertThrows( + BigQueryException.class, + () -> bigquery.query(configMissingTable), + "BigQueryException was expected"); + BigQueryError error1 = exception1.getError(); + assertNotNull(error1.getMessage()); + assertEquals("notFound", error1.getReason()); + } + + @Test + void testQuerySessionSupport() throws InterruptedException { + String query = "CREATE TEMPORARY TABLE temptable AS SELECT 17 as foo"; + QueryJobConfiguration queryJobConfiguration = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(DATASET)) + .setCreateSession(true) + .build(); + Job remoteJob = bigquery.create(JobInfo.of(queryJobConfiguration));
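+ // The job above was created with setCreateSession(true), which asks the service to open a
+ // session; once it completes, the session id is surfaced via JobStatistics.SessionInfo and is
+ // attached to the follow-up job below through the session_id connection property.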
+ remoteJob = remoteJob.waitFor(); + assertNull(remoteJob.getStatus().getError()); + + Job queryJob = bigquery.getJob(remoteJob.getJobId()); + JobStatistics.QueryStatistics statistics = queryJob.getStatistics(); + String sessionId = statistics.getSessionInfo().getSessionId(); + assertNotNull(sessionId); + + String queryTempTable = "SELECT * FROM temptable"; + ConnectionProperty connectionProperty = + ConnectionProperty.newBuilder().setKey("session_id").setValue(sessionId).build(); + QueryJobConfiguration queryJobConfigurationWithSession = + QueryJobConfiguration.newBuilder(queryTempTable) + .setDefaultDataset(DatasetId.of(DATASET)) + .setConnectionProperties(ImmutableList.of(connectionProperty)) + .build(); + Job remoteJobWithSession = bigquery.create(JobInfo.of(queryJobConfigurationWithSession)); + remoteJobWithSession = remoteJobWithSession.waitFor(); + assertNull(remoteJobWithSession.getStatus().getError()); + Job queryJobWithSession = bigquery.getJob(remoteJobWithSession.getJobId()); + QueryStatistics statisticsWithSession = queryJobWithSession.getStatistics(); + assertEquals(sessionId, statisticsWithSession.getSessionInfo().getSessionId()); + } + + @Test + void testLoadSessionSupportWriteChannelConfiguration() throws InterruptedException { + TableId sessionTableId = TableId.of("_SESSION", "test_temp_destination_table_from_file"); + + WriteChannelConfiguration configuration = + WriteChannelConfiguration.newBuilder(sessionTableId) + .setFormatOptions(CsvOptions.newBuilder().setFieldDelimiter(",").build()) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setSchema(SESSION_TABLE_SCHEMA) + .setCreateSession(true) + .build(); + String jobName = "jobId_" + UUID.randomUUID().toString(); + JobId jobId = JobId.newBuilder().setLocation("us").setJob(jobName).build(); + String sessionId; + + // Imports a local file into a table. + try (TableDataWriteChannel writer = bigquery.writer(jobId, configuration); + OutputStream stream = Channels.newOutputStream(writer)) { + InputStream inputStream = + ITBigQueryTest.class.getClassLoader().getResourceAsStream("sessionTest.csv"); + // Can use `Files.copy(csvPath, stream);` instead. + // Using IOUtils here because graalvm can't handle resource files. + IOUtils.copy(inputStream, stream); + + } catch (IOException e) { + throw new RuntimeException(e); + } + Job loadJob = bigquery.getJob(jobId); + Job completedJob = loadJob.waitFor(); + + assertNotNull(completedJob); + assertEquals(jobId.getJob(), completedJob.getJobId().getJob()); + JobStatistics.LoadStatistics statistics = completedJob.getStatistics(); + + sessionId = statistics.getSessionInfo().getSessionId(); + assertNotNull(sessionId); + + // Load job in the same session. + // Should load the data to a temp table. 
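+ // Note: the _SESSION dataset qualifier used at the end of this test is only resolvable by
+ // jobs carrying the same session_id connection property; outside the session the temp table
+ // is not visible.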
+ ConnectionProperty sessionConnectionProperty = + ConnectionProperty.newBuilder().setKey("session_id").setValue(sessionId).build(); + WriteChannelConfiguration sessionConfiguration = + WriteChannelConfiguration.newBuilder(sessionTableId) + .setConnectionProperties(ImmutableList.of(sessionConnectionProperty)) + .setFormatOptions(CsvOptions.newBuilder().setFieldDelimiter(",").build()) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setSchema(SESSION_TABLE_SCHEMA) + .build(); + String sessionJobName = "jobId_" + UUID.randomUUID().toString(); + JobId sessionJobId = JobId.newBuilder().setLocation("us").setJob(sessionJobName).build(); + try (TableDataWriteChannel writer = bigquery.writer(sessionJobId, sessionConfiguration); + OutputStream stream = Channels.newOutputStream(writer)) { + InputStream inputStream = + ITBigQueryTest.class.getClassLoader().getResourceAsStream("sessionTest.csv"); + IOUtils.copy(inputStream, stream); + } catch (IOException e) { + throw new RuntimeException(e); + } + Job queryJobWithSession = bigquery.getJob(sessionJobId); + queryJobWithSession = queryJobWithSession.waitFor(); + LoadStatistics statisticsWithSession = queryJobWithSession.getStatistics(); + assertNotNull(statisticsWithSession.getSessionInfo().getSessionId()); + + // Checking if the data loaded to the temp table in the session + String queryTempTable = "SELECT * FROM _SESSION.test_temp_destination_table_from_file;"; + QueryJobConfiguration queryJobConfigurationWithSession = + QueryJobConfiguration.newBuilder(queryTempTable) + .setConnectionProperties(ImmutableList.of(sessionConnectionProperty)) + .build(); + Job queryTempTableJob = bigquery.create(JobInfo.of(queryJobConfigurationWithSession)); + queryTempTableJob = queryTempTableJob.waitFor(); + assertNotNull(queryTempTableJob.getQueryResults()); + } + + @Test + void testLoadSessionSupport() throws InterruptedException { + // Start the session + TableId sessionTableId = TableId.of("_SESSION", "test_temp_destination_table"); + LoadJobConfiguration configuration = + LoadJobConfiguration.newBuilder( + sessionTableId, "gs://" + BUCKET + "/" + JSON_LOAD_FILE, FormatOptions.json()) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setSchema(TABLE_SCHEMA) + .setCreateSession(true) + .build(); + Job job = bigquery.create(JobInfo.of(configuration)); + job = job.waitFor(); + assertNull(job.getStatus().getError()); + + Job loadJob = bigquery.getJob(job.getJobId()); + JobStatistics.LoadStatistics statistics = loadJob.getStatistics(); + assertThat(statistics.getTotalSlotMs()).isGreaterThan(0L); + String sessionId = statistics.getSessionInfo().getSessionId(); + assertNotNull(sessionId); + + // Load job in the same session. + // Should load the data to a temp table. 
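+ // Same session plumbing as the WriteChannel variant above, but driven through a
+ // LoadJobConfiguration reading from a GCS URI instead of a local file upload.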
+ ConnectionProperty sessionConnectionProperty = + ConnectionProperty.newBuilder().setKey("session_id").setValue(sessionId).build(); + LoadJobConfiguration loadJobConfigurationWithSession = + LoadJobConfiguration.newBuilder( + sessionTableId, "gs://" + BUCKET + "/" + JSON_LOAD_FILE, FormatOptions.json()) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setSchema(TABLE_SCHEMA) + .setConnectionProperties(ImmutableList.of(sessionConnectionProperty)) + .build(); + Job remoteJobWithSession = bigquery.create(JobInfo.of(loadJobConfigurationWithSession)); + remoteJobWithSession = remoteJobWithSession.waitFor(); + assertNull(remoteJobWithSession.getStatus().getError()); + Job queryJobWithSession = bigquery.getJob(remoteJobWithSession.getJobId()); + LoadStatistics statisticsWithSession = queryJobWithSession.getStatistics(); + assertNotNull(statisticsWithSession.getSessionInfo().getSessionId()); + + // Checking if the data loaded to the temp table in the session + String queryTempTable = "SELECT * FROM _SESSION.test_temp_destination_table;"; + QueryJobConfiguration queryJobConfigurationWithSession = + QueryJobConfiguration.newBuilder(queryTempTable) + .setConnectionProperties(ImmutableList.of(sessionConnectionProperty)) + .build(); + Job queryTempTableJob = bigquery.create(JobInfo.of(queryJobConfigurationWithSession)); + queryTempTableJob = queryTempTableJob.waitFor(); + assertNull(queryTempTableJob.getStatus().getError()); + assertNotNull(queryTempTableJob.getQueryResults()); + } + + // TODO: uncomment this testcase when executeUpdate is implemented + // @Test + // public void testExecuteSelectWithSession() throws BigQuerySQLException { + // String query = "CREATE TEMPORARY TABLE temptable AS SELECT 17 as foo"; + // ConnectionSettings connectionSettings = + // ConnectionSettings.newBuilder().setDefaultDataset(DatasetId.of(DATASET)).setCreateSession(true).build(); + // Connection connection = bigquery.createConnection(connectionSettings); + // BigQueryResult bigQueryResult = connection.execute(query); + // BigQueryResultStats stats = bigQueryResult.getBigQueryResultStats(); + // assertNotNull(stats.getSessionInfo().getSessionId()); + // } + + @Test + void testExecuteSelectSessionSupport() throws BigQuerySQLException { + String query = "SELECT 17 as foo"; + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setDefaultDataset(DatasetId.of(DATASET)) + .setCreateSession(true) + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + BigQueryResult bigQueryResult = connection.executeSelect(query); + String sessionId = bigQueryResult.getBigQueryResultStats().getSessionInfo().getSessionId(); + assertNotNull(sessionId); + } + + @Test + void testDmlStatistics() throws InterruptedException { + // This runs an update SQL query. Clone the table to ensure that this doesn't impact + // other tests. 
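+ // getNumDmlAffectedRows() reports the aggregate affected-row count, while DmlStats breaks it
+ // down per operation (inserted/updated/deleted rows); both are asserted at the end of this
+ // test.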
+ String tableName = generateRandomName("test_table_dml_stats"); + String tableNameSimple = TABLE_ID_SIMPLE.getTable(); + String ddlQuery = + String.format( + "CREATE OR REPLACE TABLE %s (" + + "TimestampField TIMESTAMP OPTIONS(description='TimestampDescription'), " + + "StringField STRING OPTIONS(description='StringDescription'), " + + "BooleanField BOOLEAN OPTIONS(description='BooleanDescription') " + + ") AS SELECT DISTINCT * FROM %s", + tableName, tableNameSimple); + QueryJobConfiguration ddlConfig = + QueryJobConfiguration.newBuilder(ddlQuery).setDefaultDataset(DatasetId.of(DATASET)).build(); + TableResult result = bigquery.query(ddlConfig); + assertNotNull(result.getJobId()); + + String dmlQuery = + String.format("UPDATE %s.%s SET StringField = 'hello' WHERE TRUE", DATASET, tableName); + QueryJobConfiguration dmlConfig = QueryJobConfiguration.newBuilder(dmlQuery).build(); + Job remoteJob = bigquery.create(JobInfo.of(dmlConfig)); + remoteJob = remoteJob.waitFor(); + assertNull(remoteJob.getStatus().getError()); + + TableResult resultAfterUpdate = remoteJob.getQueryResults(); + assertNotNull(resultAfterUpdate.getJobId()); + assertEquals(SIMPLE_TABLE_SCHEMA, resultAfterUpdate.getSchema()); + + Job queryJob = bigquery.getJob(remoteJob.getJobId()); + queryJob = queryJob.waitFor(); + JobStatistics.QueryStatistics statistics = queryJob.getStatistics(); + assertEquals(1L, statistics.getNumDmlAffectedRows().longValue()); + assertEquals(1L, statistics.getDmlStats().getUpdatedRowCount().longValue()); + } + + /* TODO(prasmish): replicate the entire test case for executeSelect */ + @Test + void testTransactionInfo() throws InterruptedException { + // The transaction runs an update query. Clone the table to ensure that this doesn't impact + // other tests. 
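+ // A multi-statement transaction runs as a script job: the parent job spawns child jobs, and
+ // only the children executed between BEGIN and COMMIT TRANSACTION are expected to carry
+ // TransactionInfo.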
+ String tableName = generateRandomName("test_table_transaction_info"); + String tableNameSimple = TABLE_ID_SIMPLE.getTable(); + String ddlQuery = + String.format( + "CREATE OR REPLACE TABLE %s (" + + "TimestampField TIMESTAMP OPTIONS(description='TimestampDescription'), " + + "StringField STRING OPTIONS(description='StringDescription'), " + + "BooleanField BOOLEAN OPTIONS(description='BooleanDescription') " + + ") AS SELECT DISTINCT * FROM %s", + tableName, tableNameSimple); + QueryJobConfiguration ddlConfig = + QueryJobConfiguration.newBuilder(ddlQuery).setDefaultDataset(DatasetId.of(DATASET)).build(); + TableResult result = bigquery.query(ddlConfig); + assertNotNull(result.getJobId()); + + String transaction = + String.format( + "BEGIN TRANSACTION;\n" + + " UPDATE %s.%s SET StringField = 'hello' WHERE TRUE;\n" + + " COMMIT TRANSACTION;\n", + DATASET, tableName); + QueryJobConfiguration config = QueryJobConfiguration.of(transaction); + Job remoteJob = bigquery.create(JobInfo.of(config)); + JobInfo parentJobInfo = remoteJob.waitFor(); + String parentJobId = parentJobInfo.getJobId().getJob(); + Page childJobs = bigquery.listJobs(JobListOption.parentJobId(parentJobId)); + for (Job job : childJobs.iterateAll()) { + // only those child jobs inside the transaction would have transactionInfo populated + TransactionInfo transactionInfo = job.getStatistics().getTransactionInfo(); + assertNotNull(transactionInfo.getTransactionId()); } } + /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testScriptStatistics() throws InterruptedException { + void testScriptStatistics() throws InterruptedException { String script = "-- Declare a variable to hold names as an array.\n" + "DECLARE top_names ARRAY;\n" @@ -2100,7 +5256,34 @@ public void testScriptStatistics() throws InterruptedException { } @Test - public void testPositionalQueryParameters() throws InterruptedException { + void testQueryParameterModeWithDryRun() { + String query = + "SELECT TimestampField, StringField, BooleanField, BigNumericField, BigNumericField1, BigNumericField2, BigNumericField3, BigNumericField4 FROM " + + TABLE_ID.getTable() + + " WHERE StringField = ?" + + " AND TimestampField > ?" + + " AND IntegerField IN UNNEST(?)" + + " AND IntegerField < ?" + + " AND FloatField > ?" + + " AND NumericField < ?" 
+ + " AND BigNumericField = ?"; + + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(DATASET)) + .setParameterMode("POSITIONAL") + .setUseLegacySql(false) + .setDryRun(true) + .build(); + + Job job = bigquery.create(JobInfo.of(queryConfig)); + JobStatistics.QueryStatistics statistics = job.getStatistics(); + + assertNotNull(statistics.getTotalBytesProcessed()); + } + + @Test + void testPositionalQueryParameters() throws InterruptedException { String query = "SELECT TimestampField, StringField, BooleanField, BigNumericField, BigNumericField1, BigNumericField2, BigNumericField3, BigNumericField4 FROM " + TABLE_ID.getTable() @@ -2151,12 +5334,18 @@ public void testPositionalQueryParameters() throws InterruptedException { .addPositionalParameter(bigNumericParameter4) .build(); TableResult result = bigquery.query(config); + assertNotNull(result.getJobId()); assertEquals(QUERY_RESULT_SCHEMA_BIGNUMERIC, result.getSchema()); assertEquals(2, Iterables.size(result.getValues())); for (FieldValueList values : result.iterateAll()) { - assertEquals("1.40845209522E9", values.get(0).getValue()); + // https://github.com/googleapis/java-bigquery/issues/2056. String comparison of values, eg + // 1.40845209522E9 vs 1408452095.22 seems to be failing, so comparing the values as epoc + // (Long) instead + assertEquals( + (long) Double.parseDouble("1.40845209522E9"), + (long) Double.parseDouble(values.get(0).getValue().toString())); assertEquals("stringValue", values.get(1).getValue()); - assertEquals(false, values.get(2).getBooleanValue()); + assertFalse(values.get(2).getBooleanValue()); assertEquals("0.33333333333333333333333333333333333333", values.get(3).getValue()); assertEquals("0.00000000000000000000000000000000000001", values.get(4).getValue()); assertEquals("-100000000000000000000000000000000000000", values.get(5).getValue()); @@ -2169,8 +5358,29 @@ public void testPositionalQueryParameters() throws InterruptedException { } } + /* TODO(prasmish): expand below test case with all the fields shown in the above test case */ + @Test + void testExecuteSelectWithPositionalQueryParameters() throws BigQuerySQLException { + String query = + "SELECT TimestampField, StringField FROM " + + TABLE_ID.getTable() + + " WHERE StringField = ?" 
+ + " AND TimestampField > ?"; + QueryParameterValue stringParameter = QueryParameterValue.string("stringValue"); + QueryParameterValue timestampParameter = + QueryParameterValue.timestamp("2014-01-01 07:00:00.000000+00:00"); + Parameter stringParam = Parameter.newBuilder().setValue(stringParameter).build(); + Parameter timeStampParam = Parameter.newBuilder().setValue(timestampParameter).build(); + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder().setDefaultDataset(DatasetId.of(DATASET)).build(); + Connection connection = bigquery.createConnection(connectionSettings); + List parameters = ImmutableList.of(stringParam, timeStampParam); + BigQueryResult rs = connection.executeSelect(query, parameters); + assertEquals(2, rs.getTotalRows()); + } + @Test - public void testNamedQueryParameters() throws InterruptedException { + void testNamedQueryParameters() throws InterruptedException { String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable() @@ -2187,12 +5397,37 @@ public void testNamedQueryParameters() throws InterruptedException { .addNamedParameter("integerList", intArrayParameter) .build(); TableResult result = bigquery.query(config); + assertNotNull(result.getJobId()); assertEquals(QUERY_RESULT_SCHEMA, result.getSchema()); assertEquals(2, Iterables.size(result.getValues())); } @Test - public void testStructNamedQueryParameters() throws InterruptedException { + void testExecuteSelectWithNamedQueryParameters() throws BigQuerySQLException { + String query = + "SELECT TimestampField, StringField, BooleanField FROM " + + TABLE_ID.getTable() + + " WHERE StringField = @stringParam" + + " AND IntegerField IN UNNEST(@integerList)"; + QueryParameterValue stringParameter = QueryParameterValue.string("stringValue"); + QueryParameterValue intArrayParameter = + QueryParameterValue.array(new Integer[] {3, 4}, Integer.class); + Parameter stringParam = + Parameter.newBuilder().setName("stringParam").setValue(stringParameter).build(); + Parameter intArrayParam = + Parameter.newBuilder().setName("integerList").setValue(intArrayParameter).build(); + + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder().setDefaultDataset(DatasetId.of(DATASET)).build(); + Connection connection = bigquery.createConnection(connectionSettings); + List parameters = ImmutableList.of(stringParam, intArrayParam); + BigQueryResult rs = connection.executeSelect(query, parameters); + assertEquals(2, rs.getTotalRows()); + } + + /* TODO(prasmish): replicate relevant parts of the test case for executeSelect */ + @Test + void testStructNamedQueryParameters() throws InterruptedException { QueryParameterValue booleanValue = QueryParameterValue.bool(true); QueryParameterValue stringValue = QueryParameterValue.string("test-stringField"); QueryParameterValue integerValue = QueryParameterValue.int64(10); @@ -2201,7 +5436,7 @@ public void testStructNamedQueryParameters() throws InterruptedException { struct.put("integerField", integerValue); struct.put("stringField", stringValue); QueryParameterValue recordValue = QueryParameterValue.struct(struct); - String query = "SELECT STRUCT(@recordField) AS record"; + String query = "SELECT @recordField AS record"; QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query) .setDefaultDataset(DATASET) @@ -2209,18 +5444,224 @@ public void testStructNamedQueryParameters() throws InterruptedException { .addNamedParameter("recordField", recordValue) .build(); TableResult result = bigquery.query(config); + 
assertNotNull(result.getJobId()); + assertEquals(1, Iterables.size(result.getValues())); + for (FieldValueList values : result.iterateAll()) { + for (FieldValue value : values) { + assertsFieldValue(value); + } + } + } + + @Test + void testRepeatedRecordNamedQueryParameters() throws InterruptedException { + String[] stringValues = new String[] {"test-stringField", "test-stringField2"}; + List tuples = new ArrayList<>(); + for (int i = 0; i < 2; i++) { + QueryParameterValue stringValue = QueryParameterValue.string(stringValues[i]); + Map struct = new HashMap<>(); + struct.put("stringField", stringValue); + QueryParameterValue recordValue = QueryParameterValue.struct(struct); + tuples.add(recordValue); + } + + QueryParameterValue repeatedRecord = + QueryParameterValue.array(tuples.toArray(), StandardSQLTypeName.STRUCT); + String query = "SELECT @repeatedRecordField AS repeatedRecord"; + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DATASET) + .setUseLegacySql(false) + .addNamedParameter("repeatedRecordField", repeatedRecord) + .build(); + TableResult result = bigquery.query(config); assertEquals(1, Iterables.size(result.getValues())); + + FieldList subSchema = result.getSchema().getFields().get("repeatedRecord").getSubFields(); for (FieldValueList values : result.iterateAll()) { for (FieldValue value : values) { - for (FieldValue record : value.getRecordValue()) { - assertsFieldValue(record); + assertEquals(FieldValue.Attribute.REPEATED, value.getAttribute()); + assertEquals(2, value.getRepeatedValue().size()); + for (int i = 0; i < 2; i++) { + FieldValue record = value.getRepeatedValue().get(i); + assertEquals(FieldValue.Attribute.RECORD, record.getAttribute()); + FieldValueList recordValue = record.getRecordValue(); + assertEquals( + stringValues[i], + FieldValueList.of(recordValue, subSchema).get("stringField").getValue()); } } } } @Test - public void testStructQuery() throws InterruptedException { + void testUnnestRepeatedRecordNamedQueryParameter() throws InterruptedException { + Boolean[] boolValues = new Boolean[] {true, false}; + List tuples = new ArrayList<>(); + for (int i = 0; i < 2; i++) { + QueryParameterValue boolValue = QueryParameterValue.bool(boolValues[i]); + Map struct = new HashMap<>(); + struct.put("boolField", boolValue); + QueryParameterValue recordValue = QueryParameterValue.struct(struct); + tuples.add(recordValue); + } + + QueryParameterValue repeatedRecord = + QueryParameterValue.array(tuples.toArray(), StandardSQLTypeName.STRUCT); + String query = + "SELECT * FROM (SELECT STRUCT(" + + boolValues[0] + + " AS boolField) AS repeatedRecord) WHERE repeatedRecord IN UNNEST(@repeatedRecordField)"; + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DATASET) + .setUseLegacySql(false) + .addNamedParameter("repeatedRecordField", repeatedRecord) + .build(); + TableResult result = bigquery.query(config); + assertEquals(1, Iterables.size(result.getValues())); + + FieldList subSchema = result.getSchema().getFields().get("repeatedRecord").getSubFields(); + for (FieldValueList values : result.iterateAll()) { + for (FieldValue value : values) { + assertEquals(FieldValue.Attribute.RECORD, value.getAttribute()); + FieldValueList recordValue = value.getRecordValue(); + assertEquals( + boolValues[0], + FieldValueList.of(recordValue, subSchema).get("boolField").getBooleanValue()); + } + } + } + + @Test + void testUnnestRepeatedRecordNamedQueryParameterFromDataset() throws InterruptedException { 
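+ // Builds a STRUCT parameter mirroring one of the address records inserted by
+ // setUpRepeatedRecordTable, then matches it with `TEMP IN UNNEST(addresses)`; the
+ // LinkedHashMap keeps the struct fields in declaration order, which the positional struct
+ // comparison presumably relies on.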
+ TableId tableId = TableId.of(DATASET, "test_repeated_record_table"); + setUpRepeatedRecordTable(tableId); + + List tuples = new ArrayList<>(); + QueryParameterValue statusValue = QueryParameterValue.string("single"); + QueryParameterValue addressValue = QueryParameterValue.string("123 this lane"); + QueryParameterValue cityValue = QueryParameterValue.string("Toronto"); + QueryParameterValue stateValue = QueryParameterValue.string("ON"); + QueryParameterValue zipValue = QueryParameterValue.string("1h2j34"); + QueryParameterValue numberOfYearsValue = QueryParameterValue.string("3"); + + Map struct = new LinkedHashMap<>(); + struct.put("statusValue", statusValue); + struct.put("addressValue", addressValue); + struct.put("cityValue", cityValue); + struct.put("stateValue", stateValue); + struct.put("zipValue", zipValue); + struct.put("numberOfYearsValue", numberOfYearsValue); + QueryParameterValue recordValue = QueryParameterValue.struct(struct); + tuples.add(recordValue); + + QueryParameterValue repeatedRecord = + QueryParameterValue.array(tuples.toArray(), StandardSQLTypeName.STRUCT); + + String query = + "SELECT * FROM " + + tableId.getTable() + + ", UNNEST(@repeatedRecord) AS TEMP where TEMP IN UNNEST(addresses);"; + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DATASET) + .setUseLegacySql(false) + .addNamedParameter("repeatedRecord", repeatedRecord) + .build(); + TableResult results = bigquery.query(queryConfig); + + assertEquals(1, Iterables.size(results.getValues())); + for (FieldValueList values : results.iterateAll()) { + assertEquals("1", values.get("ID").getStringValue()); + assertEquals("first_name1", values.get("FirstName").getStringValue()); + assertEquals(2, values.get("Addresses").getRecordValue().size()); + } + } + + private void setUpRepeatedRecordTable(TableId tableId) { + StandardTableDefinition tableDefinition = + StandardTableDefinition.of(REPEATED_RECORD_TABLE_SCHEMA); + TableInfo tableInfo = TableInfo.of(tableId, tableDefinition); + bigquery.create(tableInfo); + + ImmutableMap.Builder builder1 = ImmutableMap.builder(); + builder1.put("ID", "1"); + builder1.put("FirstName", "first_name1"); + builder1.put("LastName", "last_name1"); + builder1.put("DOB", "1995-08-09"); + builder1.put( + "Addresses", + ImmutableList.of( + ImmutableMap.of( + "Status", "single", + "Address", "123 this lane", + "City", "Toronto", + "State", "ON", + "Zip", "1h2j34", + "NumberOfYears", "3"), + ImmutableMap.of( + "Status", "couple", + "Address", "345 that lane", + "City", "Maple", + "State", "ON", + "Zip", "1h2j34", + "NumberOfYears", "5"))); + + ImmutableMap.Builder builder2 = ImmutableMap.builder(); + builder2.put("ID", "2"); + builder2.put("FirstName", "first_name2"); + builder2.put("LastName", "last_name2"); + builder2.put("DOB", "1992-03-19"); + builder2.put( + "Addresses", + ImmutableList.of( + ImmutableMap.of( + "Status", "single", + "Address", "97 Kota lane", + "City", "Ottawa", + "State", "ON", + "Zip", "1h2j34", + "NumberOfYears", "3"), + ImmutableMap.of( + "Status", "couple", + "Address", "75 Malta lane", + "City", "Victoria", + "State", "AL", + "Zip", "1h2j34", + "NumberOfYears", "5"))); + + InsertAllRequest request = + InsertAllRequest.newBuilder(tableInfo.getTableId()) + .addRow(builder1.build()) + .addRow(builder2.build()) + .build(); + bigquery.insertAll(request); + } + + @Test + void testEmptyRepeatedRecordNamedQueryParameters() throws InterruptedException { + QueryParameterValue[] tuples = {}; + + QueryParameterValue 
repeatedRecord = + QueryParameterValue.array(tuples, StandardSQLTypeName.STRUCT); + String query = + "SELECT * FROM (SELECT STRUCT(false AS boolField) AS repeatedRecord) WHERE repeatedRecord IN UNNEST(@repeatedRecordField)"; + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DATASET) + .setUseLegacySql(false) + .addNamedParameter("repeatedRecordField", repeatedRecord) + .build(); + + assertThrows( + BigQueryException.class, + () -> bigquery.query(config), + "an empty array of struct query parameter shouldn't work with 'IN UNNEST'"); + } + + @Test + void testStructQuery() throws InterruptedException { // query into a table String query = String.format("SELECT RecordField FROM %s.%s", DATASET, TABLE_ID.getTable()); QueryJobConfiguration config = @@ -2229,24 +5670,26 @@ public void testStructQuery() throws InterruptedException { .setUseLegacySql(false) .build(); TableResult result = bigquery.query(config); + assertNotNull(result.getJobId()); assertEquals(2, Iterables.size(result.getValues())); for (FieldValueList values : result.iterateAll()) { for (FieldValue value : values) { - assertEquals(null, value.getRecordValue().get("StringField").getValue()); - assertEquals(true, value.getRecordValue().get("BooleanField").getBooleanValue()); + assertNull(value.getRecordValue().get("StringField").getValue()); + assertTrue(value.getRecordValue().get("BooleanField").getBooleanValue()); } } } private static void assertsFieldValue(FieldValue record) { assertEquals(FieldValue.Attribute.RECORD, record.getAttribute()); - assertEquals(true, record.getRecordValue().get("booleanField").getBooleanValue()); + assertTrue(record.getRecordValue().get("booleanField").getBooleanValue()); assertEquals(10, record.getRecordValue().get("integerField").getLongValue()); assertEquals("test-stringField", record.getRecordValue().get("stringField").getStringValue()); } + /* TODO(prasmish): replicate relevant parts of the test case for executeSelect */ @Test - public void testNestedStructNamedQueryParameters() throws InterruptedException { + void testNestedStructNamedQueryParameters() throws InterruptedException { QueryParameterValue booleanValue = QueryParameterValue.bool(true); QueryParameterValue stringValue = QueryParameterValue.string("test-stringField"); QueryParameterValue integerValue = QueryParameterValue.int64(10); @@ -2261,7 +5704,7 @@ public void testNestedStructNamedQueryParameters() throws InterruptedException { structValue.put("string", stringValue); structValue.put("struct", recordValue); QueryParameterValue nestedRecordField = QueryParameterValue.struct(structValue); - String query = "SELECT STRUCT(@nestedRecordField) AS record"; + String query = "SELECT @nestedRecordField AS record"; QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query) .setDefaultDataset(DATASET) @@ -2269,27 +5712,26 @@ public void testNestedStructNamedQueryParameters() throws InterruptedException { .addNamedParameter("nestedRecordField", nestedRecordField) .build(); TableResult result = bigquery.query(config); + assertNotNull(result.getJobId()); assertEquals(1, Iterables.size(result.getValues())); for (FieldValueList values : result.iterateAll()) { for (FieldValue value : values) { - assertEquals(FieldValue.Attribute.RECORD, value.getAttribute()); - for (FieldValue record : value.getRecordValue()) { - assertEquals( - true, record.getRecordValue().get(0).getRecordValue().get(0).getBooleanValue()); - assertEquals(10, 
record.getRecordValue().get(0).getRecordValue().get(1).getLongValue()); - assertEquals( - "test-stringField", - record.getRecordValue().get(0).getRecordValue().get(2).getStringValue()); - assertEquals(true, record.getRecordValue().get(1).getBooleanValue()); - assertEquals("test-stringField", record.getRecordValue().get(2).getStringValue()); - assertEquals(10, record.getRecordValue().get(3).getLongValue()); - } + assertEquals(Attribute.RECORD, value.getAttribute()); + assertTrue(value.getRecordValue().get(0).getRecordValue().get(0).getBooleanValue()); + assertEquals(10, value.getRecordValue().get(0).getRecordValue().get(1).getLongValue()); + assertEquals( + "test-stringField", + value.getRecordValue().get(0).getRecordValue().get(2).getStringValue()); + assertTrue(value.getRecordValue().get(1).getBooleanValue()); + assertEquals("test-stringField", value.getRecordValue().get(2).getStringValue()); + assertEquals(10, value.getRecordValue().get(3).getLongValue()); } } } + /* TODO(prasmish): replicate relevant parts of the test case for executeSelect */ @Test - public void testBytesParameter() throws Exception { + void testBytesParameter() throws Exception { String query = "SELECT BYTE_LENGTH(@p) AS length"; QueryParameterValue bytesParameter = QueryParameterValue.bytes(new byte[] {1, 3}); QueryJobConfiguration config = @@ -2299,6 +5741,7 @@ public void testBytesParameter() throws Exception { .addNamedParameter("p", bytesParameter) .build(); TableResult result = bigquery.query(config); + assertNotNull(result.getJobId()); int rowCount = 0; for (FieldValueList row : result.getValues()) { rowCount++; @@ -2309,7 +5752,30 @@ public void testBytesParameter() throws Exception { } @Test - public void testListJobs() { + void testGeographyParameter() throws Exception { + // Issues a simple ST_DISTANCE using two geopoints, one being a named geography parameter. 
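+ // The two WKT POINTs are Seattle-area coordinates roughly 2.9 km apart, so the < 3000 meter
+ // predicate below should evaluate to true for the single result row.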
+ String query = + "SELECT ST_DISTANCE(ST_GEOGFROMTEXT(\"POINT(-122.335503 47.625536)\"), @geo) < 3000 as within3k"; + QueryParameterValue geoParameterValue = + QueryParameterValue.geography("POINT(-122.3509153 47.6495389)"); + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(DATASET)) + .setUseLegacySql(false) + .addNamedParameter("geo", geoParameterValue) + .build(); + TableResult result = bigquery.query(config); + assertNotNull(result.getJobId()); + int rowCount = 0; + for (FieldValueList row : result.getValues()) { + rowCount++; + assertTrue(row.get(0).getBooleanValue()); + } + assertEquals(1, rowCount); + } + + @Test + void testListJobs() { Page jobs = bigquery.listJobs(); for (Job job : jobs.getValues()) { assertNotNull(job.getJobId()); @@ -2321,7 +5787,7 @@ public void testListJobs() { } @Test - public void testListJobsWithSelectedFields() { + void testListJobsWithSelectedFields() { Page jobs = bigquery.listJobs(JobListOption.fields(JobField.USER_EMAIL)); for (Job job : jobs.getValues()) { assertNotNull(job.getJobId()); @@ -2333,7 +5799,7 @@ public void testListJobsWithSelectedFields() { } @Test - public void testListJobsWithCreationBounding() { + void testListJobsWithCreationBounding() { long currentMillis = currentTimeMillis(); long lowerBound = currentMillis - 3600 * 1000; long upperBound = currentMillis; @@ -2349,16 +5815,16 @@ public void testListJobsWithCreationBounding() { foundMax = Math.max(job.getStatistics().getCreationTime(), foundMax); } assertTrue( - "Found min job time " + foundMin + " earlier than " + lowerBound, foundMin >= lowerBound); + foundMin >= lowerBound, "Found min job time " + foundMin + " earlier than " + lowerBound); assertTrue( - "Found max job time " + foundMax + " later than " + upperBound, foundMax <= upperBound); - assertTrue("no jobs listed", jobCount > 0); + foundMax <= upperBound, "Found max job time " + foundMax + " later than " + upperBound); + assertTrue(jobCount > 0, "no jobs listed"); } @Test - public void testCreateAndGetJob() throws InterruptedException, TimeoutException { - String sourceTableName = "test_create_and_get_job_source_table"; - String destinationTableName = "test_create_and_get_job_destination_table"; + void testCreateAndGetJob() throws InterruptedException, TimeoutException { + String sourceTableName = generateRandomName("test_create_and_get_job_source_table"); + String destinationTableName = generateRandomName("test_create_and_get_job_destination_table"); TableId sourceTable = TableId.of(DATASET, sourceTableName); StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(sourceTable, tableDefinition); @@ -2386,20 +5852,39 @@ public void testCreateAndGetJob() throws InterruptedException, TimeoutException assertNotNull(remoteJob.getStatus()); assertEquals(createdJob.getSelfLink(), remoteJob.getSelfLink()); assertEquals(createdJob.getUserEmail(), remoteJob.getUserEmail()); - assertTrue(createdTable.delete()); - - Job completedJob = remoteJob.waitFor(RetryOption.totalTimeout(Duration.ofMinutes(1))); + Job completedJob = remoteJob.waitFor(RetryOption.totalTimeoutDuration(Duration.ofMinutes(1))); assertNotNull(completedJob); assertNull(completedJob.getStatus().getError()); + assertTrue(createdTable.delete()); assertTrue(bigquery.delete(destinationTable)); } @Test - public void testCreateAndGetJobWithSelectedFields() - throws InterruptedException, TimeoutException { - String sourceTableName = 
"test_create_and_get_job_with_selected_fields_source_table"; - String destinationTableName = "test_create_and_get_job_with_selected_fields_destination_table"; + void testCreateJobAndWaitForWithRetryOptions() throws InterruptedException, TimeoutException { + // Note: This only tests the non failure/retry case. For retry cases, see unit tests with mocked + // RPC calls. + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder("SELECT CURRENT_TIMESTAMP() as ts") + .setDefaultDataset(DATASET) + .setUseLegacySql(false) + .build(); + + BigQueryRetryConfig bigQueryRetryConfig = BigQueryRetryConfig.newBuilder().build(); + JobOption bigQueryRetryConfigOption = JobOption.bigQueryRetryConfig(bigQueryRetryConfig); + JobOption retryOptions = JobOption.retryOptions(RetryOption.maxAttempts(1)); + + Job job = bigquery.create(JobInfo.of(config), bigQueryRetryConfigOption, retryOptions); + job = job.waitFor(bigQueryRetryConfig); + assertEquals(DONE, job.getStatus().getState()); + } + + @Test + void testCreateAndGetJobWithSelectedFields() throws InterruptedException, TimeoutException { + String sourceTableName = + generateRandomName("test_create_and_get_job_with_selected_fields_source_table"); + String destinationTableName = + generateRandomName("test_create_and_get_job_with_selected_fields_destination_table"); TableId sourceTable = TableId.of(DATASET, sourceTableName); StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(sourceTable, tableDefinition); @@ -2434,21 +5919,20 @@ public void testCreateAndGetJobWithSelectedFields() assertNull(remoteJob.getStatus()); assertNull(remoteJob.getSelfLink()); assertNull(remoteJob.getUserEmail()); - assertTrue(createdTable.delete()); - Job completedJob = remoteJob.waitFor( - RetryOption.initialRetryDelay(Duration.ofSeconds(1)), - RetryOption.totalTimeout(Duration.ofMinutes(1))); + RetryOption.initialRetryDelayDuration(Duration.ofSeconds(1)), + RetryOption.totalTimeoutDuration(Duration.ofMinutes(1))); assertNotNull(completedJob); + assertTrue(createdTable.delete()); assertNull(completedJob.getStatus().getError()); assertTrue(bigquery.delete(destinationTable)); } @Test - public void testCopyJob() throws InterruptedException, TimeoutException { - String sourceTableName = "test_copy_job_source_table"; - String destinationTableName = "test_copy_job_destination_table"; + void testCopyJob() throws InterruptedException, TimeoutException { + String sourceTableName = generateRandomName("test_copy_job_source_table"); + String destinationTableName = generateRandomName("test_copy_job_destination_table"); TableId sourceTable = TableId.of(DATASET, sourceTableName); StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); TableInfo tableInfo = TableInfo.of(sourceTable, tableDefinition); @@ -2456,11 +5940,18 @@ public void testCopyJob() throws InterruptedException, TimeoutException { assertNotNull(createdTable); assertEquals(DATASET, createdTable.getTableId().getDataset()); assertEquals(sourceTableName, createdTable.getTableId().getTable()); + TableId destinationTable = TableId.of(DATASET, destinationTableName); CopyJobConfiguration configuration = CopyJobConfiguration.of(destinationTable, sourceTable); Job remoteJob = bigquery.create(JobInfo.of(configuration)); remoteJob = remoteJob.waitFor(); assertNull(remoteJob.getStatus().getError()); + + CopyStatistics copyStatistics = remoteJob.getStatistics(); + assertNotNull(copyStatistics); + assertEquals(0, 
copyStatistics.getCopiedRows().longValue()); + assertEquals(0, copyStatistics.getCopiedLogicalBytes().longValue()); + Table remoteTable = bigquery.getTable(DATASET, destinationTableName); assertNotNull(remoteTable); assertEquals(destinationTable.getDataset(), remoteTable.getTableId().getDataset()); @@ -2471,9 +5962,119 @@ public void testCopyJob() throws InterruptedException, TimeoutException { } @Test - public void testCopyJobWithLabels() throws InterruptedException { - String sourceTableName = "test_copy_job_source_table_label"; - String destinationTableName = "test_copy_job_destination_table_label"; + void testCopyJobStatistics() throws InterruptedException, TimeoutException { + String sourceTableName = generateRandomName("test_copy_job_statistics_source_table"); + String destinationTableName = generateRandomName("test_copy_job_statistics_destination_table"); + + QueryJobConfiguration createTable = + QueryJobConfiguration.newBuilder( + String.format( + "CREATE TABLE %s AS SELECT num FROM UNNEST(GENERATE_ARRAY(0,5)) as num", + sourceTableName)) + .setDefaultDataset(DatasetId.of(DATASET)) + .setUseLegacySql(false) + .build(); + bigquery.query(createTable); + + // Copy the created table. + TableId sourceTable = TableId.of(DATASET, sourceTableName); + TableId destinationTable = TableId.of(DATASET, destinationTableName); + CopyJobConfiguration configuration = CopyJobConfiguration.of(destinationTable, sourceTable); + Job remoteJob = bigquery.create(JobInfo.of(configuration)); + remoteJob = remoteJob.waitFor(); + assertNull(remoteJob.getStatus().getError()); + + CopyStatistics copyStatistics = remoteJob.getStatistics(); + assertNotNull(copyStatistics); + assertEquals(6, copyStatistics.getCopiedRows().longValue()); + // Assert != 0 since copied logical bytes may return a non-deterministic value due to how the
+ // data is represented.
+ assertNotEquals(0, copyStatistics.getCopiedLogicalBytes().longValue()); + } + + @Test + void testSnapshotTableCopyJob() throws InterruptedException { + String sourceTableName = generateRandomName("test_copy_job_base_table"); + String ddlTableName = TABLE_ID_SIMPLE.getTable(); + // this creates a snapshot table at specified snapshotTime + String snapshotTableName = generateRandomName("test_snapshot_table"); + // Create source table with some data in it + String ddlQuery = + String.format( + "CREATE OR REPLACE TABLE %s (" + + "TimestampField TIMESTAMP OPTIONS(description='TimestampDescription'), " + + "StringField STRING OPTIONS(description='StringDescription'), " + + "BooleanField BOOLEAN OPTIONS(description='BooleanDescription') " + + ") AS SELECT * FROM %s", + sourceTableName, ddlTableName); + QueryJobConfiguration ddlConfig = + QueryJobConfiguration.newBuilder(ddlQuery).setDefaultDataset(DatasetId.of(DATASET)).build(); + TableId sourceTableId = TableId.of(DATASET, sourceTableName); + TableResult result = bigquery.query(ddlConfig); + assertNotNull(result.getJobId()); + assertEquals(SIMPLE_TABLE_SCHEMA, result.getSchema()); + Table remoteTable = bigquery.getTable(DATASET, sourceTableName); + assertNotNull(remoteTable); + + // Create snapshot table using source table as the base table + TableId snapshotTableId = TableId.of(DATASET, snapshotTableName); + CopyJobConfiguration snapshotConfiguration = + CopyJobConfiguration.newBuilder(snapshotTableId, sourceTableId) + .setOperationType("SNAPSHOT") + .build(); + Job createdJob = bigquery.create(JobInfo.of(snapshotConfiguration)); + CopyJobConfiguration createdConfiguration = createdJob.getConfiguration(); + assertNotNull(createdConfiguration.getSourceTables()); + assertNotNull(createdConfiguration.getOperationType()); + assertNotNull(createdConfiguration.getDestinationTable()); + Job completedJob = createdJob.waitFor(); + assertNull(completedJob.getStatus().getError()); + Table snapshotTable = bigquery.getTable(DATASET, snapshotTableName); + assertNotNull(snapshotTable); + assertEquals(snapshotTableId.getDataset(), snapshotTable.getTableId().getDataset()); + assertEquals(snapshotTableName, snapshotTable.getTableId().getTable()); + assertTrue(snapshotTable.getDefinition() instanceof SnapshotTableDefinition); + assertEquals(SIMPLE_TABLE_SCHEMA, snapshotTable.getDefinition().getSchema()); + assertNotNull(((SnapshotTableDefinition) snapshotTable.getDefinition()).getSnapshotTime()); + assertEquals( + sourceTableName, + ((SnapshotTableDefinition) snapshotTable.getDefinition()).getBaseTableId().getTable()); + + // Restore base table to a new table + String restoredTableName = generateRandomName("test_restore_table"); + TableId restoredTableId = TableId.of(DATASET, restoredTableName); + CopyJobConfiguration restoreConfiguration = + CopyJobConfiguration.newBuilder(restoredTableId, snapshotTableId) + .setOperationType("RESTORE") + .build(); + Job createdRestoreJob = bigquery.create(JobInfo.of(restoreConfiguration)); + CopyJobConfiguration createdRestoreConfiguration = createdRestoreJob.getConfiguration(); + assertEquals( + restoreConfiguration.getOperationType(), createdRestoreConfiguration.getOperationType()); + assertEquals( + restoreConfiguration.getDestinationTable().getTable(), + createdRestoreConfiguration.getDestinationTable().getTable()); + Job completedRestoreJob = createdRestoreJob.waitFor(); + assertNull(completedRestoreJob.getStatus().getError()); + Table restoredTable = bigquery.getTable(DATASET, restoredTableName); + 
assertNotNull(restoredTable); + assertEquals(restoredTableId.getDataset(), restoredTable.getTableId().getDataset()); + assertEquals(restoredTableName, restoredTable.getTableId().getTable()); + assertEquals(SIMPLE_TABLE_SCHEMA, restoredTable.getDefinition().getSchema()); + assertEquals(snapshotTable.getNumBytes(), restoredTable.getNumBytes()); + assertEquals(snapshotTable.getNumRows(), restoredTable.getNumRows()); + + // Clean up + assertTrue(remoteTable.delete()); + assertTrue(restoredTable.delete()); + assertTrue(snapshotTable.delete()); + } + + @Test + void testCopyJobWithLabelsAndExpTime() throws InterruptedException { + String destExpiryTime = "2099-12-31T23:59:59.999999999Z"; + String sourceTableName = generateRandomName("test_copy_job_source_table_label"); + String destinationTableName = generateRandomName("test_copy_job_destination_table_label"); Map labels = ImmutableMap.of("test_job_name", "test_copy_job"); TableId sourceTable = TableId.of(DATASET, sourceTableName); StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA); @@ -2482,21 +6083,27 @@ public void testCopyJobWithLabels() throws InterruptedException { assertNotNull(createdTable); TableId destinationTable = TableId.of(DATASET, destinationTableName); CopyJobConfiguration configuration = - CopyJobConfiguration.newBuilder(destinationTable, sourceTable).setLabels(labels).build(); + CopyJobConfiguration.newBuilder(destinationTable, sourceTable) + .setLabels(labels) + .setDestinationExpirationTime(destExpiryTime) + .build(); Job remoteJob = bigquery.create(JobInfo.of(configuration)); remoteJob = remoteJob.waitFor(); assertNull(remoteJob.getStatus().getError()); CopyJobConfiguration copyJobConfiguration = remoteJob.getConfiguration(); assertEquals(labels, copyJobConfiguration.getLabels()); + assertNotNull(copyJobConfiguration.getDestinationExpirationTime()); + assertEquals(destExpiryTime, copyJobConfiguration.getDestinationExpirationTime()); Table remoteTable = bigquery.getTable(DATASET, destinationTableName); assertNotNull(remoteTable); assertTrue(createdTable.delete()); assertTrue(remoteTable.delete()); } + /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test public void testQueryJob() throws InterruptedException, TimeoutException { - String tableName = "test_query_job_table"; + String tableName = generateRandomName("test_query_job_table"); String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); TableId destinationTable = TableId.of(DATASET, tableName); QueryJobConfiguration configuration = @@ -2509,6 +6116,7 @@ public void testQueryJob() throws InterruptedException, TimeoutException { assertNull(remoteJob.getStatus().getError()); TableResult result = remoteJob.getQueryResults(); + assertNotNull(result.getJobId()); assertEquals(QUERY_RESULT_SCHEMA, result.getSchema()); int rowCount = 0; for (FieldValueList row : result.getValues()) { @@ -2520,19 +6128,28 @@ public void testQueryJob() throws InterruptedException, TimeoutException { assertEquals(FieldValue.Attribute.PRIMITIVE, booleanCell.getAttribute()); assertEquals(1408452095220000L, timestampCell.getTimestampValue()); assertEquals("stringValue", stringCell.getStringValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); rowCount++; } assertEquals(2, rowCount); assertTrue(bigquery.delete(destinationTable)); Job queryJob = bigquery.getJob(remoteJob.getJobId()); JobStatistics.QueryStatistics statistics = queryJob.getStatistics(); + if 
(statistics.getBiEngineStats() != null) { + assertEquals("DISABLED", statistics.getBiEngineStats().getBiEngineMode()); + assertEquals( + "OTHER_REASON", statistics.getBiEngineStats().getBiEngineReasons().get(0).getCode()); + assertEquals( + "Only SELECT queries without a destination table can be accelerated.", + statistics.getBiEngineStats().getBiEngineReasons().get(0).getMessage()); + } assertNotNull(statistics.getQueryPlan()); } + /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testQueryJobWithConnectionProperties() throws InterruptedException { - String tableName = "test_query_job_table_connection_properties"; + void testQueryJobWithConnectionProperties() throws InterruptedException { + String tableName = generateRandomName("test_query_job_table_connection_properties"); String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); TableId destinationTable = TableId.of(DATASET, tableName); QueryJobConfiguration configuration = @@ -2549,9 +6166,10 @@ public void testQueryJobWithConnectionProperties() throws InterruptedException { assertTrue(bigquery.delete(destinationTable)); } + /* TODO(prasmish): replicate the entire test case for executeSelect */ @Test - public void testQueryJobWithLabels() throws InterruptedException, TimeoutException { - String tableName = "test_query_job_table"; + void testQueryJobWithLabels() throws InterruptedException, TimeoutException { + String tableName = generateRandomName("test_query_job_table"); String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); Map labels = ImmutableMap.of("test-job-name", "test-query-job"); TableId destinationTable = TableId.of(DATASET, tableName); @@ -2573,8 +6191,36 @@ public void testQueryJobWithLabels() throws InterruptedException, TimeoutExcepti } @Test - public void testQueryJobWithRangePartitioning() throws InterruptedException { - String tableName = "test_query_job_table_rangepartitioning"; + void testQueryJobWithSearchReturnsSearchStatisticsUnused() throws InterruptedException { + String tableName = generateRandomName("test_query_job_table"); + String query = + "SELECT * FROM " + TABLE_ID.getTable() + " WHERE search(StringField, \"stringValue\")"; + TableId destinationTable = TableId.of(DATASET, tableName); + try { + QueryJobConfiguration configuration = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DatasetId.of(DATASET)) + .setDestinationTable(destinationTable) + .build(); + Job remoteJob = bigquery.create(JobInfo.of(configuration)); + remoteJob = remoteJob.waitFor(); + assertNull(remoteJob.getStatus().getError()); + JobStatistics.QueryStatistics stats = remoteJob.getStatistics(); + assertNotNull(stats.getSearchStats()); + assertEquals("UNUSED", stats.getSearchStats().getIndexUsageMode()); + assertNotNull(stats.getSearchStats().getIndexUnusedReasons()); + assertEquals( + "INDEX_CONFIG_NOT_AVAILABLE", + stats.getSearchStats().getIndexUnusedReasons().get(0).getCode()); + } finally { + bigquery.delete(destinationTable); + } + } + + /* TODO(prasmish): replicate the entire test case for executeSelect */ + @Test + void testQueryJobWithRangePartitioning() throws InterruptedException { + String tableName = generateRandomName("test_query_job_table_rangepartitioning"); String query = "SELECT IntegerField, TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); @@ -2598,8 +6244,8 @@ public void testQueryJobWithRangePartitioning() throws InterruptedException { } @Test - public void 
testLoadJobWithRangePartitioning() throws InterruptedException { - String tableName = "test_load_job_table_rangepartitioning"; + void testLoadJobWithRangePartitioning() throws InterruptedException { + String tableName = generateRandomName("test_load_job_table_rangepartitioning"); TableId destinationTable = TableId.of(DATASET, tableName); try { LoadJobConfiguration configuration = @@ -2622,8 +6268,56 @@ public void testLoadJobWithRangePartitioning() throws InterruptedException { } @Test - public void testQueryJobWithDryRun() throws InterruptedException, TimeoutException { - String tableName = "test_query_job_table"; + void testLoadJobWithDecimalTargetTypes() throws InterruptedException { + String tableName = generateRandomName("test_load_job_table_parquet_decimalTargetTypes"); + TableId destinationTable = TableId.of(DATASET, tableName); + String sourceUri = "gs://" + CLOUD_SAMPLES_DATA + "/bigquery/numeric/numeric_38_12.parquet"; + try { + LoadJobConfiguration configuration = + LoadJobConfiguration.newBuilder(destinationTable, sourceUri, FormatOptions.parquet()) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setDecimalTargetTypes(ImmutableList.of("NUMERIC", "BIGNUMERIC", "STRING")) + .build(); + Job job = bigquery.create(JobInfo.of(configuration)); + job = job.waitFor(); + assertNull(job.getStatus().getError()); + LoadJobConfiguration loadJobConfiguration = job.getConfiguration(); + assertEquals( + ImmutableList.of("NUMERIC", "BIGNUMERIC", "STRING"), + loadJobConfiguration.getDecimalTargetTypes()); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTable); + assertEquals( + "BIGNUMERIC", + remoteTable.getDefinition().getSchema().getFields().get(0).getType().toString()); + } finally { + bigquery.delete(destinationTable); + } + } + + @Test + void testExternalTableWithDecimalTargetTypes() throws InterruptedException { + String tableName = generateRandomName("test_create_external_table_parquet_decimalTargetTypes"); + TableId destinationTable = TableId.of(DATASET, tableName); + String sourceUri = "gs://" + CLOUD_SAMPLES_DATA + "/bigquery/numeric/numeric_38_12.parquet"; + ExternalTableDefinition externalTableDefinition = + ExternalTableDefinition.newBuilder(sourceUri, FormatOptions.parquet()) + .setDecimalTargetTypes(ImmutableList.of("NUMERIC", "BIGNUMERIC", "STRING")) + .build(); + TableInfo tableInfo = TableInfo.of(destinationTable, externalTableDefinition); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTable); + assertEquals( + "BIGNUMERIC", + remoteTable.getDefinition().getSchema().getFields().get(0).getType().toString()); + assertTrue(remoteTable.delete()); + } + + @Test + void testQueryJobWithDryRun() throws InterruptedException, TimeoutException { + String tableName = generateRandomName("test_query_job_table"); String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); TableId destinationTable = TableId.of(DATASET, tableName); QueryJobConfiguration configuration = @@ -2634,13 +6328,14 @@ public void testQueryJobWithDryRun() throws InterruptedException, TimeoutExcepti .build(); Job remoteJob = bigquery.create(JobInfo.of(configuration)); assertNull(remoteJob.getJobId().getJob()); + remoteJob.getStatistics(); assertEquals(DONE, remoteJob.getStatus().getState()); assertNotNull(remoteJob.getConfiguration()); } @Test - public void testExtractJob() throws InterruptedException, 
TimeoutException { - String tableName = "test_export_job_table"; + void testExtractJob() throws InterruptedException, TimeoutException { + String tableName = generateRandomName("test_export_job_table"); TableId destinationTable = TableId.of(DATASET, tableName); Map<String, String> labels = ImmutableMap.of("test-job-name", "test-load-extract-job"); LoadJobConfiguration configuration = @@ -2653,6 +6348,8 @@ public void testExtractJob() throws InterruptedException, TimeoutException { assertNull(remoteLoadJob.getStatus().getError()); LoadJobConfiguration loadJobConfiguration = remoteLoadJob.getConfiguration(); assertEquals(labels, loadJobConfiguration.getLabels()); + LoadStatistics loadStatistics = remoteLoadJob.getStatistics(); + assertNotNull(loadStatistics); ExtractJobConfiguration extractConfiguration = ExtractJobConfiguration.newBuilder(destinationTable, "gs://" + BUCKET + "/" + EXTRACT_FILE) @@ -2662,6 +6359,13 @@ public void testExtractJob() throws InterruptedException, TimeoutException { remoteExtractJob = remoteExtractJob.waitFor(); assertNull(remoteExtractJob.getStatus().getError()); + ExtractStatistics extractStatistics = remoteExtractJob.getStatistics(); + assertNotNull(extractStatistics); + assertEquals(1L, extractStatistics.getDestinationUriFileCounts().size()); + assertEquals( + loadStatistics.getOutputBytes().longValue(), extractStatistics.getInputBytes().longValue()); + assertThat(extractStatistics.getTotalSlotMs()).isGreaterThan(0L); + String extractedCsv = new String(storage.readAllBytes(BUCKET, EXTRACT_FILE), StandardCharsets.UTF_8); assertEquals( @@ -2670,7 +6374,7 @@ public void testExtractJob() throws InterruptedException, TimeoutException { } @Test - public void testExtractJobWithModel() throws InterruptedException { + void testExtractJobWithModel() throws InterruptedException { String modelName = RemoteBigQueryHelper.generateModelName(); String sql = "CREATE MODEL `" @@ -2680,7 +6384,7 @@ public void testExtractJobWithModel() throws InterruptedException { + "`" + "OPTIONS ( " + "model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " @@ -2707,8 +6411,8 @@ public void testExtractJobWithModel() throws InterruptedException { } @Test - public void testExtractJobWithLabels() throws InterruptedException, TimeoutException { - String tableName = "test_export_job_table_label"; + void testExtractJobWithLabels() throws InterruptedException, TimeoutException { + String tableName = generateRandomName("test_export_job_table_label"); Map<String, String> labels = ImmutableMap.of("test_job_name", "test_export_job"); TableId destinationTable = TableId.of(DATASET, tableName); LoadJobConfiguration configuration = @@ -2733,8 +6437,8 @@ public void testExtractJobWithLabels() throws InterruptedException, TimeoutExcep } @Test - public void testCancelJob() throws InterruptedException, TimeoutException { - String destinationTableName = "test_cancel_query_job_table"; + void testCancelJob() throws InterruptedException, TimeoutException { + String destinationTableName = generateRandomName("test_cancel_query_job_table"); String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); TableId destinationTable = TableId.of(DATASET, destinationTableName); QueryJobConfiguration configuration = @@ -2747,13 +6451,13 @@ public void testCancelJob() throws InterruptedException, TimeoutException { } @Test - public void testCancelNonExistingJob() {
assertFalse(bigquery.cancel("test_cancel_non_existing_job")); } @Test - public void testInsertFromFile() throws InterruptedException, IOException, TimeoutException { - String destinationTableName = "test_insert_from_file_table"; + void testInsertFromFile() throws InterruptedException, IOException, TimeoutException { + String destinationTableName = generateRandomName("test_insert_from_file_table"); TableId tableId = TableId.of(DATASET, destinationTableName); WriteChannelConfiguration configuration = WriteChannelConfiguration.newBuilder(tableId) @@ -2761,20 +6465,17 @@ public void testInsertFromFile() throws InterruptedException, IOException, Timeo .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) .setSchema(TABLE_SCHEMA) .build(); - TableDataWriteChannel channel = bigquery.writer(configuration); - try { + try (TableDataWriteChannel channel = bigquery.writer(configuration)) { // A zero byte write should not throw an exception. assertEquals(0, channel.write(ByteBuffer.wrap("".getBytes(StandardCharsets.UTF_8)))); - } finally { - // Force the channel to flush by calling `close`. - channel.close(); } - channel = bigquery.writer(configuration); + TableDataWriteChannel channel = bigquery.writer(configuration); try { channel.write(ByteBuffer.wrap(JSON_CONTENT.getBytes(StandardCharsets.UTF_8))); } finally { channel.close(); } + // Channel must close before retrieving the job Job job = channel.getJob().waitFor(); LoadStatistics statistics = job.getStatistics(); assertEquals(1L, statistics.getInputFiles().longValue()); @@ -2810,13 +6511,13 @@ public void testInsertFromFile() throws InterruptedException, IOException, Timeo assertEquals("stringValue", stringCell.getStringValue()); assertEquals(0, integerArrayCell.getRepeatedValue().get(0).getLongValue()); assertEquals(1, integerArrayCell.getRepeatedValue().get(1).getLongValue()); - assertEquals(false, booleanCell.getBooleanValue()); + assertFalse(booleanCell.getBooleanValue()); assertArrayEquals(BYTES, bytesCell.getBytesValue()); assertEquals(-14182916000000L, recordCell.getRecordValue().get(0).getTimestampValue()); assertTrue(recordCell.getRecordValue().get(1).isNull()); assertEquals(1, recordCell.getRecordValue().get(2).getRepeatedValue().get(0).getLongValue()); assertEquals(0, recordCell.getRecordValue().get(2).getRepeatedValue().get(1).getLongValue()); - assertEquals(true, recordCell.getRecordValue().get(3).getBooleanValue()); + assertTrue(recordCell.getRecordValue().get(3).getBooleanValue()); assertEquals(3, integerCell.getLongValue()); assertEquals(1.2, floatCell.getDoubleValue(), 0.0001); assertEquals("POINT(-122.35022 47.649154)", geographyCell.getStringValue()); @@ -2828,9 +6529,8 @@ public void testInsertFromFile() throws InterruptedException, IOException, Timeo } @Test - public void testInsertFromFileWithLabels() - throws InterruptedException, IOException, TimeoutException { - String destinationTableName = "test_insert_from_file_table_with_labels"; + void testInsertFromFileWithLabels() throws InterruptedException, IOException, TimeoutException { + String destinationTableName = generateRandomName("test_insert_from_file_table_with_labels"); TableId tableId = TableId.of(DATASET, destinationTableName); WriteChannelConfiguration configuration = WriteChannelConfiguration.newBuilder(tableId) @@ -2839,20 +6539,17 @@ public void testInsertFromFileWithLabels() .setSchema(TABLE_SCHEMA) .setLabels(LABELS) .build(); - TableDataWriteChannel channel = bigquery.writer(configuration); - try { + try (TableDataWriteChannel channel = 
bigquery.writer(configuration)) { // A zero byte write should not throw an exception. assertEquals(0, channel.write(ByteBuffer.wrap("".getBytes(StandardCharsets.UTF_8)))); - } finally { - // Force the channel to flush by calling `close`. - channel.close(); } - channel = bigquery.writer(configuration); + TableDataWriteChannel channel = bigquery.writer(configuration); try { channel.write(ByteBuffer.wrap(JSON_CONTENT.getBytes(StandardCharsets.UTF_8))); } finally { channel.close(); } + // Channel must close before retrieving the job Job job = channel.getJob().waitFor(); LoadJobConfiguration jobConfiguration = job.getConfiguration(); assertEquals(TABLE_SCHEMA, jobConfiguration.getSchema()); @@ -2862,22 +6559,56 @@ public void testInsertFromFileWithLabels() } @Test - public void testLocation() throws Exception { + void testInsertWithDecimalTargetTypes() + throws InterruptedException, IOException, TimeoutException { + String destinationTableName = + generateRandomName("test_insert_from_file_table_with_decimal_target_type"); + TableId tableId = TableId.of(DATASET, destinationTableName); + WriteChannelConfiguration configuration = + WriteChannelConfiguration.newBuilder(tableId) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setAutodetect(true) + .setDecimalTargetTypes(ImmutableList.of("STRING", "NUMERIC", "BIGNUMERIC")) + .build(); + TableDataWriteChannel channel = bigquery.writer(configuration); + try { + channel.write(ByteBuffer.wrap("foo".getBytes(StandardCharsets.UTF_8))); + } finally { + channel.close(); + } + // Channel must close before retrieving the job + Job job = channel.getJob().waitFor(); + LoadJobConfiguration jobConfiguration = job.getConfiguration(); + assertNull(job.getStatus().getError()); + assertEquals( + ImmutableList.of("STRING", "NUMERIC", "BIGNUMERIC"), + jobConfiguration.getDecimalTargetTypes()); + assertTrue(bigquery.delete(tableId)); + } + + @Test + void testLocation() throws Exception { String location = "EU"; String wrongLocation = "US"; assertThat(location).isNotEqualTo(wrongLocation); + Tracer tracer = otel.getTracer("Test Tracer"); + BigQuery otelBigquery = + bigquery.getOptions().toBuilder() + .setEnableOpenTelemetryTracing(true) + .setOpenTelemetryTracer(tracer) + .build() + .getService(); + + String datasetName = "locationset_" + UUID.randomUUID().toString().replace("-", "_"); Dataset dataset = - bigquery.create( - DatasetInfo.newBuilder("locationset_" + UUID.randomUUID().toString().replace("-", "_")) - .setLocation(location) - .build()); + otelBigquery.create(DatasetInfo.newBuilder(datasetName).setLocation(location).build()); try { TableId tableId = TableId.of(dataset.getDatasetId().getDataset(), "sometable"); Schema schema = Schema.of(Field.of("name", LegacySQLTypeName.STRING)); TableDefinition tableDef = StandardTableDefinition.of(schema); - Table table = bigquery.create(TableInfo.newBuilder(tableId, tableDef).build()); + Table table = otelBigquery.create(TableInfo.newBuilder(tableId, tableDef).build()); String query = String.format( @@ -2887,52 +6618,47 @@ public void testLocation() throws Exception { table.getTableId().getTable()); // Test create/get - { - Job job = - bigquery.create( - JobInfo.of( - JobId.newBuilder().setLocation(location).build(), - QueryJobConfiguration.of(query))); - job = job.waitFor(); - assertThat(job.getStatus().getError()).isNull(); - - assertThat(job.getJobId().getLocation()).isEqualTo(location); - - JobId jobId = job.getJobId(); - JobId wrongId = jobId.toBuilder().setLocation(wrongLocation).build(); - - // 
Getting with location should work. - assertThat(bigquery.getJob(jobId)).isNotNull(); - // Getting with wrong location shouldn't work. - assertThat(bigquery.getJob(wrongId)).isNull(); - - // Cancelling with location should work. (Cancelling already finished job is fine.) - assertThat(bigquery.cancel(jobId)).isTrue(); - // Cancelling with wrong location shouldn't work. - assertThat(bigquery.cancel(wrongId)).isFalse(); - } + Job job = + otelBigquery.create( + JobInfo.of( + JobId.newBuilder().setLocation(location).build(), + QueryJobConfiguration.of(query))); + job = job.waitFor(); + assertThat(job.getStatus().getError()).isNull(); + + assertThat(job.getJobId().getLocation()).isEqualTo(location); + + JobId jobId = job.getJobId(); + JobId wrongId = jobId.toBuilder().setLocation(wrongLocation).build(); + + // Getting with location should work. + assertThat(otelBigquery.getJob(jobId)).isNotNull(); + // Getting with wrong location shouldn't work. + assertThat(otelBigquery.getJob(wrongId)).isNull(); + + // Cancelling with location should work. (Cancelling already finished job is fine.) + assertThat(otelBigquery.cancel(jobId)).isTrue(); + // Cancelling with wrong location shouldn't work. + assertThat(otelBigquery.cancel(wrongId)).isFalse(); // Test query - { - assertThat( - bigquery - .query( - QueryJobConfiguration.of(query), - JobId.newBuilder().setLocation(location).build()) - .iterateAll()) - .isEmpty(); - - try { - bigquery - .query( - QueryJobConfiguration.of(query), - JobId.newBuilder().setLocation(wrongLocation).build()) - .iterateAll(); - fail("querying a table with wrong location shouldn't work"); - } catch (BigQueryException e) { - // Nothing to do - } - } + assertThat( + otelBigquery + .query( + QueryJobConfiguration.of(query), + JobId.newBuilder().setLocation(location).build()) + .iterateAll()) + .isEmpty(); + + assertThrows( + BigQueryException.class, + () -> + otelBigquery + .query( + QueryJobConfiguration.of(query), + JobId.newBuilder().setLocation(wrongLocation).build()) + .iterateAll(), + "querying a table with wrong location shouldn't work"); // Test write { @@ -2941,21 +6667,1257 @@ public void testLocation() throws Exception { .setFormatOptions(FormatOptions.csv()) .build(); try (TableDataWriteChannel writer = - bigquery.writer( + otelBigquery.writer( JobId.newBuilder().setLocation(location).build(), writeChannelConfiguration)) { writer.write(ByteBuffer.wrap("foo".getBytes())); + assertEquals( + OTEL_ATTRIBUTES + .get("com.google.cloud.bigquery.TableDataWriteChannel.open") + .get(AttributeKey.stringKey("bq.job.location")), + location); } - try { - bigquery.writer( - JobId.newBuilder().setLocation(wrongLocation).build(), writeChannelConfiguration); - fail("writing to a table with wrong location shouldn't work"); - } catch (BigQueryException e) { - // Nothing to do - } + assertThrows( + BigQueryException.class, + () -> { + try (TableDataWriteChannel ignore = + otelBigquery.writer( + JobId.newBuilder().setLocation(wrongLocation).build(), + writeChannelConfiguration)) {} + }, + "writing to a table with wrong location shouldn't work"); } } finally { - bigquery.delete(dataset.getDatasetId(), DatasetDeleteOption.deleteContents()); + RemoteBigQueryHelper.forceDelete(bigquery, datasetName); + } + } + + @Test + void testWriteChannelPreserveAsciiControlCharacters() + throws InterruptedException, IOException, TimeoutException { + String destinationTableName = + generateRandomName("test_write_channel_preserve_ascii_control_characters"); + TableId tableId = TableId.of(DATASET, 
destinationTableName); + WriteChannelConfiguration configuration = + WriteChannelConfiguration.newBuilder(tableId) + .setFormatOptions( + FormatOptions.csv().toBuilder().setPreserveAsciiControlCharacters(true).build()) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setSchema(SIMPLE_SCHEMA) + .build(); + TableDataWriteChannel channel = bigquery.writer(configuration); + try { + channel.write(ByteBuffer.wrap("\u0000".getBytes(StandardCharsets.UTF_8))); + } finally { + channel.close(); + } + // Channel must close before retrieving the job + Job job = channel.getJob().waitFor(); + assertNull(job.getStatus().getError()); + Page<FieldValueList> rows = bigquery.listTableData(tableId); + FieldValueList row = rows.getValues().iterator().next(); + assertEquals("\u0000", row.get(0).getStringValue()); + assertTrue(bigquery.delete(tableId)); + } + + @Test + void testLoadJobPreserveAsciiControlCharacters() throws InterruptedException { + String destinationTableName = + generateRandomName("test_load_job_preserve_ascii_control_characters"); + TableId destinationTable = TableId.of(DATASET, destinationTableName); + + try { + LoadJobConfiguration configuration = + LoadJobConfiguration.newBuilder(destinationTable, "gs://" + BUCKET + "/" + LOAD_FILE_NULL) + .setFormatOptions( + CsvOptions.newBuilder().setPreserveAsciiControlCharacters(true).build()) + .setSchema(SIMPLE_SCHEMA) + .build(); + Job remoteLoadJob = bigquery.create(JobInfo.of(configuration)); + remoteLoadJob = remoteLoadJob.waitFor(); + assertNull(remoteLoadJob.getStatus().getError()); + } finally { + assertTrue(bigquery.delete(destinationTable)); + } + } + + @Test + void testReferenceFileSchemaUriForAvro() { + try { + String destinationTableName = generateRandomName("test_reference_file_schema_avro"); + TableId tableId = TableId.of(DATASET, destinationTableName); + Schema expectedSchema = + Schema.of( + Field.newBuilder("username", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build(), + Field.newBuilder("tweet", StandardSQLTypeName.STRING).setMode(Mode.NULLABLE).build(), + Field.newBuilder("timestamp", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build(), + Field.newBuilder("likes", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); + + // By default, the table should have c-twitter schema because it is lexicographically last. + // a-twitter schema (username, tweet, timestamp, likes) + // b-twitter schema (username, tweet, timestamp) + // c-twitter schema (username, tweet) + List<String> SOURCE_URIS = + ImmutableList.of( + "gs://" + + CLOUD_SAMPLES_DATA + + "/bigquery/federated-formats-reference-file-schema/a-twitter.avro", + "gs://" + + CLOUD_SAMPLES_DATA + + "/bigquery/federated-formats-reference-file-schema/b-twitter.avro", + "gs://" + + CLOUD_SAMPLES_DATA + + "/bigquery/federated-formats-reference-file-schema/c-twitter.avro"); + + // Because referenceFileSchemaUri is set as a-twitter, the table will have a-twitter schema + String referenceFileSchema = + "gs://" + + CLOUD_SAMPLES_DATA + + "/bigquery/federated-formats-reference-file-schema/a-twitter.avro"; + + LoadJobConfiguration loadJobConfiguration = + LoadJobConfiguration.newBuilder(tableId, SOURCE_URIS, FormatOptions.avro()) + .setReferenceFileSchemaUri(referenceFileSchema) + .build(); + + Job job = bigquery.create(JobInfo.of(loadJobConfiguration)); + // Blocks until this load table job completes its execution, either failing or succeeding.
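+ // (Note: waitFor() polls the job with backoff until it finishes and returns the refreshed Job, + // or null if the job no longer exists; a failed job does not throw here but reports its error + // through getStatus().getError(), which is why the test checks isDone() and the resulting schema.)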
+ job = job.waitFor(); + assertTrue(job.isDone()); + + LoadJobConfiguration actualLoadJobConfiguration = job.getConfiguration(); + Table generatedTable = bigquery.getTable(actualLoadJobConfiguration.getDestinationTable()); + + assertEquals(expectedSchema, generatedTable.getDefinition().getSchema()); + // clean up after test to avoid conflict with other tests + assertTrue(bigquery.delete(tableId)); + } catch (BigQueryException | InterruptedException e) { + System.out.println("Column not added during load append \n" + e); + } + } + + @Test + void testReferenceFileSchemaUriForParquet() { + try { + String destinationTableName = generateRandomName("test_reference_file_schema_parquet"); + TableId tableId = TableId.of(DATASET, destinationTableName); + Schema expectedSchema = + Schema.of( + Field.newBuilder("username", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build(), + Field.newBuilder("tweet", StandardSQLTypeName.STRING).setMode(Mode.NULLABLE).build(), + Field.newBuilder("timestamp", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build(), + Field.newBuilder("likes", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); + + // By default, the table should have c-twitter schema because it is lexicographically last. + // a-twitter schema (username, tweet, timestamp, likes) + // b-twitter schema (username, tweet, timestamp) + // c-twitter schema (username, tweet) + List<String> SOURCE_URIS = + ImmutableList.of( + "gs://" + + CLOUD_SAMPLES_DATA + + "/bigquery/federated-formats-reference-file-schema/a-twitter.parquet", + "gs://" + + CLOUD_SAMPLES_DATA + + "/bigquery/federated-formats-reference-file-schema/b-twitter.parquet", + "gs://" + + CLOUD_SAMPLES_DATA + + "/bigquery/federated-formats-reference-file-schema/c-twitter.parquet"); + + // Because referenceFileSchemaUri is set as a-twitter, the table will have a-twitter schema + String referenceFileSchema = + "gs://" + + CLOUD_SAMPLES_DATA + + "/bigquery/federated-formats-reference-file-schema/a-twitter.parquet"; + + LoadJobConfiguration loadJobConfiguration = + LoadJobConfiguration.newBuilder(tableId, SOURCE_URIS, FormatOptions.parquet()) + .setReferenceFileSchemaUri(referenceFileSchema) + .build(); + + Job job = bigquery.create(JobInfo.of(loadJobConfiguration)); + // Blocks until this load table job completes its execution, either failing or succeeding.
+ job = job.waitFor(); + assertTrue(job.isDone()); + LoadJobConfiguration actualLoadJobConfiguration = job.getConfiguration(); + Table generatedTable = bigquery.getTable(actualLoadJobConfiguration.getDestinationTable()); + + assertEquals(expectedSchema, generatedTable.getDefinition().getSchema()); + // clean up after test to avoid conflict with other tests + assertTrue(bigquery.delete(tableId)); + } catch (BigQueryException | InterruptedException e) { + System.out.println("Column not added during load append \n" + e); } } + + @Test + void testCreateExternalTableWithReferenceFileSchemaAvro() { + String destinationTableName = + generateRandomName("test_create_external_table_reference_file_schema_avro"); + TableId tableId = TableId.of(DATASET, destinationTableName); + Schema expectedSchema = + Schema.of( + Field.newBuilder("username", StandardSQLTypeName.STRING).setMode(Mode.NULLABLE).build(), + Field.newBuilder("tweet", StandardSQLTypeName.STRING).setMode(Mode.NULLABLE).build(), + Field.newBuilder("timestamp", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build(), + Field.newBuilder("likes", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); + String CLOUD_SAMPLES_DATA = "cloud-samples-data"; + + // By default, the table should have c-twitter schema because it is lexicographically last. + // a-twitter schema (username, tweet, timestamp, likes) + // b-twitter schema (username, tweet, timestamp) + // c-twitter schema (username, tweet) + String SOURCE_URI = + "gs://" + CLOUD_SAMPLES_DATA + "/bigquery/federated-formats-reference-file-schema/*.avro"; + + // Because referenceFileSchemaUri is set as a-twitter, the table will have a-twitter schema + String referenceFileSchema = + "gs://" + + CLOUD_SAMPLES_DATA + + "/bigquery/federated-formats-reference-file-schema/a-twitter.avro"; + + ExternalTableDefinition externalTableDefinition = + ExternalTableDefinition.newBuilder(SOURCE_URI, FormatOptions.avro()) + .setReferenceFileSchemaUri(referenceFileSchema) + .build(); + TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); + Table createdTable = bigquery.create(tableInfo); + Table generatedTable = bigquery.getTable(createdTable.getTableId()); + assertEquals(expectedSchema, generatedTable.getDefinition().getSchema()); + // clean up after test to avoid conflict with other tests + assertTrue(bigquery.delete(tableId)); + } + + @Test + void testCreateExternalTableWithReferenceFileSchemaParquet() { + String destinationTableName = + generateRandomName("test_create_external_table_reference_file_schema_parquet"); + TableId tableId = TableId.of(DATASET, destinationTableName); + Schema expectedSchema = + Schema.of( + Field.newBuilder("username", StandardSQLTypeName.STRING).setMode(Mode.NULLABLE).build(), + Field.newBuilder("tweet", StandardSQLTypeName.STRING).setMode(Mode.NULLABLE).build(), + Field.newBuilder("timestamp", StandardSQLTypeName.STRING) + .setMode(Mode.NULLABLE) + .build(), + Field.newBuilder("likes", StandardSQLTypeName.INT64).setMode(Mode.NULLABLE).build()); + String CLOUD_SAMPLES_DATA = "cloud-samples-data"; + + // By default, the table should have c-twitter schema because it is lexicographically last. 
+ // a-twitter schema (username, tweet, timestamp, likes) + // b-twitter schema (username, tweet, timestamp) + // c-twitter schema (username, tweet) + String SOURCE_URI = + "gs://" + + CLOUD_SAMPLES_DATA + + "/bigquery/federated-formats-reference-file-schema/*.parquet"; + + // Because referenceFileSchemaUri is set as a-twitter, the table will have a-twitter schema + String referenceFileSchema = + "gs://" + + CLOUD_SAMPLES_DATA + + "/bigquery/federated-formats-reference-file-schema/a-twitter.parquet"; + + ExternalTableDefinition externalTableDefinition = + ExternalTableDefinition.newBuilder(SOURCE_URI, FormatOptions.parquet()) + .setReferenceFileSchemaUri(referenceFileSchema) + .build(); + TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); + Table createdTable = bigquery.create(tableInfo); + Table generatedTable = bigquery.getTable(createdTable.getTableId()); + assertEquals(expectedSchema, generatedTable.getDefinition().getSchema()); + // clean up after test to avoid conflict with other tests + assertTrue(bigquery.delete(tableId)); + } + + @Test + void testCloneTableCopyJob() throws InterruptedException { + String sourceTableName = generateRandomName("test_copy_job_base_table"); + String ddlTableName = TABLE_ID_SIMPLE.getTable(); + String cloneTableName = generateRandomName("test_clone_table"); + // Create source table with some data in it + String ddlQuery = + String.format( + "CREATE OR REPLACE TABLE %s (" + + "TimestampField TIMESTAMP OPTIONS(description='TimestampDescription'), " + + "StringField STRING OPTIONS(description='StringDescription'), " + + "BooleanField BOOLEAN OPTIONS(description='BooleanDescription') " + + ") AS SELECT * FROM %s", + sourceTableName, ddlTableName); + QueryJobConfiguration ddlConfig = + QueryJobConfiguration.newBuilder(ddlQuery).setDefaultDataset(DatasetId.of(DATASET)).build(); + TableId sourceTableId = TableId.of(DATASET, sourceTableName); + TableResult result = bigquery.query(ddlConfig); + assertNotNull(result.getJobId()); + assertEquals(SIMPLE_TABLE_SCHEMA, result.getSchema()); + Table remoteTable = bigquery.getTable(DATASET, sourceTableName); + assertNotNull(remoteTable); + + // Create clone table using source table as the base table + TableId cloneTableId = TableId.of(DATASET, cloneTableName); + CopyJobConfiguration cloneConfiguration = + CopyJobConfiguration.newBuilder(cloneTableId, sourceTableId) + .setOperationType("CLONE") + .build(); + Job createdJob = bigquery.create(JobInfo.of(cloneConfiguration)); + CopyJobConfiguration createdConfiguration = createdJob.getConfiguration(); + assertNotNull(createdConfiguration.getSourceTables()); + assertNotNull(createdConfiguration.getOperationType()); + assertNotNull(createdConfiguration.getDestinationTable()); + Job completedJob = createdJob.waitFor(); + assertNull(completedJob.getStatus().getError()); + + Table cloneTable = bigquery.getTable(DATASET, cloneTableName); + assertNotNull(cloneTable); + assertEquals(cloneTableId.getDataset(), cloneTable.getTableId().getDataset()); + assertEquals(cloneTableName, cloneTable.getTableId().getTable()); + assertEquals(TableDefinition.Type.TABLE, cloneTable.getDefinition().getType()); + assertTrue(cloneTable.getDefinition() instanceof StandardTableDefinition); + assertEquals(SIMPLE_TABLE_SCHEMA, cloneTable.getDefinition().getSchema()); + assertTrue(cloneTable.getCloneDefinition() instanceof CloneDefinition); + assertEquals(sourceTableName, cloneTable.getCloneDefinition().getBaseTableId().getTable()); + 
assertNotNull(cloneTable.getCloneDefinition().getCloneTime()); + + // Clean up + assertTrue(remoteTable.delete()); + assertTrue(cloneTable.delete()); + } + + @Test + void testHivePartitioningOptionsFieldsFieldExistence() throws InterruptedException { + String tableName = "hive_partitioned_external_table"; + + // Create data on GCS + String sourceDirectory = "bigquery/hive-partitioning-table/example"; + BlobInfo blobInfo = BlobInfo.newBuilder(BUCKET, sourceDirectory + "/key=foo/data.json").build(); + assertNotNull( + storage.create(blobInfo, "{\"name\":\"bar\"}".getBytes(StandardCharsets.UTF_8)), + "Failed to upload JSON to GCS"); + String sourceUri = "gs://" + BUCKET + "/" + sourceDirectory + "/*"; + String sourceUriPrefix = "gs://" + BUCKET + "/" + sourceDirectory + "/"; + + // Create the external table + HivePartitioningOptions hivePartitioningOptions = + HivePartitioningOptions.newBuilder() + .setMode("AUTO") + .setRequirePartitionFilter(true) + .setSourceUriPrefix(sourceUriPrefix) + .build(); + + TableId tableId = TableId.of(DATASET, tableName); + ExternalTableDefinition customTable = + ExternalTableDefinition.newBuilder(sourceUri, FormatOptions.json()) + .setAutodetect(true) + .setHivePartitioningOptions(hivePartitioningOptions) + .build(); + bigquery.create(TableInfo.of(tableId, customTable)); + + // Validate the existence of the field HivePartitioningOptions.fields + Table table = bigquery.getTable(tableId); + assertThat(table).isNotNull(); + HivePartitioningOptions options = + ((ExternalTableDefinition) table.getDefinition()).getHivePartitioningOptions(); + List<String> fields = options.getFields(); + assertThat(fields).isNotNull(); + assertThat(fields).hasSize(1); + assertThat(fields).contains("key"); + + // Clean up + assertTrue(table.delete()); + assertTrue(storage.delete(blobInfo.getBlobId())); + } + + @Test + void testPrimaryKey() { + String tableName = "test_primary_key"; + TableId tableId = TableId.of(DATASET, tableName); + PrimaryKey primaryKey = PrimaryKey.newBuilder().setColumns(Arrays.asList("ID")).build(); + TableConstraints tableConstraintsPk = + TableConstraints.newBuilder().setPrimaryKey(primaryKey).build(); + + try { + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder() + .setSchema(CONSTRAINTS_TABLE_SCHEMA) + .setTableConstraints(tableConstraintsPk) + .build(); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + assertNotNull(createdTable); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertEquals( + tableConstraintsPk, + remoteTable.<StandardTableDefinition>getDefinition().getTableConstraints()); + } finally { + bigquery.delete(tableId); + } + } + + @Test + void testPrimaryKeyUpdate() { + String tableName = "test_primary_key_update"; + TableId tableId = TableId.of(DATASET, tableName); + PrimaryKey primaryKey = + PrimaryKey.newBuilder().setColumns(Arrays.asList("FirstName", "LastName")).build(); + TableConstraints tableConstraintsPk = + TableConstraints.newBuilder().setPrimaryKey(primaryKey).build(); + + try { + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder().setSchema(CONSTRAINTS_TABLE_SCHEMA).build(); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + assertNotNull(createdTable); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNull(remoteTable.<StandardTableDefinition>getDefinition().getTableConstraints()); + + Table updatedTable = + remoteTable.toBuilder().setTableConstraints(tableConstraintsPk).build().update(); + assertNotNull(updatedTable); + Table
remoteUpdatedTable = bigquery.getTable(DATASET, tableName); + assertEquals( + tableConstraintsPk, + remoteUpdatedTable.<StandardTableDefinition>getDefinition().getTableConstraints()); + } finally { + bigquery.delete(tableId); + } + } + + @Test + void testForeignKeys() { + String tableNamePk = "test_foreign_key"; + String tableNameFk = "test_foreign_key2"; + // TableIds referenced by foreign keys need project id to be specified + TableId tableIdPk = TableId.of(PROJECT_ID, DATASET, tableNamePk); + TableId tableIdFk = TableId.of(DATASET, tableNameFk); + ColumnReference columnReference = + ColumnReference.newBuilder().setReferencingColumn("ID").setReferencedColumn("ID").build(); + + PrimaryKey primaryKey = + PrimaryKey.newBuilder().setColumns(Collections.singletonList("ID")).build(); + TableConstraints tableConstraintsPk = + TableConstraints.newBuilder().setPrimaryKey(primaryKey).build(); + + ForeignKey foreignKey = + ForeignKey.newBuilder() + .setName("foreign_key") + .setReferencedTable(tableIdPk) + .setColumnReferences(Collections.singletonList(columnReference)) + .build(); + TableConstraints tableConstraintsFk = + TableConstraints.newBuilder().setForeignKeys(Collections.singletonList(foreignKey)).build(); + + try { + StandardTableDefinition tableDefinitionPk = + StandardTableDefinition.newBuilder() + .setSchema(CONSTRAINTS_TABLE_SCHEMA) + .setTableConstraints(tableConstraintsPk) + .build(); + Table createdTablePk = bigquery.create(TableInfo.of(tableIdPk, tableDefinitionPk)); + assertNotNull(createdTablePk); + + StandardTableDefinition tableDefinitionFk = + StandardTableDefinition.newBuilder() + .setSchema(CONSTRAINTS_TABLE_SCHEMA) + .setTableConstraints(tableConstraintsFk) + .build(); + Table createdTableFk = bigquery.create(TableInfo.of(tableIdFk, tableDefinitionFk)); + assertNotNull(createdTableFk); + Table remoteTable = bigquery.getTable(DATASET, tableNameFk); + assertEquals( + tableConstraintsFk, + remoteTable.<StandardTableDefinition>getDefinition().getTableConstraints()); + } finally { + bigquery.delete(tableIdPk); + bigquery.delete(tableIdFk); + } + } + + @Test + void testForeignKeysUpdate() { + String tableNameFk = "test_foreign_key"; + String tableNamePk1 = "test_foreign_key2"; + String tableNamePk2 = "test_foreign_key3"; + TableId tableIdFk = TableId.of(DATASET, tableNameFk); + // TableIds referenced by foreign keys need project id to be specified + TableId tableIdPk1 = TableId.of(PROJECT_ID, DATASET, tableNamePk1); + TableId tableIdPk2 = TableId.of(PROJECT_ID, DATASET, tableNamePk2); + + ArrayList<ForeignKey> foreignKeys = new ArrayList<>(); + + // set up ID in tableFk as a foreign key to tablePk1 + ColumnReference columnReferencePk1 = + ColumnReference.newBuilder().setReferencingColumn("ID").setReferencedColumn("ID").build(); + PrimaryKey primaryKey1 = + PrimaryKey.newBuilder().setColumns(Collections.singletonList("ID")).build(); + TableConstraints tableConstraintsPk1 = + TableConstraints.newBuilder().setPrimaryKey(primaryKey1).build(); + + ForeignKey foreignKey1 = + ForeignKey.newBuilder() + .setName("foreign_key1") + .setReferencedTable(tableIdPk1) + .setColumnReferences(Collections.singletonList(columnReferencePk1)) + .build(); + foreignKeys.add(foreignKey1); + + // set up FirstName and LastName in tableFk as foreign keys to tablePk2 + ArrayList<ColumnReference> columnReferencesPk2 = new ArrayList<>(); + columnReferencesPk2.add( + ColumnReference.newBuilder() + .setReferencingColumn("FirstName") + .setReferencedColumn("FirstName") + .build()); + columnReferencesPk2.add( + ColumnReference.newBuilder() + .setReferencingColumn("LastName") +
.setReferencedColumn("LastName") + .build()); + + ArrayList<String> primaryKey2Columns = new ArrayList<>(); + primaryKey2Columns.add("FirstName"); + primaryKey2Columns.add("LastName"); + + PrimaryKey primaryKey2 = PrimaryKey.newBuilder().setColumns(primaryKey2Columns).build(); + TableConstraints tableConstraintsPk2 = + TableConstraints.newBuilder().setPrimaryKey(primaryKey2).build(); + ForeignKey foreignKey2 = + ForeignKey.newBuilder() + .setName("foreign_key2") + .setReferencedTable(tableIdPk2) + .setColumnReferences(columnReferencesPk2) + .build(); + foreignKeys.add(foreignKey2); + TableConstraints tableConstraintsFk = + TableConstraints.newBuilder().setForeignKeys(foreignKeys).build(); + + try { + StandardTableDefinition tableDefinitionFk = + StandardTableDefinition.newBuilder().setSchema(CONSTRAINTS_TABLE_SCHEMA).build(); + Table createdTableFk = bigquery.create(TableInfo.of(tableIdFk, tableDefinitionFk)); + assertNotNull(createdTableFk); + + StandardTableDefinition tableDefinitionPk1 = + StandardTableDefinition.newBuilder() + .setSchema(CONSTRAINTS_TABLE_SCHEMA) + .setTableConstraints(tableConstraintsPk1) + .build(); + Table createdTablePk1 = bigquery.create(TableInfo.of(tableIdPk1, tableDefinitionPk1)); + assertNotNull(createdTablePk1); + + StandardTableDefinition tableDefinitionPk2 = + StandardTableDefinition.newBuilder() + .setSchema(CONSTRAINTS_TABLE_SCHEMA) + .setTableConstraints(tableConstraintsPk2) + .build(); + Table createdTablePk2 = bigquery.create(TableInfo.of(tableIdPk2, tableDefinitionPk2)); + assertNotNull(createdTablePk2); + + Table remoteTable = bigquery.getTable(DATASET, tableNameFk); + assertNull(remoteTable.<StandardTableDefinition>getDefinition().getTableConstraints()); + + Table updatedTable = + remoteTable.toBuilder().setTableConstraints(tableConstraintsFk).build().update(); + + assertNotNull(updatedTable); + Table remoteUpdatedTable = bigquery.getTable(DATASET, tableNameFk); + assertEquals( + tableConstraintsFk, + remoteUpdatedTable.<StandardTableDefinition>getDefinition().getTableConstraints()); + } finally { + bigquery.delete(tableIdFk); + bigquery.delete(tableIdPk1); + bigquery.delete(tableIdPk2); + } + } + + @Test + void testAlreadyExistJobExceptionHandling() throws InterruptedException { + String query = + "SELECT TimestampField, StringField, BooleanField FROM " + + DATASET + + "." + + TABLE_ID.getTable(); + JobId jobId = JobId.newBuilder().setRandomJob().build(); + + JobConfiguration queryJobConfiguration = QueryJobConfiguration.newBuilder(query).build(); + // Creating the job with the explicit jobID + bigquery.create(JobInfo.of(jobId, queryJobConfiguration)); + // Calling the query method with the job that has already been created. + // Without the exception handling that was added, this would throw an ALREADY_EXISTS error; + // it still does if the existing job is older than 24 hours. + try { + bigquery.query(QueryJobConfiguration.newBuilder(query).build(), jobId); + // Test succeeds if Exception is not thrown and code flow reaches this statement. + assertTrue(true); + } catch (BigQueryException ex) { + // test fails if an exception is thrown + if (ex.getCause() != null && ex.getCause().getMessage().contains("Already Exists: Job")) { + fail("Already exists error should not be thrown"); + } + } + } + + @Test + void testStatelessQueries() throws InterruptedException { + // Create local BigQuery to not contaminate global test parameters. + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + BigQuery bigQuery = bigqueryHelper.getOptions().getService(); + + // Stateless query should have no job id.
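+ // JobCreationMode controls whether query() may take the stateless fast path instead of always + // inserting a job. A minimal sketch of opting in (illustrative only, assuming default + // application credentials): + // BigQuery bq = BigQueryOptions.getDefaultInstance().getService(); + // bq.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); + // TableResult r = bq.query(QueryJobConfiguration.of("SELECT 1")); // fast path sets getQueryId()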
+ bigQuery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); + TableResult tableResult = executeSimpleQuery(bigQuery); + // Use XOR: We accept EITHER a QueryId (fast path) OR a JobId (slow fallback), but not both. + // Ideally a stateless query returns a queryId, but in some cases it returns a jobId instead, + // depending on query complexity or other factors (e.g. job timeout configs). + assertTrue( + (tableResult.getJobId() != null) ^ (tableResult.getQueryId() != null), + "Exactly one of jobId or queryId should be non-null"); + + // Job creation takes over, no query id is created. + bigQuery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_REQUIRED); + tableResult = executeSimpleQuery(bigQuery); + assertNull(tableResult.getQueryId()); + assertNotNull(tableResult.getJobId()); + + bigQuery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_MODE_UNSPECIFIED); + tableResult = executeSimpleQuery(bigQuery); + assertNotNull(tableResult.getQueryId()); + assertNotNull(tableResult.getJobId()); + } + + private TableResult executeSimpleQuery(BigQuery bigQuery) throws InterruptedException { + String query = "SELECT CURRENT_TIMESTAMP() as ts"; + QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).build(); + return bigQuery.query(config); + } + + @Test + void testTableResultJobIdAndQueryId() throws InterruptedException { + // jobId and queryId are populated based on the following criteria: + // 1. For stateless queries, queryId is populated. + // 2. For queries that fail the requirements to be stateless, jobId is populated and + // queryId is not. + // 3. For explicitly created jobs, jobId is populated and queryId is not. + // 4. If QueryJobConfiguration explicitly sets the job creation mode to REQUIRED, jobId is + // populated and queryId is not. + + // Test scenario 1. + // Create local BigQuery for test scenario 1 to not contaminate global test parameters. + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + BigQuery bigQuery = bigqueryHelper.getOptions().getService(); + // Allow queries to be stateless. + bigQuery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); + String query = "SELECT 1 as one"; + QueryJobConfiguration configStateless = QueryJobConfiguration.newBuilder(query).build(); + TableResult result = bigQuery.query(configStateless); + // This should trigger a stateless query due to the query simplicity. However, BQ's engine + // may configure this to be a job due to a variety of factors. The queryId is auto-populated, + // and a jobId may also be returned if the query is promoted to a job. For the query above, the + // job creation reason would always be `OTHER`, since the job was not explicitly requested and + // was not created for a large result or a timeout. + assertNotNull(result.getQueryId()); + if (result.getJobCreationReason() != null) { + assertNotNull(result.getJobId()); + assertEquals(result.getQueryId(), result.getJobId().getJob()); + assertEquals(JobCreationReason.Code.OTHER, result.getJobCreationReason().getCode()); + } + + // Test scenario 2 by failing the stateless check with a job timeout. + QueryJobConfiguration configQueryWithJob = + QueryJobConfiguration.newBuilder(query).setJobTimeoutMs(1L).build(); + result = bigQuery.query(configQueryWithJob); + assertNotNull(result.getJobId()); + assertNull(result.getQueryId()); + + // Test scenario 3.
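+ // (An explicitly created job always carries a jobId and never a queryId; scenario 4 below + // forces the same outcome through query() via JOB_CREATION_REQUIRED on the configuration.)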
+ QueryJobConfiguration configWithJob = QueryJobConfiguration.newBuilder(query).build(); + Job job = bigQuery.create(JobInfo.of(JobId.of(), configWithJob)); + result = job.getQueryResults(); + assertNotNull(result.getJobId()); + assertNull(result.getQueryId()); + + // Test scenario 4. + configWithJob = + QueryJobConfiguration.newBuilder(query) + .setJobCreationMode(JobCreationMode.JOB_CREATION_REQUIRED) + .build(); + result = bigQuery.query(configWithJob); + assertNotNull(result.getJobId()); + assertNull(result.getQueryId()); + } + + @Test + void testStatelessQueriesWithLocation() throws Exception { + // This test validates that the BigQueryOptions location is used for stateless queries by + // verifying that a stateless query fails when it does not match the dataset location. + String location = "EU"; + String wrongLocation = "US"; + + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + BigQuery bigQuery = + bigqueryHelper.getOptions().toBuilder().setLocation(location).build().getService(); + + String datasetName = "locationset_" + UUID.randomUUID().toString().replace("-", "_"); + Dataset dataset = + bigQuery.create(DatasetInfo.newBuilder(datasetName).setLocation(location).build()); + try { + TableId tableId = TableId.of(dataset.getDatasetId().getDataset(), "sometable"); + Schema schema = Schema.of(Field.of("name", LegacySQLTypeName.STRING)); + TableDefinition tableDef = StandardTableDefinition.of(schema); + Table table = bigQuery.create(TableInfo.newBuilder(tableId, tableDef).build()); + + String query = + String.format( + "SELECT * FROM `%s.%s.%s`", + table.getTableId().getProject(), + table.getTableId().getDataset(), + table.getTableId().getTable()); + + // Test stateless query when BigQueryOption location matches dataset location. + bigQuery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); + TableResult tb = bigQuery.query(QueryJobConfiguration.of(query)); + assertNull(tb.getJobId()); + + // Test stateless query when BigQueryOption location does not match dataset location. + assertThrows( + BigQueryException.class, + () -> { + BigQuery bigQueryWrongLocation = + bigqueryHelper.getOptions().toBuilder() + .setLocation(wrongLocation) + .build() + .getService(); + bigQueryWrongLocation + .getOptions() + .setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); + bigQueryWrongLocation.query(QueryJobConfiguration.of(query)); + }, + "querying a table with wrong location shouldn't work"); + } finally { + RemoteBigQueryHelper.forceDelete(bigQuery, datasetName); + } + } + + @Test + void testQueryWithTimeout() throws InterruptedException { + // Validate that queryWithTimeout returns either a TableResult or a Job object + + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + BigQuery bigQuery = bigqueryHelper.getOptions().getService(); + bigQuery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); + String largeQuery = + "SELECT * FROM UNNEST(GENERATE_ARRAY(1, 20000)) CROSS JOIN UNNEST(GENERATE_ARRAY(1, 20000))"; + String query = "SELECT 1 as one"; + // Test scenario 1. + // Stateless query returns TableResult + QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).build(); + Object result = bigQuery.queryWithTimeout(config, null, null); + assertInstanceOf(TableResult.class, result); + // This should trigger a stateless query due to the query simplicity.
However, BQ's engine + may configure this to be a job due to a variety of factors. The queryId is auto-populated, + and a jobId may also be returned if the query is promoted to a job. For the query above, the + job creation reason would always be `OTHER`, since the job was not explicitly requested and + was not created for a large result or a timeout. + TableResult tableResult = (TableResult) result; + assertNotNull(tableResult.getQueryId()); + if (tableResult.getJobCreationReason() != null) { + assertNotNull(tableResult.getJobId()); + assertEquals(tableResult.getQueryId(), tableResult.getJobId().getJob()); + assertEquals(JobCreationReason.Code.OTHER, tableResult.getJobCreationReason().getCode()); + } + + // Stateful query returns Job + // Test scenario 2 to ensure a job is created when the job creation mode is REQUIRED, while a + // small query still returns results directly. + config = + QueryJobConfiguration.newBuilder(query) + .setJobCreationMode(JobCreationMode.JOB_CREATION_REQUIRED) + .build(); + result = bigQuery.queryWithTimeout(config, null, null); + assertInstanceOf(TableResult.class, result); + assertNotNull(((TableResult) result).getJobId()); + assertNull(((TableResult) result).getQueryId()); + + // Stateful query returns Job + // Test scenario 3 to ensure a job is created if the query is long-running. + // Explicitly disable the query cache to ensure this is a long-running query. + config = QueryJobConfiguration.newBuilder(largeQuery).setUseQueryCache(false).build(); + long millis = System.currentTimeMillis(); + result = bigQuery.queryWithTimeout(config, null, 1000L); + millis = System.currentTimeMillis() - millis; + assertInstanceOf(Job.class, result); + // Cancel the job as we don't need results. + ((Job) result).cancel(); + // The 1s timeout should make this return promptly; the bound below is deliberately generous + // to absorb scheduling and RPC delays. + assertTrue(millis < 1_000_000 * 2); + } + + @Test + void testUniverseDomainWithInvalidUniverseDomain() { + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + BigQueryOptions bigQueryOptions = + bigqueryHelper.getOptions().toBuilder() + .setCredentials(loadCredentials(FAKE_JSON_CRED_WITH_GOOGLE_DOMAIN)) + .setUniverseDomain("invalid.domain") + .build(); + BigQuery bigQuery = bigQueryOptions.getService(); + + BigQueryException exception = + assertThrows( + BigQueryException.class, + () -> bigQuery.listDatasets("bigquery-public-data"), + "RPCs to invalid universe domain should fail"); + assertEquals(HTTP_UNAUTHORIZED, exception.getCode()); + assertNotNull(exception.getMessage()); + assertTrue( + exception + .getMessage() + .contains("does not match the universe domain found in the credentials")); + } + + @Test + void testInvalidUniverseDomainWithMismatchCredentials() { + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + BigQueryOptions bigQueryOptions = + bigqueryHelper.getOptions().toBuilder() + .setCredentials(loadCredentials(FAKE_JSON_CRED_WITH_INVALID_DOMAIN)) + .build(); + BigQuery bigQuery = bigQueryOptions.getService(); + + BigQueryException exception = + assertThrows( + BigQueryException.class, + () -> bigQuery.listDatasets("bigquery-public-data"), + "RPCs to invalid universe domain should fail"); + assertEquals(HTTP_UNAUTHORIZED, exception.getCode()); + assertNotNull(exception.getMessage()); + assertTrue( + exception + .getMessage() + .contains("does not match the universe domain found in the credentials")); + } + + @Test + void testUniverseDomainWithMatchingDomain() { + // Test a valid domain using the default credentials and Google default universe domain.
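+ // "googleapis.com" is the Google default universe domain; a non-default universe only works + // when the configured credentials belong to that universe, which is what the two negative + // tests above assert via HTTP 401. A sketch (the domain and credentials are placeholders): + // BigQueryOptions.newBuilder().setUniverseDomain("my-universe.example.com") + // .setCredentials(credentialsForThatUniverse).build().getService();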
+ RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + BigQueryOptions bigQueryOptions = + bigqueryHelper.getOptions().toBuilder().setUniverseDomain("googleapis.com").build(); + BigQuery bigQuery = bigQueryOptions.getService(); + + // Verify that all is well by listing datasets. + Page<Dataset> datasets = bigQuery.listDatasets("bigquery-public-data"); + Iterator<Dataset> iterator = datasets.iterateAll().iterator(); + Set<String> datasetNames = new HashSet<>(); + Map<String, String> datasetLocation = new HashMap<>(); + while (iterator.hasNext()) { + Dataset dataset = iterator.next(); + String name = dataset.getDatasetId().getDataset(); + datasetNames.add(name); + datasetLocation.put(name, dataset.getLocation()); + } + for (String type : PUBLIC_DATASETS) { + assertTrue(datasetNames.contains(type)); + assertEquals(PUBLIC_DATASETS_LOCATION.get(type), datasetLocation.get(type)); + } + } + + @Test + void testExternalTableMetadataCachingNotEnable() throws InterruptedException { + String tableName = generateRandomName("test_metadata_cache_not_enable"); + TableId tableId = TableId.of(DATASET, tableName); + ExternalTableDefinition externalTableDefinition = + ExternalTableDefinition.of( + "gs://" + BUCKET + "/" + JSON_LOAD_FILE, TABLE_SCHEMA, FormatOptions.json()); + TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.getTableId().getDataset()); + assertEquals(tableName, createdTable.getTableId().getTable()); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTable); + assertTrue(remoteTable.getDefinition() instanceof ExternalTableDefinition); + assertEquals(createdTable.getTableId(), remoteTable.getTableId()); + assertEquals(TABLE_SCHEMA, remoteTable.getDefinition().getSchema()); + + String query = String.format("SELECT * FROM %s.%s", DATASET, tableName); + QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).build(); + + Job remoteJob = bigquery.create(JobInfo.of(config)); + remoteJob = remoteJob.waitFor(); + assertNull(remoteJob.getStatus().getError()); + + Job queryJob = bigquery.getJob(remoteJob.getJobId()); + JobStatistics.QueryStatistics statistics = queryJob.getStatistics(); + assertNotNull(statistics); + assertNotNull(statistics.getMetadataCacheStats()); + assertThat(statistics.getMetadataCacheStats().getTableMetadataCacheUsage().size()).isEqualTo(1); + assertThat( + statistics + .getMetadataCacheStats() + .getTableMetadataCacheUsage() + .get(0) + .getUnusedReason()) + .isEqualTo(UnusedReason.METADATA_CACHING_NOT_ENABLED); + + assertTrue(remoteTable.delete()); + } + + @Test + void testExternalMetadataCacheModeFailForNonBiglake() { + // Validate that MetadataCacheMode is passed to the backend. + // TODO: Enhance this test after BigLake testing infrastructure is in place.
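+ // Metadata caching applies only to BigLake tables, i.e. external tables backed by a + // connection; a definition of the accepted shape would pair the two settings (the connection + // name here is a placeholder): + // ExternalTableDefinition.newBuilder(sourceUri, TABLE_SCHEMA, FormatOptions.json()) + // .setConnectionId("projects/p/locations/us/connections/my-connection") + // .setMetadataCacheMode("AUTOMATIC") + // .build();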
+ String tableName = generateRandomName("test_metadata_cache_mode_fail_for_non_biglake"); + TableId tableId = TableId.of(DATASET, tableName); + ExternalTableDefinition externalTableDefinition = + ExternalTableDefinition.newBuilder( + "gs://" + BUCKET + "/" + JSON_LOAD_FILE, TABLE_SCHEMA, FormatOptions.json()) + .setMetadataCacheMode("AUTOMATIC") + .build(); + TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); + + BigQueryException exception = + assertThrows( + BigQueryException.class, + () -> bigquery.create(tableInfo), + "BigQueryException was expected"); + BigQueryError error = exception.getError(); + assertNotNull(error); + assertEquals("invalid", error.getReason()); + assertTrue( + exception + .getMessage() + .contains("metadataCacheMode provided for non BigLake external table")); + } + + @Test + void testObjectTable() throws InterruptedException { + String tableName = generateRandomName("test_object_table"); + TableId tableId = TableId.of(DATASET, tableName); + + String sourceUri = "gs://" + BUCKET + "/" + JSON_LOAD_FILE; + ExternalTableDefinition externalTableDefinition = + ExternalTableDefinition.newBuilder(sourceUri) + .setConnectionId( + "projects/java-docs-samples-testing/locations/us/connections/DEVREL_TEST_CONNECTION") + .setObjectMetadata("SIMPLE") + .build(); + TableInfo tableInfo = TableInfo.of(tableId, externalTableDefinition); + Table createdTable = bigquery.create(tableInfo); + assertNotNull(createdTable); + assertEquals(DATASET, createdTable.getTableId().getDataset()); + assertEquals(tableName, createdTable.getTableId().getTable()); + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTable); + + try { + assertTrue(remoteTable.getDefinition() instanceof ExternalTableDefinition); + assertEquals(createdTable.getTableId(), remoteTable.getTableId()); + assertEquals( + "SIMPLE", ((ExternalTableDefinition) remoteTable.getDefinition()).getObjectMetadata()); + assertNotNull(remoteTable.getDefinition().getSchema().getFields().get("uri")); + + String query = String.format("SELECT * FROM %s.%s", DATASET, tableName); + QueryJobConfiguration config = QueryJobConfiguration.newBuilder(query).build(); + + Job remoteJob = bigquery.create(JobInfo.of(config)); + remoteJob = remoteJob.waitFor(); + assertNull(remoteJob.getStatus().getError()); + + Job queryJob = bigquery.getJob(remoteJob.getJobId()); + JobStatistics.QueryStatistics statistics = queryJob.getStatistics(); + assertNotNull(statistics); + assertThat(statistics.getTotalBytesProcessed()).isGreaterThan(0); + } finally { + assertTrue(remoteTable.delete()); + } + } + + @Test + void testQueryExportStatistics() throws InterruptedException { + String query = + String.format( + "EXPORT DATA OPTIONS(\n" + + " uri='gs://%s/*.csv',\n" + + " format='CSV',\n" + + " overwrite=true,\n" + + " header=true,\n" + + " field_delimiter=';') AS\n" + + "SELECT num FROM UNNEST([1,2,3]) AS num", + BUCKET); + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); + Job job = bigquery.create(JobInfo.of(JobId.of(), config)); + job = job.waitFor(); + + QueryStatistics queryStatistics = job.getStatistics(); + assertNotNull(queryStatistics); + assertNotNull(queryStatistics.getExportDataStats()); + assertEquals(1L, queryStatistics.getExportDataStats().getFileCount().longValue()); + assertEquals(3L, queryStatistics.getExportDataStats().getRowCount().longValue()); + } + + @Test + void testLoadConfigurationFlexibleColumnName() throws 
InterruptedException { + // See https://cloud.google.com/bigquery/docs/reference/rest/v2/Job#columnnamecharactermap for + // mapping. + + // Test v1 mapping. + String v1TableName = generateRandomName("flexible_column_name_data_testing_table_v1"); + TableId v1TableId = TableId.of(DATASET, v1TableName); + try { + LoadJobConfiguration loadJobConfigurationV1 = + LoadJobConfiguration.newBuilder( + v1TableId, + "gs://" + BUCKET + "/" + LOAD_FILE_FLEXIBLE_COLUMN_NAME, + FormatOptions.csv()) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setAutodetect(true) + .setColumnNameCharacterMap("V1") + .build(); + Job jobV1 = bigquery.create(JobInfo.of(loadJobConfigurationV1)); + jobV1 = jobV1.waitFor(); + assertNull(jobV1.getStatus().getError()); + + Table remoteTableV1 = bigquery.getTable(DATASET, v1TableName); + assertNotNull(remoteTableV1); + assertEquals( + "_ampersand", remoteTableV1.getDefinition().getSchema().getFields().get(1).getName()); + } finally { + bigquery.delete(v1TableId); + } + + // Test v2 mapping. + String v2TableName = generateRandomName("flexible_column_name_data_testing_table_v2"); + TableId v2TableId = TableId.of(DATASET, v2TableName); + try { + LoadJobConfiguration loadJobConfigurationV2 = + LoadJobConfiguration.newBuilder( + v2TableId, + "gs://" + BUCKET + "/" + LOAD_FILE_FLEXIBLE_COLUMN_NAME, + FormatOptions.csv()) + .setCreateDisposition(JobInfo.CreateDisposition.CREATE_IF_NEEDED) + .setAutodetect(true) + .setColumnNameCharacterMap("V2") + .build(); + Job jobV2 = bigquery.create(JobInfo.of(loadJobConfigurationV2)); + jobV2 = jobV2.waitFor(); + assertNull(jobV2.getStatus().getError()); + + Table remoteTableV2 = bigquery.getTable(DATASET, v2TableName); + assertNotNull(remoteTableV2); + assertEquals( + "&ersand", remoteTableV2.getDefinition().getSchema().getFields().get(1).getName()); + } finally { + bigquery.delete(v2TableId); + } + } + + @Test + void testStatementType() throws InterruptedException { + String tableName = "test_materialized_view_table_statemnt_type"; + String createQuery = + String.format( + "CREATE MATERIALIZED VIEW %s.%s.%s " + + "AS (SELECT MAX(TimestampField) AS TimestampField,StringField, MAX(BooleanField) AS BooleanField FROM %s.%s.%s GROUP BY StringField)", + PROJECT_ID, DATASET, tableName, PROJECT_ID, DATASET, TABLE_ID.getTable()); + TableResult result = bigquery.query(QueryJobConfiguration.of(createQuery)); + assertNotNull(result); + Job job = bigquery.getJob(result.getJobId()); + JobStatistics.QueryStatistics stats = job.getStatistics(); + assertEquals(StatementType.CREATE_MATERIALIZED_VIEW, stats.getStatementType()); + + // cleanup + Table remoteTable = bigquery.getTable(DATASET, tableName); + assertNotNull(remoteTable); + assertTrue(remoteTable.getDefinition() instanceof MaterializedViewDefinition); + assertTrue(remoteTable.delete()); + } + + @Test + public void testOpenTelemetryTracingDatasets() { + Tracer tracer = otel.getTracer("Test Tracer"); + BigQueryOptions otelOptions = + BigQueryOptions.newBuilder() + .setEnableOpenTelemetryTracing(true) + .setOpenTelemetryTracer(tracer) + .build(); + BigQuery bigquery = otelOptions.getService(); + + Span parentSpan = + tracer + .spanBuilder("Test Parent Span") + .setNoParent() + .setAttribute("test-attribute", "test-value") + .startSpan(); + String billingModelDataset = RemoteBigQueryHelper.generateDatasetName(); + + try (Scope parentScope = parentSpan.makeCurrent()) { + DatasetInfo info = + DatasetInfo.newBuilder(billingModelDataset) + .setDescription(DESCRIPTION) + 
.setMaxTimeTravelHours(72L)
+ .setLabels(LABELS)
+ .build();
+
+ Dataset dataset = bigquery.create(info);
+ assertNotNull(dataset);
+ dataset = bigquery.getDataset(dataset.getDatasetId().getDataset());
+ assertNotNull(dataset);
+
+ DatasetInfo updatedInfo =
+ DatasetInfo.newBuilder(billingModelDataset)
+ .setDescription("Updated Description")
+ .setMaxTimeTravelHours(96L)
+ .setLabels(LABELS)
+ .build();
+
+ dataset = bigquery.update(updatedInfo, DatasetOption.accessPolicyVersion(2));
+ assertEquals("Updated Description", dataset.getDescription());
+ assertTrue(bigquery.delete(dataset.getDatasetId()));
+ } finally {
+ parentSpan.end();
+ Map<AttributeKey<?>, Object> createMap =
+ OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.createDataset");
+ assertEquals("null", createMap.get(AttributeKey.stringKey("bq.dataset.location")));
+ assertEquals(
+ "DatasetService",
+ OTEL_ATTRIBUTES
+ .get("com.google.cloud.bigquery.BigQueryRpc.createDataset")
+ .get(AttributeKey.stringKey("bq.rpc.service")));
+
+ Map<AttributeKey<?>, Object> getMap =
+ OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.getDataset");
+ assertEquals(billingModelDataset, getMap.get(AttributeKey.stringKey("bq.dataset.id")));
+
+ Map<AttributeKey<?>, Object> updateMap =
+ OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.updateDataset");
+ assertEquals("2", updateMap.get(AttributeKey.stringKey("bq.option.ACCESS_POLICY_VERSION")));
+
+ Map<AttributeKey<?>, Object> deleteMap =
+ OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.deleteDataset");
+ assertEquals(billingModelDataset, deleteMap.get(AttributeKey.stringKey("bq.dataset.id")));
+
+ // All should be children spans of parentSpan
+ String testParentSpanName = "Test Parent Span";
+ assertEquals(
+ testParentSpanName,
+ OTEL_SPAN_IDS_TO_NAMES.get(
+ OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.getDataset")));
+ assertEquals(
+ testParentSpanName,
+ OTEL_SPAN_IDS_TO_NAMES.get(
+ OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.createDataset")));
+ assertEquals(
+ testParentSpanName,
+ OTEL_SPAN_IDS_TO_NAMES.get(
+ OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.deleteDataset")));
+ assertEquals(
+ "com.google.cloud.bigquery.BigQueryRetryHelper.runWithRetries",
+ OTEL_SPAN_IDS_TO_NAMES.get(
+ OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQueryRpc.createDataset")));
+ assertEquals(OTEL_PARENT_SPAN_ID, OTEL_PARENT_SPAN_IDS.get(testParentSpanName));
+ RemoteBigQueryHelper.forceDelete(bigquery, billingModelDataset);
+ }
+ }
+
+ @Test
+ public void testOpenTelemetryTracingTables() {
+ Tracer tracer = otel.getTracer("Test Tracer");
+ BigQueryOptions otelOptions =
+ BigQueryOptions.newBuilder()
+ .setEnableOpenTelemetryTracing(true)
+ .setOpenTelemetryTracer(tracer)
+ .build();
+ BigQuery bigquery = otelOptions.getService();
+
+ String tableName = "test_otel_table";
+ StandardTableDefinition tableDefinition = StandardTableDefinition.of(TABLE_SCHEMA);
+ TableInfo tableInfo =
+ TableInfo.newBuilder(TableId.of(DATASET, tableName), tableDefinition)
+ .setDescription("Some Description")
+ .build();
+ Table createdTable = bigquery.create(tableInfo);
+ assertEquals("Some Description", createdTable.getDescription());
+
+ assertEquals(
+ OTEL_PARENT_SPAN_ID,
+ OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.createTable"));
+ assertEquals(
+ tableName,
+ OTEL_ATTRIBUTES
+ .get("com.google.cloud.bigquery.BigQuery.createTable")
+ .get(AttributeKey.stringKey("bq.table.id")));
+ assertEquals(
+ "null",
+ OTEL_ATTRIBUTES
+ .get("com.google.cloud.bigquery.BigQuery.createTable")
+ .get(AttributeKey.stringKey("bq.table.creation_time"))); + assertEquals( + "InsertTable", + OTEL_ATTRIBUTES + .get("com.google.cloud.bigquery.BigQueryRpc.createTable") + .get(AttributeKey.stringKey("bq.rpc.method"))); + + Table updatedTable = + bigquery.update(createdTable.toBuilder().setDescription("Updated Description").build()); + assertThat(updatedTable.getDescription()).isEqualTo("Updated Description"); + + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.updateTable")); + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQueryRpc.patchTable")); + assertEquals( + OTEL_PARENT_SPAN_ID, + OTEL_PARENT_SPAN_IDS.get("com.google.cloud.bigquery.BigQuery.updateTable")); + assertTrue(bigquery.delete(updatedTable.getTableId())); + } + + @Test + public void testOpenTelemetryTracingQuery() throws InterruptedException { + Tracer tracer = otel.getTracer("Test Tracer"); + BigQueryOptions otelOptions = + BigQueryOptions.newBuilder() + .setEnableOpenTelemetryTracing(true) + .setOpenTelemetryTracer(tracer) + .build(); + BigQuery bigquery = otelOptions.getService(); + + // Stateless query + bigquery.getOptions().setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL); + TableResult tableResult = executeSimpleQuery(bigquery); + assertNotNull(tableResult.getQueryId()); + assertNull(tableResult.getJobId()); + + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.queryRpc")); + assertNotNull( + OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQueryRetryHelper.runWithRetries")); + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQueryRpc.queryRpc")); + assertTrue(OTEL_ATTRIBUTES.containsKey("com.google.cloud.bigquery.BigQuery.queryWithTimeout")); + + // Query job + String query = "SELECT TimestampField, StringField, BooleanField FROM " + TABLE_ID.getTable(); + QueryJobConfiguration config = + QueryJobConfiguration.newBuilder(query).setDefaultDataset(DatasetId.of(DATASET)).build(); + Job job = bigquery.create(JobInfo.of(JobId.of(), config)); + + TableResult result = job.getQueryResults(); + assertNotNull(result.getJobId()); + assertEquals(QUERY_RESULT_SCHEMA, result.getSchema()); + + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.getQueryResults")); + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.listTableData")); + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQueryRpc.listTableData")); + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQuery.createJob")); + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQueryRpc.createJob")); + // Key exists, but value is null because no options were supplied in the request. 
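+ // (The OTEL_ATTRIBUTES / OTEL_PARENT_SPAN_IDS maps are assumed to be populated by a
+ // recording span processor registered on the shared `otel` instance; a minimal sketch of
+ // that wiring, with illustrative names only:
+ // SdkTracerProvider provider =
+ // SdkTracerProvider.builder()
+ // .addSpanProcessor(SimpleSpanProcessor.create(recordingExporter))
+ // .build();
+ // OpenTelemetry otel = OpenTelemetrySdk.builder().setTracerProvider(provider).build();)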
+ assertTrue(OTEL_ATTRIBUTES.containsKey("com.google.cloud.bigquery.Job.getQueryResults")); + assertNotNull(OTEL_ATTRIBUTES.get("com.google.cloud.bigquery.BigQueryRpc.getQueryResults")); + assertTrue(OTEL_ATTRIBUTES.containsKey("com.google.cloud.bigquery.Job.waitForQueryResults")); + } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITHighPrecisionTimestamp.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITHighPrecisionTimestamp.java new file mode 100644 index 0000000000..e4dc534f9e --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITHighPrecisionTimestamp.java @@ -0,0 +1,339 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.google.cloud.bigquery.it; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.DataFormatOptions; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.DatasetInfo; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.InsertAllRequest; +import com.google.cloud.bigquery.InsertAllResponse; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryParameterValue; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.StandardTableDefinition; +import com.google.cloud.bigquery.Table; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TableInfo; +import com.google.cloud.bigquery.TableResult; +import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; +import com.google.protobuf.Timestamp; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.UUID; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +class ITHighPrecisionTimestamp { + + private static final String TEST_HIGH_PRECISION_TIMESTAMP_TABLE_NAME = + generateTempTableName("test_high_precision_timestamp"); + private static BigQuery bigquery; + private static final String DATASET = RemoteBigQueryHelper.generateDatasetName(); + private static TableId defaultTableId; + private static final long TIMESTAMP_PICOSECOND_PRECISION = 12L; + private static final Field TIMESTAMP_HIGH_PRECISION_FIELD_SCHEMA = + Field.newBuilder("timestampHighPrecisionField", StandardSQLTypeName.TIMESTAMP) + 
.setTimestampPrecision(TIMESTAMP_PICOSECOND_PRECISION)
+ .build();
+ private static final Schema TABLE_SCHEMA = Schema.of(TIMESTAMP_HIGH_PRECISION_FIELD_SCHEMA);
+
+ private static final String TIMESTAMP1 = "2025-01-01T12:34:56.123456789123Z";
+ private static final String TIMESTAMP2 = "1970-01-01T12:34:56.123456789123Z";
+ private static final String TIMESTAMP3 = "2000-01-01T12:34:56.123456789123Z";
+
+ private static String generateTempTableName(String prefix) {
+ return String.format("%s_%s", prefix, UUID.randomUUID().toString().substring(0, 8));
+ }
+
+ @BeforeAll
+ static void beforeClass() {
+ BigQueryOptions.Builder builder =
+ BigQueryOptions.newBuilder()
+ .setDataFormatOptions(
+ DataFormatOptions.newBuilder()
+ .timestampFormatOptions(DataFormatOptions.TimestampFormatOptions.ISO8601_STRING)
+ .build());
+ RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(builder);
+ bigquery = bigqueryHelper.getOptions().getService();
+
+ // Create a new dataset
+ DatasetInfo info = DatasetInfo.newBuilder(DATASET).build();
+ bigquery.create(info);
+
+ StandardTableDefinition tableDefinition =
+ StandardTableDefinition.newBuilder().setSchema(TABLE_SCHEMA).build();
+ defaultTableId = TableId.of(DATASET, TEST_HIGH_PRECISION_TIMESTAMP_TABLE_NAME);
+
+ // Create a new table that can be re-used by the test cases
+ Table createdTable = bigquery.create(TableInfo.of(defaultTableId, tableDefinition));
+ assertNotNull(createdTable);
+
+ // Populate with some starter data
+ Map<String, Object> timestamp1 =
+ Collections.singletonMap("timestampHighPrecisionField", TIMESTAMP1);
+ Map<String, Object> timestamp2 =
+ Collections.singletonMap("timestampHighPrecisionField", TIMESTAMP2);
+ Map<String, Object> timestamp3 =
+ Collections.singletonMap("timestampHighPrecisionField", TIMESTAMP3);
+ InsertAllRequest request =
+ InsertAllRequest.newBuilder(defaultTableId)
+ .addRow(timestamp1)
+ .addRow(timestamp2)
+ .addRow(timestamp3)
+ .build();
+ InsertAllResponse response = bigquery.insertAll(request);
+ assertFalse(response.hasErrors());
+ assertEquals(0, response.getInsertErrors().size());
+ }
+
+ @AfterAll
+ static void afterClass() {
+ if (bigquery != null) {
+ bigquery.delete(defaultTableId);
+ RemoteBigQueryHelper.forceDelete(bigquery, DATASET);
+ }
+ }
+
+ @Test
+ void query_highPrecisionTimestamp() throws InterruptedException {
+ String sql =
+ String.format("SELECT timestampHighPrecisionField FROM %s;", defaultTableId.getTable());
+ QueryJobConfiguration queryJobConfiguration =
+ QueryJobConfiguration.newBuilder(sql)
+ .setDefaultDataset(DatasetId.of(DATASET))
+ .setUseLegacySql(false)
+ .build();
+ TableResult result = bigquery.query(queryJobConfiguration);
+ assertNotNull(result.getJobId());
+ String[] expected = new String[] {TIMESTAMP1, TIMESTAMP2, TIMESTAMP3};
+ List<String> timestamps =
+ StreamSupport.stream(result.getValues().spliterator(), false)
+ .map(x -> (String) x.get(0).getValue())
+ .collect(Collectors.toList());
+ assertEquals(expected.length, timestamps.size());
+ assertThat(timestamps).containsAtLeastElementsIn(expected);
+ }
+
+ @Test
+ void insert_highPrecisionTimestamp_ISOValidFormat() {
+ StandardTableDefinition tableDefinition =
+ StandardTableDefinition.newBuilder().setSchema(TABLE_SCHEMA).build();
+ String tempTableName = generateTempTableName("insert_temp");
+ TableId tableId = TableId.of(DATASET, tempTableName);
+ Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition));
+ assertNotNull(createdTable);
+
+ Map<String, Object> timestampISO =
+ Collections.singletonMap("timestampHighPrecisionField",
"2025-01-01T12:34:56.123456Z"); + InsertAllRequest request = InsertAllRequest.newBuilder(tableId).addRow(timestampISO).build(); + InsertAllResponse response = bigquery.insertAll(request); + assertFalse(response.hasErrors()); + assertEquals(0, response.getInsertErrors().size()); + + bigquery.delete(tableId); + } + + @Test + void insert_highPrecisionTimestamp_invalidFormats() { + StandardTableDefinition tableDefinition = + StandardTableDefinition.newBuilder().setSchema(TABLE_SCHEMA).build(); + String tempTable = generateTempTableName("insert_temp"); + TableId tableId = TableId.of(DATASET, tempTable); + Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition)); + assertNotNull(createdTable); + + Map timestampInMicros = + Collections.singletonMap("timestampHighPrecisionField", 123456); + Map timestampInMicrosString = + Collections.singletonMap("timestampHighPrecisionField", "123456"); + Map timestampNegative = + Collections.singletonMap("timestampHighPrecisionField", -123456); + Map timestampFloat = + Collections.singletonMap("timestampHighPrecisionField", 1000.0); + Map timestampProtobuf = + Collections.singletonMap( + "timestampHighPrecisionField", + Timestamp.newBuilder().setSeconds(123456789).setNanos(123456789).build()); + Map timestampProtobufNegative = + Collections.singletonMap( + "timestampHighPrecisionField", + Timestamp.newBuilder().setSeconds(-123456789).setNanos(-123456789).build()); + InsertAllRequest request = + InsertAllRequest.newBuilder(tableId) + .addRow(timestampInMicros) + .addRow(timestampInMicrosString) + .addRow(timestampNegative) + .addRow(timestampFloat) + .addRow(timestampProtobuf) + .addRow(timestampProtobufNegative) + .build(); + InsertAllResponse response = bigquery.insertAll(request); + assertTrue(response.hasErrors()); + assertEquals(request.getRows().size(), response.getInsertErrors().size()); + + bigquery.delete(tableId); + } + + @Test + void queryNamedParameter_highPrecisionTimestamp() throws InterruptedException { + String query = + String.format( + "SELECT * FROM %s.%s WHERE timestampHighPrecisionField >= CAST(@timestampParam AS TIMESTAMP(12))", + DATASET, defaultTableId.getTable()); + + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DATASET) + .setUseLegacySql(false) + .addNamedParameter( + "timestampParam", + // For named parameters, java-bigquery does not expect the 'T' + QueryParameterValue.timestamp("2000-01-01 12:34:56.123456789123Z")) + .build(); + + TableResult result = bigquery.query(queryConfig); + assertNotNull(result); + String[] expected = new String[] {TIMESTAMP1, TIMESTAMP3}; + List timestamps = + StreamSupport.stream(result.getValues().spliterator(), false) + .map(x -> (String) x.get(0).getValue()) + .collect(Collectors.toList()); + assertEquals(expected.length, timestamps.size()); + assertThat(timestamps).containsAtLeastElementsIn(expected); + } + + @Test + void queryPositionalParameter_highPrecisionTimestamp() throws InterruptedException { + String query = + String.format( + "SELECT * FROM %s.%s WHERE timestampHighPrecisionField >= CAST(? 
AS TIMESTAMP(12))", + DATASET, defaultTableId.getTable()); + + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DATASET) + .setUseLegacySql(false) + .addPositionalParameter( + // For positional parameters, java-bigquery does not expect the 'T' + QueryParameterValue.timestamp("2000-01-01 12:34:56.123456789123Z")) + .build(); + + TableResult result = bigquery.query(queryConfig); + assertNotNull(result); + String[] expected = new String[] {TIMESTAMP1, TIMESTAMP3}; + List timestamps = + StreamSupport.stream(result.getValues().spliterator(), false) + .map(x -> (String) x.get(0).getValue()) + .collect(Collectors.toList()); + assertEquals(expected.length, timestamps.size()); + assertThat(timestamps).containsAtLeastElementsIn(expected); + } + + @Test + void queryNamedParameter_highPrecisionTimestamp_microsLong() throws InterruptedException { + String query = + String.format( + "SELECT * FROM %s.%s WHERE timestampHighPrecisionField >= CAST(@timestampParam AS TIMESTAMP(12))", + DATASET, defaultTableId.getTable()); + + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DATASET) + .setUseLegacySql(false) + .addNamedParameter( + "timestampParam", + QueryParameterValue.timestamp( + 946730096123456L)) // micros for 2000-01-01 12:34:56.123456Z + .build(); + + TableResult result = bigquery.query(queryConfig); + assertNotNull(result); + // Exact timestamp for TIMESTAMP3 is `2000-01-01T12:34:56.123456789123Z` and for the micros + // is `2000-01-01T12:34:56.123456Z`. The micros value gets cast to 12 digits of precision, so + // it becomes `2000-01-01T12:34:56.123456000000Z`. We do expect it as part of the query. + String[] expected = new String[] {TIMESTAMP1, TIMESTAMP3}; + List timestamps = + StreamSupport.stream(result.getValues().spliterator(), false) + .map(x -> (String) x.get(0).getValue()) + .collect(Collectors.toList()); + assertEquals(expected.length, timestamps.size()); + assertThat(timestamps).containsAtLeastElementsIn(expected); + } + + @Test + void queryNamedParameter_highPrecisionTimestamp_microsISOString() throws InterruptedException { + String query = + String.format( + "SELECT * FROM %s.%s WHERE timestampHighPrecisionField >= CAST(@timestampParam AS TIMESTAMP(12))", + DATASET, defaultTableId.getTable()); + + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DATASET) + .setUseLegacySql(false) + .addNamedParameter( + // For named parameters, java-bigquery does not expect the 'T' + "timestampParam", QueryParameterValue.timestamp("2000-01-01 12:34:56.123456Z")) + .build(); + + TableResult result = bigquery.query(queryConfig); + assertNotNull(result); + List timestamps = + StreamSupport.stream(result.getValues().spliterator(), false) + .map(x -> (String) x.get(0).getValue()) + .collect(Collectors.toList()); + String[] expected = new String[] {TIMESTAMP1, TIMESTAMP3}; + assertEquals(expected.length, timestamps.size()); + assertThat(timestamps).containsAtLeastElementsIn(expected); + } + + @Test + void queryNamedParameter_highPrecisionTimestamp_noExplicitCastInQuery_fails() { + String query = + String.format( + "SELECT * FROM %s.%s WHERE timestampHighPrecisionField >= @timestampParam", + DATASET, defaultTableId.getTable()); + + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(DATASET) + .setUseLegacySql(false) + .addNamedParameter( + // For named parameters, java-bigquery does not expect the 'T' + 
"timestampParam", QueryParameterValue.timestamp("2000-01-01 12:34:56.123456789123")) + .build(); + + BigQueryException exception = + assertThrows(BigQueryException.class, () -> bigquery.query(queryConfig)); + assertEquals("Invalid argument type passed to a function", exception.getMessage()); + } +} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java new file mode 100644 index 0000000000..6418682038 --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITNightlyBigQueryTest.java @@ -0,0 +1,710 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.google.cloud.bigquery.it; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryError; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.BigQueryResult; +import com.google.cloud.bigquery.BigQuerySQLException; +import com.google.cloud.bigquery.Connection; +import com.google.cloud.bigquery.ConnectionSettings; +import com.google.cloud.bigquery.Dataset; +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.DatasetInfo; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.InsertAllRequest; +import com.google.cloud.bigquery.InsertAllResponse; +import com.google.cloud.bigquery.Parameter; +import com.google.cloud.bigquery.QueryParameterValue; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.StandardTableDefinition; +import com.google.cloud.bigquery.Table; +import com.google.cloud.bigquery.TableDefinition; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TableInfo; +import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; +import com.google.common.collect.ImmutableList; +import com.google.common.io.BaseEncoding; +import java.io.IOException; +import java.math.BigDecimal; +import java.nio.charset.StandardCharsets; +import java.sql.Date; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Time; +import java.time.LocalTime; +import java.time.ZoneId; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.TimeZone; +import java.util.UUID; +import java.util.logging.Level; +import java.util.logging.Logger; +import 
org.apache.arrow.vector.util.JsonStringArrayList; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.Timeout; + +@Timeout(value = 1800) // 30 min timeout +public class ITNightlyBigQueryTest { + private static final Logger logger = Logger.getLogger(ITNightlyBigQueryTest.class.getName()); + private static final String DATASET = RemoteBigQueryHelper.generateDatasetName(); + private static final String TABLE = + "TEMP_RS_TEST_TABLE" + UUID.randomUUID().toString().substring(0, 8); + private static final byte[] BYTES = "TestByteValue".getBytes(StandardCharsets.UTF_8); + private static final String BYTES_BASE64 = BaseEncoding.base64().encode(BYTES); + // Script will populate NUM_BATCHES*REC_PER_BATCHES number of records (eg: 100*10000 = 1M) + private static final int NUM_BATCHES = 55; + private static final int REC_PER_BATCHES = 10000; + private static final int LIMIT_RECS = 500000; // We can plan to read ~ 500K / 1M records + private static final int MULTI_LIMIT_RECS = + 300000; // Used for multiquery testcase, a lower limit like 300K should be fine + private static int rowCnt = 0; + private static BigQuery bigquery; + private static final String BASE_QUERY = + "select StringField, GeographyField, BooleanField, BigNumericField, IntegerField, NumericField, BytesField, " + + "TimestampField, TimeField, DateField, IntegerArrayField, RecordField.BooleanField, RecordField.StringField ," + + " JSONField, JSONField.hello, JSONField.id from %s.%s order by IntegerField asc LIMIT %s"; + private static final String POSITIONAL_QUERY = + String.format( + "select RecordField.BooleanField, RecordField.StringField, StringField, BooleanField, BytesField, IntegerField, GeographyField, NumericField, BigNumericField, TimeField, DateField, TimestampField, JSONField from %s.%s where DateField = ? and BooleanField = ? and IntegerField > ? and NumericField > ? 
LIMIT %s", + DATASET, TABLE, MULTI_LIMIT_RECS); + private static final String QUERY = String.format(BASE_QUERY, DATASET, TABLE, LIMIT_RECS); + private static final String MULTI_QUERY = + String.format(BASE_QUERY, DATASET, TABLE, MULTI_LIMIT_RECS); + private static final String INVALID_QUERY = + String.format( + "select into %s.%s order by IntegerField asc LIMIT %s", DATASET, TABLE, LIMIT_RECS); + + private static final Schema BQ_SCHEMA = + Schema.of( + Field.newBuilder("TimestampField", StandardSQLTypeName.TIMESTAMP) + .setMode(Field.Mode.NULLABLE) + .setDescription("TimestampDescription") + .build(), + Field.newBuilder("StringField", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("StringDescription") + .build(), + Field.newBuilder("IntegerArrayField", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.REPEATED) + .setDescription("IntegerArrayDescription") + .build(), + Field.newBuilder("BooleanField", StandardSQLTypeName.BOOL) + .setMode(Field.Mode.NULLABLE) + .setDescription("BooleanDescription") + .build(), + Field.newBuilder("BytesField", StandardSQLTypeName.BYTES) + .setMode(Field.Mode.NULLABLE) + .setDescription("BytesDescription") + .build(), + Field.newBuilder( + "RecordField", + StandardSQLTypeName.STRUCT, + Field.newBuilder("StringField", StandardSQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .setDescription("StringDescription") + .build(), + Field.newBuilder("BooleanField", StandardSQLTypeName.BOOL) + .setMode(Field.Mode.NULLABLE) + .setDescription("BooleanDescription") + .build()) + .setMode(Field.Mode.NULLABLE) + .setDescription("RecordDescription") + .build(), + Field.newBuilder("IntegerField", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.NULLABLE) + .setDescription("IntegerDescription") + .build(), + Field.newBuilder("GeographyField", StandardSQLTypeName.GEOGRAPHY) + .setMode(Field.Mode.NULLABLE) + .setDescription("GeographyDescription") + .build(), + Field.newBuilder("NumericField", StandardSQLTypeName.NUMERIC) + .setMode(Field.Mode.NULLABLE) + .setDescription("NumericDescription") + .build(), + Field.newBuilder("BigNumericField", StandardSQLTypeName.BIGNUMERIC) + .setMode(Field.Mode.NULLABLE) + .setDescription("BigNumericDescription") + .build(), + Field.newBuilder("TimeField", StandardSQLTypeName.TIME) + .setMode(Field.Mode.NULLABLE) + .setDescription("TimeDescription") + .build(), + Field.newBuilder("DateField", StandardSQLTypeName.DATE) + .setMode(Field.Mode.NULLABLE) + .setDescription("DateDescription") + .build(), + Field.newBuilder("JSONField", StandardSQLTypeName.JSON) + .setMode(Field.Mode.NULLABLE) + .setDescription("JSONFieldDescription") + .build(), + Field.newBuilder("IntervalField", StandardSQLTypeName.INTERVAL) + .setMode(Field.Mode.NULLABLE) + .setDescription("IntervalFieldDescription") + .build()); + + @BeforeAll + public static void beforeClass() throws InterruptedException, IOException { + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + bigquery = bigqueryHelper.getOptions().getService(); + createDataset(DATASET); + createTable(DATASET, TABLE, BQ_SCHEMA); + populateTestRecords(DATASET, TABLE); + } + + @AfterAll + public static void afterClass() { + try { + if (bigquery != null) { + deleteTable(DATASET, TABLE); + RemoteBigQueryHelper.forceDelete(bigquery, DATASET); + } + } catch (BigQueryException e) { + throw new RuntimeException("Error clearing the test dataset " + e); + } + } + + @Test + public void testInvalidQuery() throws BigQuerySQLException { + Connection connection = 
getConnection(); + try { + BigQuerySQLException ex = + assertThrows(BigQuerySQLException.class, () -> connection.executeSelect(INVALID_QUERY)); + assertNotNull(ex.getMessage()); + assertTrue(ex.getMessage().toLowerCase().contains("unexpected keyword into")); + } finally { + connection.close(); + } + } + + /* + This tests for the order of the records as well as the value of the records using testForAllDataTypeValues + */ + @Test + public void testIterateAndOrder() throws SQLException { + Connection connection = getConnection(); + try { + BigQueryResult bigQueryResult = connection.executeSelect(QUERY); + logger.log(Level.INFO, "Query used: {0}", QUERY); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + + int prevIntegerFieldVal = 0; + while (rs.next()) { + if (cnt == 0) { // first row is supposed to be null + assertNull(rs.getString("StringField")); + assertNull(rs.getString("GeographyField")); + Object intAryField = rs.getObject("IntegerArrayField"); + if (intAryField instanceof JsonStringArrayList) { + assertEquals( + new JsonStringArrayList(), + ((JsonStringArrayList) intAryField)); // null array is returned as an empty array + } + assertFalse(rs.getBoolean("BooleanField")); + assertTrue(0.0d == rs.getDouble("BigNumericField")); + assertTrue(0 == rs.getInt("IntegerField")); + assertTrue(0L == rs.getLong("NumericField")); + assertNull(rs.getBytes("BytesField")); + assertNull(rs.getTimestamp("TimestampField")); + assertNull(rs.getTime("TimeField")); + assertNull(rs.getDate("DateField")); + assertNull(rs.getString("JSONField")); + assertFalse(rs.getBoolean("BooleanField_1")); + assertNull(rs.getString("StringField_1")); + assertNull(rs.getString("hello")); // equivalent of testJsonType + assertEquals(0, rs.getInt("id")); + + } else { // remaining rows are supposed to be non null + assertNotNull(rs.getString("StringField")); + assertNotNull(rs.getString("GeographyField")); + assertNotNull(rs.getObject("IntegerArrayField")); + assertTrue(rs.getBoolean("BooleanField")); + assertTrue(0.0d < rs.getDouble("BigNumericField")); + assertTrue(0 < rs.getInt("IntegerField")); + assertTrue(0L < rs.getLong("NumericField")); + assertNotNull(rs.getBytes("BytesField")); + assertNotNull(rs.getTimestamp("TimestampField")); + assertNotNull(rs.getTime("TimeField")); + assertNotNull(rs.getDate("DateField")); + assertNotNull(rs.getString("JSONField")); + assertFalse(rs.getBoolean("BooleanField_1")); + assertNotNull(rs.getString("StringField_1")); + + // check the order of the records + assertTrue(prevIntegerFieldVal < rs.getInt("IntegerField")); + prevIntegerFieldVal = rs.getInt("IntegerField"); + + testForAllDataTypeValues(rs, cnt); // asserts the value of each row + } + ++cnt; + } + assertEquals(LIMIT_RECS, cnt); // all the records were retrieved + } finally { + connection.close(); + } + } + + /* + This tests for the order of the records using default connection settings as well as the value of the records using testForAllDataTypeValues + */ + @Test + void testIterateAndOrderDefaultConnSettings() throws SQLException { + Connection connection = bigquery.createConnection(); + try { + BigQueryResult bigQueryResult = connection.executeSelect(QUERY); + logger.log(Level.INFO, "Query used: {0}", QUERY); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + + int prevIntegerFieldVal = 0; + while (rs.next()) { + if (cnt == 0) { // first row is supposed to be null + assertNull(rs.getString("StringField")); + assertNull(rs.getString("GeographyField")); + Object intAryField = 
rs.getObject("IntegerArrayField"); + if (intAryField instanceof JsonStringArrayList) { + assertEquals( + new JsonStringArrayList(), + ((JsonStringArrayList) intAryField)); // null array is returned as an empty array + } + assertFalse(rs.getBoolean("BooleanField")); + assertTrue(0.0d == rs.getDouble("BigNumericField")); + assertTrue(0 == rs.getInt("IntegerField")); + assertTrue(0L == rs.getLong("NumericField")); + assertNull(rs.getBytes("BytesField")); + assertNull(rs.getTimestamp("TimestampField")); + assertNull(rs.getTime("TimeField")); + assertNull(rs.getDate("DateField")); + assertNull(rs.getString("JSONField")); + assertFalse(rs.getBoolean("BooleanField_1")); + assertNull(rs.getString("StringField_1")); + assertNull(rs.getString("hello")); // equivalent of testJsonType + assertEquals(0, rs.getInt("id")); + + } else { // remaining rows are supposed to be non null + assertNotNull(rs.getString("StringField")); + assertNotNull(rs.getString("GeographyField")); + assertNotNull(rs.getObject("IntegerArrayField")); + assertTrue(rs.getBoolean("BooleanField")); + assertTrue(0.0d < rs.getDouble("BigNumericField")); + assertTrue(0 < rs.getInt("IntegerField")); + assertTrue(0L < rs.getLong("NumericField")); + assertNotNull(rs.getBytes("BytesField")); + assertNotNull(rs.getTimestamp("TimestampField")); + assertNotNull(rs.getTime("TimeField")); + assertNotNull(rs.getDate("DateField")); + assertNotNull(rs.getString("JSONField")); + assertFalse(rs.getBoolean("BooleanField_1")); + assertNotNull(rs.getString("StringField_1")); + + // check the order of the records + assertTrue(prevIntegerFieldVal < rs.getInt("IntegerField")); + prevIntegerFieldVal = rs.getInt("IntegerField"); + + testForAllDataTypeValues(rs, cnt); // asserts the value of each row + } + ++cnt; + } + assertEquals(LIMIT_RECS, cnt); // all the records were retrieved + } finally { + connection.close(); + } + } + + /* + This tests interrupts the execution in between and checks if it has been interrupted successfully while using ReadAPI + */ + @Test + void testConnectionClose() throws SQLException { + Connection connection = bigquery.createConnection(); + try { + assertNotNull(connection, "bigquery.createConnection() returned null"); + BigQueryResult bigQueryResult = connection.executeSelect(QUERY); + logger.log(Level.INFO, "Query used: {0}", QUERY); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + while (rs.next()) { + ++cnt; + if (cnt == 50000) { // interrupt at 50K + break; + } + } + assertTrue(LIMIT_RECS > cnt); + // we stopped at 50K but still we can expect additional records (typically ~100) + // to be retrieved + // as a number of records should have been already buffered. 
less than + // LIMIT_RECS should be retrieved + } finally { + connection.close(); + } + } + + @Test + void testMultipleRuns() throws SQLException { + int totalCnt = 0; + Connection connection = getConnection(); + try { + BigQueryResult bigQueryResult = connection.executeSelect(MULTI_QUERY); + logger.log(Level.INFO, "Query used: {0}", MULTI_QUERY); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + + int prevIntegerFieldVal = 0; + while (rs.next()) { + if (cnt == 0) { // first row is supposed to be null + assertNull(rs.getString("StringField")); + assertNull(rs.getString("GeographyField")); + Object intAryField = rs.getObject("IntegerArrayField"); + if (intAryField instanceof JsonStringArrayList) { + assertEquals( + new JsonStringArrayList(), + ((JsonStringArrayList) intAryField)); // null array is returned as an empty array + } + assertFalse(rs.getBoolean("BooleanField")); + assertTrue(0.0d == rs.getDouble("BigNumericField")); + assertTrue(0 == rs.getInt("IntegerField")); + assertTrue(0L == rs.getLong("NumericField")); + assertNull(rs.getBytes("BytesField")); + assertNull(rs.getTimestamp("TimestampField")); + assertNull(rs.getTime("TimeField")); + assertNull(rs.getDate("DateField")); + assertNull(rs.getString("JSONField")); + assertFalse(rs.getBoolean("BooleanField_1")); + assertNull(rs.getString("StringField_1")); + assertNull(rs.getString("hello")); // equivalent of testJsonType + assertEquals(0, rs.getInt("id")); + + } else { // remaining rows are supposed to be non null + // check the order of the records + assertTrue(prevIntegerFieldVal < rs.getInt("IntegerField")); + prevIntegerFieldVal = rs.getInt("IntegerField"); + + testForAllDataTypeValues(rs, cnt); // asserts the value of each row + } + ++cnt; + } + totalCnt += cnt; + } finally { + connection.close(); + } + + // Repeat the same run + Connection connection1 = getConnection(); + try { + BigQueryResult bigQueryResult = connection1.executeSelect(MULTI_QUERY); + ResultSet rs = bigQueryResult.getResultSet(); + int cnt = 0; + int prevIntegerFieldVal = 0; + while (rs.next()) { + if (cnt == 0) { // first row is supposed to be null + assertNull(rs.getString("StringField")); + assertNull(rs.getString("GeographyField")); + Object intAryField = rs.getObject("IntegerArrayField"); + if (intAryField instanceof JsonStringArrayList) { + assertEquals( + new JsonStringArrayList(), + ((JsonStringArrayList) intAryField)); // null array is returned as an empty array + } + assertFalse(rs.getBoolean("BooleanField")); + assertTrue(0.0d == rs.getDouble("BigNumericField")); + assertTrue(0 == rs.getInt("IntegerField")); + assertTrue(0L == rs.getLong("NumericField")); + assertNull(rs.getBytes("BytesField")); + assertNull(rs.getTimestamp("TimestampField")); + assertNull(rs.getTime("TimeField")); + assertNull(rs.getDate("DateField")); + assertNull(rs.getString("JSONField")); + assertFalse(rs.getBoolean("BooleanField_1")); + assertNull(rs.getString("StringField_1")); + assertNull(rs.getString("hello")); // equivalent of testJsonType + assertEquals(0, rs.getInt("id")); + + } else { // remaining rows are supposed to be non null + // check the order of the records + assertTrue(prevIntegerFieldVal < rs.getInt("IntegerField")); + prevIntegerFieldVal = rs.getInt("IntegerField"); + + testForAllDataTypeValues(rs, cnt); // asserts the value of each row + } + ++cnt; + } + totalCnt += cnt; + } finally { + connection1.close(); + } + assertEquals(MULTI_LIMIT_RECS * 2, totalCnt); + } + + @Test + void testPositionalParams() + throws SQLException { // Bypasses 
the Read API, as it doesn't support positional parameters
+ Connection connection = getConnection();
+ try {
+ Parameter dateParam =
+ Parameter.newBuilder().setValue(QueryParameterValue.date("2022-01-01")).build();
+ Parameter boolParam = Parameter.newBuilder().setValue(QueryParameterValue.bool(true)).build();
+ Parameter intParam = Parameter.newBuilder().setValue(QueryParameterValue.int64(1)).build();
+ Parameter numericParam =
+ Parameter.newBuilder().setValue(QueryParameterValue.numeric(new BigDecimal(100))).build();
+ List<Parameter> parameters = ImmutableList.of(dateParam, boolParam, intParam, numericParam);
+
+ BigQueryResult bigQueryResult = connection.executeSelect(POSITIONAL_QUERY, parameters);
+ logger.log(Level.INFO, "Query used: {0}", POSITIONAL_QUERY);
+ ResultSet rs = bigQueryResult.getResultSet();
+ int cnt = 0;
+ while (rs.next()) {
+ assertFalse(rs.getBoolean("BooleanField"));
+ assertTrue(0.0d <= rs.getDouble("BigNumericField"));
+ assertTrue(0 <= rs.getInt("IntegerField"));
+ assertTrue(0L <= rs.getLong("NumericField"));
+ assertNotNull(rs.getBytes("BytesField"));
+ assertNotNull(rs.getTimestamp("TimestampField"));
+ assertNotNull(rs.getTime("TimeField"));
+ assertNotNull(rs.getDate("DateField"));
+ assertNotNull(rs.getString("JSONField"));
+ assertTrue(rs.getBoolean("BooleanField_1"));
+ assertNotNull(rs.getString("StringField_1"));
+ ++cnt;
+ }
+ assertEquals(MULTI_LIMIT_RECS, cnt);
+ } finally {
+ connection.close();
+ }
+ }
+
+ @Test
+ // This testcase reads rows in bulk from a public table to make sure we do not get a
+ // table-not-found exception. Ref: b/241134681. This exception has been seen while reading data
+ // in bulk.
+ void testForTableNotFound() throws SQLException {
+ int recordCnt = 50000000; // 50M rows
+ String query =
+ String.format(
+ "SELECT * FROM `bigquery-samples.wikipedia_benchmark.Wiki10B` LIMIT %s", recordCnt);
+ logger.log(Level.INFO, "Query used: {0}", query);
+ String dataSet = RemoteBigQueryHelper.generateDatasetName();
+ String table = "TAB_" + UUID.randomUUID();
+ createDataset(dataSet);
+ TableId targetTable =
+ TableId.of(
+ ServiceOptions.getDefaultProjectId(),
+ dataSet,
+ table); // table will be created implicitly
+
+ ConnectionSettings conSet =
+ ConnectionSettings.newBuilder()
+ .setUseReadAPI(true) // enable read api
+ .setDestinationTable(targetTable)
+ .setAllowLargeResults(true)
+ .build();
+
+ Connection connection =
+ BigQueryOptions.getDefaultInstance().getService().createConnection(conSet);
+ BigQueryResult bigQueryResultSet = connection.executeSelect(query);
+ assertNotNull(getResultHashWiki(bigQueryResultSet)); // this iterates through all the rows
+ assertTrue(
+ (recordCnt == bigQueryResultSet.getTotalRows())
+ || (-1
+ == bigQueryResultSet
+ .getTotalRows())); // either job should return the actual count or -1 if the job
+ // is still running
+ try {
+ deleteTable(dataSet, table);
+ deleteDataset(dataSet);
+ } catch (Exception e) {
+ logger.log(
+ Level.WARNING,
+ String.format(
+ "Error [ %s ] while deleting dataset: %s , table: %s",
+ e.getMessage(), dataSet, table));
+ }
+ }
+
+ // iterates through all the rows (reads just the title column)
+ private Long getResultHashWiki(BigQueryResult bigQueryResultSet) throws SQLException {
+ ResultSet rs = bigQueryResultSet.getResultSet();
+ long hash = 0L;
+ System.out.print("\n Running");
+ while (rs.next()) {
+ hash += rs.getString("title") == null ?
0 : rs.getString("title").hashCode();
+ }
+ return hash;
+ }
+
+ // asserts the value of each row
+ private static void testForAllDataTypeValues(ResultSet rs, int cnt) throws SQLException {
+ // Testing JSON type
+ assertEquals("\"world\"", rs.getString("hello")); // BQ stores the value as "world"
+ assertEquals(100, rs.getInt("id"));
+ assertEquals("{\"hello\":\"world\",\"id\":100}", rs.getString("JSONField"));
+
+ // String and Geography types
+ assertEquals(String.format("String Val %s", cnt), rs.getString("StringField"));
+ assertEquals("POINT(1 2)", rs.getString("GeographyField"));
+
+ // Array type tests
+ if (rs.getObject("IntegerArrayField") instanceof JsonStringArrayList) {
+ JsonStringArrayList<BigDecimal> ary =
+ (JsonStringArrayList<BigDecimal>) rs.getObject("IntegerArrayField");
+ assertEquals(3, ary.size());
+ assertEquals(1, ary.get(0).intValue());
+ assertEquals(2, ary.get(1).intValue());
+ assertEquals(3, ary.get(2).intValue());
+ }
+
+ // BigNumeric, int and Numeric
+ assertTrue(10000000L + cnt == rs.getDouble("BigNumericField"));
+ assertEquals(1 + cnt, rs.getInt("IntegerField"));
+ assertEquals(100 + cnt, rs.getLong("NumericField"));
+ // Test Bytes field
+ assertEquals("TestByteValue", new String(rs.getBytes("BytesField"), StandardCharsets.UTF_8));
+
+ // Struct Fields
+ assertFalse(rs.getBoolean("BooleanField_1"));
+ assertEquals(String.format("Str Val %s", cnt), rs.getString("StringField_1"));
+
+ // Timestamp, Time, DateTime and Date fields
+ assertEquals(1649064795000L, rs.getTimestamp("TimestampField").getTime());
+ assertEquals(Date.valueOf("2022-01-01").toString(), rs.getDate("DateField").toString());
+ // Time is represented independently of a specific date and time zone. For example, 12:11:35
+ // (GMT) is returned as 17:11:35 (GMT+5:30), so we adjust for the zone offset.
+ int offset =
+ TimeZone.getTimeZone(ZoneId.systemDefault())
+ .getOffset(new java.util.Date().getTime()); // offset in milliseconds
+ assertEquals(
+ Time.valueOf(LocalTime.of(12, 11, 35)).getTime() + offset,
+ rs.getTime("TimeField").getTime());
+ }
+
+ private static void populateTestRecords(String datasetName, String tableName) {
+ TableId tableId = TableId.of(datasetName, tableName);
+ for (int batchCnt = 1; batchCnt <= NUM_BATCHES; batchCnt++) {
+ addBatchRecords(tableId);
+ }
+ }
+
+ private static void addBatchRecords(TableId tableId) {
+ Map<String, Object> nullRow = new HashMap<>();
+ try {
+ InsertAllRequest.Builder reqBuilder = InsertAllRequest.newBuilder(tableId);
+ if (rowCnt == 0) {
+ reqBuilder.addRow(nullRow);
+ }
+ for (int i = 0; i < REC_PER_BATCHES; i++) {
+ reqBuilder.addRow(getNextRow());
+ }
+ InsertAllResponse response = bigquery.insertAll(reqBuilder.build());
+
+ if (response.hasErrors()) {
+ // If any of the insertions failed, this lets you inspect the errors
+ for (Map.Entry<Long, List<BigQueryError>> entry :
+ response.getInsertErrors().entrySet()) {
+ logger.log(Level.WARNING, "Exception while adding records {0}", entry.getValue());
+ }
+ throw new BigQueryException(0, "Response has errors");
+ }
+ } catch (BigQueryException e) {
+ logger.log(Level.WARNING, "Exception while adding records {0}", e);
+ throw new BigQueryException(0, "Error in addBatchRecords", e);
+ }
+ }
+
+ static void createTable(String datasetName, String tableName, Schema schema) {
+ try {
+ TableId tableId = TableId.of(datasetName, tableName);
+ TableDefinition tableDefinition = StandardTableDefinition.of(schema);
+ TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build();
+ Table table = bigquery.create(tableInfo);
+ assertTrue(table.exists());
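+ // (exists() above issues a tables.get call, so this asserts that the table was created
+ // server-side, not just that the TableInfo was built locally.)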
+ } catch (BigQueryException e) {
+ fail("Table was not created. \n" + e);
+ }
+ }
+
+ static void deleteTable(String datasetName, String tableName) {
+ try {
+ assertTrue(bigquery.delete(TableId.of(datasetName, tableName)));
+ } catch (BigQueryException e) {
+ fail("Table was not deleted. \n" + e);
+ }
+ }
+
+ static void createDataset(String datasetName) {
+ try {
+ DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build();
+ Dataset newDataset = bigquery.create(datasetInfo);
+ assertNotNull(newDataset.getDatasetId().getDataset());
+ } catch (BigQueryException e) {
+ fail("Dataset was not created. \n" + e);
+ }
+ }
+
+ static void deleteDataset(String datasetName) {
+ try {
+ DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build();
+ assertTrue(bigquery.delete(datasetInfo.getDatasetId()));
+ } catch (BigQueryException e) {
+ fail("Dataset was not deleted. \n" + e);
+ }
+ }
+
+ private Connection getConnection() {
+ ConnectionSettings connectionSettings =
+ ConnectionSettings.newBuilder()
+ .setDefaultDataset(DatasetId.of(DATASET))
+ .build(); // Read API is enabled by default
+ return bigquery.createConnection(connectionSettings);
+ }
+
+ private static Map<String, Object> getNextRow() {
+ rowCnt++;
+ Map<String, Object> row = new HashMap<>();
+ Map<String, Object> structVal = new HashMap<>();
+ structVal.put("StringField", "Str Val " + rowCnt);
+ structVal.put("BooleanField", false);
+ row.put("RecordField", structVal); // struct
+ row.put("TimestampField", "2022-04-04 15:03:15.000 +05:30");
+ row.put("StringField", "String Val " + rowCnt);
+ row.put("IntegerArrayField", new int[] {1, 2, 3});
+ row.put("BooleanField", true);
+ row.put("BytesField", BYTES_BASE64);
+ row.put("IntegerField", 1 + rowCnt);
+ row.put("GeographyField", "POINT(1 2)");
+ row.put("NumericField", 100 + rowCnt);
+ row.put("BigNumericField", 10000000L + rowCnt);
+ row.put("TimeField", "12:11:35");
+ row.put("DateField", "2022-01-01");
+ row.put("JSONField", "{\"hello\":\"world\",\"id\":100}");
+ row.put("IntervalField", "10000-0 3660000 87840000:0:0");
+ return row;
+ }
+} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITRemoteUDFTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITRemoteUDFTest.java new file mode 100644 index 0000000000..d3610b4fff --- /dev/null +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITRemoteUDFTest.java @@ -0,0 +1,136 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ +package com.google.cloud.bigquery.it; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import com.google.cloud.ServiceOptions; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.DatasetInfo; +import com.google.cloud.bigquery.RemoteFunctionOptions; +import com.google.cloud.bigquery.Routine; +import com.google.cloud.bigquery.RoutineArgument; +import com.google.cloud.bigquery.RoutineId; +import com.google.cloud.bigquery.RoutineInfo; +import com.google.cloud.bigquery.StandardSQLDataType; +import com.google.cloud.bigquery.connection.v1.CloudResourceProperties; +import com.google.cloud.bigquery.connection.v1.Connection; +import com.google.cloud.bigquery.connection.v1.CreateConnectionRequest; +import com.google.cloud.bigquery.connection.v1.DeleteConnectionRequest; +import com.google.cloud.bigquery.connection.v1.LocationName; +import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; +import com.google.cloud.bigqueryconnection.v1.ConnectionServiceClient; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Test; + +class ITRemoteUDFTest { + + private static final String ID = UUID.randomUUID().toString().substring(0, 8); + private static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); + private static final String CONNECTION_ID = "test-connection-id-" + ID; + private static final String LOCATION = "US"; + private static final String PARENT = LocationName.of(PROJECT_ID, LOCATION).toString(); + private static final String REMOTE_ENDPOINT = "https://aaabbbccc-uc.a.run.app"; + private static final String ROUTINE_DATASET = RemoteBigQueryHelper.generateDatasetName(); + private static ConnectionServiceClient client; + private static Connection connection; + private static BigQuery bigquery; + + @BeforeEach + void setUp() throws IOException { + RemoteBigQueryHelper bigqueryHelper = RemoteBigQueryHelper.create(); + bigquery = bigqueryHelper.getOptions().getService(); + client = ConnectionServiceClient.create(); + + DatasetInfo info = + DatasetInfo.newBuilder(ROUTINE_DATASET).setDescription("java routine lifecycle").build(); + bigquery.create(info); + CloudResourceProperties cloudResourceProperties = CloudResourceProperties.newBuilder().build(); + CreateConnectionRequest request = + CreateConnectionRequest.newBuilder() + .setParent(PARENT) + .setConnection( + Connection.newBuilder().setCloudResource(cloudResourceProperties).build()) + .setConnectionId(CONNECTION_ID) + .build(); + connection = client.createConnection(request); + } + + @AfterAll + static void afterClass() { + if (bigquery != null) { + RemoteBigQueryHelper.forceDelete(bigquery, ROUTINE_DATASET); + } + + // In JUnit, @BeforeEach only runs before a test is invoked. If a test never runs, + // then the logic inside @BeforeEach doesn't (e.g. connection was never created). + // This checks to ensure that connection was created before deleting. 
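+ // (connection remains null if setUp never ran, e.g. while testRoutineRemoteUDF is
+ // @Disabled, so this null guard prevents an NPE during cleanup.)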
+ if (client != null && connection != null) {
+ DeleteConnectionRequest request =
+ DeleteConnectionRequest.newBuilder().setName(connection.getName()).build();
+ client.deleteConnection(request);
+ client.close();
+ }
+ }
+
+ @Disabled("https://github.com/googleapis/java-bigquery/issues/4103")
+ @Test
+ void testRoutineRemoteUDF() {
+ String routineName = RemoteBigQueryHelper.generateRoutineName();
+ RoutineId routineId = RoutineId.of(ROUTINE_DATASET, routineName);
+ Map<String, String> userDefinedContext =
+ new HashMap<String, String>() {
+ {
+ put("key1", "value1");
+ put("key2", "value2");
+ }
+ };
+
+ RemoteFunctionOptions remoteFunctionOptions =
+ RemoteFunctionOptions.newBuilder()
+ .setEndpoint(REMOTE_ENDPOINT)
+ .setConnection(connection.getName())
+ .setMaxBatchingRows(Long.valueOf(30))
+ .setUserDefinedContext(userDefinedContext)
+ .build();
+ RoutineInfo routineInfo =
+ RoutineInfo.newBuilder(routineId)
+ .setRoutineType("SCALAR_FUNCTION")
+ .setArguments(
+ ImmutableList.of(
+ RoutineArgument.newBuilder()
+ .setName("x")
+ .setDataType(StandardSQLDataType.newBuilder("INT64").build())
+ .build()))
+ .setRemoteFunctionOptions(remoteFunctionOptions)
+ .setReturnType(StandardSQLDataType.newBuilder("INT64").build())
+ .build();
+
+ Routine routine = bigquery.create(routineInfo);
+ assertNotNull(routine);
+ assertEquals("SCALAR_FUNCTION", routine.getRoutineType());
+ assertEquals(REMOTE_ENDPOINT, routine.getRemoteFunctionOptions().getEndpoint());
+ assertEquals(connection.getName(), routine.getRemoteFunctionOptions().getConnection());
+ }
+} diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpcTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpcTest.java index 576e5c4e37..3968cd05e2 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpcTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpcTest.java @@ -21,7 +21,7 @@ import com.google.api.services.bigquery.model.DatasetList; import com.google.api.services.bigquery.model.DatasetReference; import java.util.Collections; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class HttpBigQueryRpcTest { @Test @@ -35,7 +35,8 @@ public void testListToDataset() { .setId("project-id:dataset-id") .setFriendlyName("friendly") .setKind("bigquery#dataset") - .setLabels(Collections.singletonMap("foo", "bar")); + .setLabels(Collections.singletonMap("foo", "bar")) + .setLocation("test-region-1"); Dataset dataset = HttpBigQueryRpc.LIST_TO_DATASET.apply(listDataSet); assertThat(dataset.getKind()).isEqualTo("bigquery#dataset"); @@ -43,5 +44,6 @@ assertThat(dataset.getFriendlyName()).isEqualTo("friendly"); assertThat(dataset.getDatasetReference()).isEqualTo(datasetRef); assertThat(dataset.getLabels()).containsExactly("foo", "bar"); + assertThat(dataset.getLocation()).isEqualTo("test-region-1"); } } diff --git a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java index 9b2ef4804f..f66df17ab5 100644 --- a/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java +++ b/google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java @@ -16,8 +16,8 @@ package com.google.cloud.bigquery.testing; -import static org.junit.Assert.assertEquals; -import
static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import com.google.cloud.bigquery.BigQuery; import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption; @@ -25,15 +25,14 @@ import com.google.cloud.http.HttpTransportOptions; import java.io.ByteArrayInputStream; import java.io.InputStream; -import java.util.concurrent.ExecutionException; -import org.junit.Test; -import org.junit.runner.RunWith; +import java.time.Duration; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mockito; -import org.mockito.junit.MockitoJUnitRunner; -import org.threeten.bp.Duration; +import org.mockito.junit.jupiter.MockitoExtension; -@RunWith(MockitoJUnitRunner.class) -public class RemoteBigQueryHelperTest { +@ExtendWith(MockitoExtension.class) +class RemoteBigQueryHelperTest { private static final String DATASET_NAME = "dataset-name"; private static final String PROJECT_ID = "project-id"; @@ -67,7 +66,7 @@ public class RemoteBigQueryHelperTest { private static final InputStream JSON_KEY_STREAM = new ByteArrayInputStream(JSON_KEY.getBytes()); @Test - public void testForceDelete() throws InterruptedException, ExecutionException { + void testForceDelete() { BigQuery bigqueryMock = Mockito.mock(BigQuery.class); Mockito.when(bigqueryMock.delete(DATASET_NAME, DatasetDeleteOption.deleteContents())) .thenReturn(true); @@ -76,15 +75,15 @@ public void testForceDelete() throws InterruptedException, ExecutionException { } @Test - public void testCreateFromStream() { + void testCreateFromStream() { RemoteBigQueryHelper helper = RemoteBigQueryHelper.create(PROJECT_ID, JSON_KEY_STREAM); BigQueryOptions options = helper.getOptions(); assertEquals(PROJECT_ID, options.getProjectId()); assertEquals(60000, ((HttpTransportOptions) options.getTransportOptions()).getConnectTimeout()); assertEquals(60000, ((HttpTransportOptions) options.getTransportOptions()).getReadTimeout()); assertEquals(10, options.getRetrySettings().getMaxAttempts()); - assertEquals(Duration.ofMillis(30000), options.getRetrySettings().getMaxRetryDelay()); - assertEquals(Duration.ofMillis(120000), options.getRetrySettings().getTotalTimeout()); - assertEquals(Duration.ofMillis(250), options.getRetrySettings().getInitialRetryDelay()); + assertEquals(Duration.ofMillis(1000), options.getRetrySettings().getMaxRetryDelayDuration()); + assertEquals(Duration.ofMillis(3000), options.getRetrySettings().getTotalTimeoutDuration()); + assertEquals(Duration.ofMillis(100), options.getRetrySettings().getInitialRetryDelayDuration()); } } diff --git a/google-cloud-bigquery/src/test/resources/META-INF/native-image/reflect-config.json b/google-cloud-bigquery/src/test/resources/META-INF/native-image/reflect-config.json new file mode 100644 index 0000000000..b999445994 --- /dev/null +++ b/google-cloud-bigquery/src/test/resources/META-INF/native-image/reflect-config.json @@ -0,0 +1,23 @@ +[ + { + "name":"java.lang.Object", + "methods":[{"name":"<init>","parameterTypes":[] }] + }, + { + "name":"com.google.api.client.googleapis.json.GoogleJsonError", + "methods":[ + {"name":"<init>","parameterTypes":[] }] + }, + { + "name":"com.google.api.client.googleapis.json.GoogleJsonError$Details", + "methods":[{"name":"<init>","parameterTypes":[] }] + }, + { + "name":"com.google.api.client.googleapis.json.GoogleJsonError$ErrorInfo", + "methods":[{"name":"<init>","parameterTypes":[]}] + }, + { + "name":"java.util.HashMap", +
"methods":[{"name":"","parameterTypes":[] }] + } +] \ No newline at end of file diff --git a/google-cloud-bigquery/src/test/resources/META-INF/native-image/resource-config.json b/google-cloud-bigquery/src/test/resources/META-INF/native-image/resource-config.json new file mode 100644 index 0000000000..97298417ac --- /dev/null +++ b/google-cloud-bigquery/src/test/resources/META-INF/native-image/resource-config.json @@ -0,0 +1,4 @@ +{ + "resources":[{"pattern": ".*.csv"}, + {"pattern": ".*src/test/resources/sessionTest.csv"}] +} \ No newline at end of file diff --git a/google-cloud-bigquery/src/test/resources/junit-platform.properties b/google-cloud-bigquery/src/test/resources/junit-platform.properties new file mode 100644 index 0000000000..3b9a816b45 --- /dev/null +++ b/google-cloud-bigquery/src/test/resources/junit-platform.properties @@ -0,0 +1,3 @@ +junit.jupiter.execution.parallel.enabled=true + +junit.jupiter.execution.parallel.mode.default=concurrent \ No newline at end of file diff --git a/google-cloud-bigquery/src/test/resources/sessionTest.csv b/google-cloud-bigquery/src/test/resources/sessionTest.csv new file mode 100644 index 0000000000..f500c80c14 --- /dev/null +++ b/google-cloud-bigquery/src/test/resources/sessionTest.csv @@ -0,0 +1,51 @@ +id,firstname,lastname,email,profession +100,Rani,Merell,Rani.Merell@yopmail.com,firefighter +101,Goldie,Dex,Goldie.Dex@yopmail.com,developer +102,Cristabel,Munn,Cristabel.Munn@yopmail.com,developer +103,Genevra,Strephon,Genevra.Strephon@yopmail.com,firefighter +104,Augustine,Thema,Augustine.Thema@yopmail.com,doctor +105,Jemie,Gombach,Jemie.Gombach@yopmail.com,police officer +106,Maye,Stuart,Maye.Stuart@yopmail.com,developer +107,Ayn,Carmena,Ayn.Carmena@yopmail.com,worker +108,Gale,Celestine,Gale.Celestine@yopmail.com,doctor +109,Alex,Jerold,Alex.Jerold@yopmail.com,firefighter +110,Violet,Giule,Violet.Giule@yopmail.com,firefighter +111,Starla,Uird,Starla.Uird@yopmail.com,doctor +112,Tarra,Pelagias,Tarra.Pelagias@yopmail.com,police officer +113,Eugine,Deny,Eugine.Deny@yopmail.com,doctor +114,Shirlee,Ricarda,Shirlee.Ricarda@yopmail.com,doctor +115,Ariela,Penelopa,Ariela.Penelopa@yopmail.com,worker +116,Lelah,Astra,Lelah.Astra@yopmail.com,police officer +117,Debee,Deegan,Debee.Deegan@yopmail.com,developer +118,Pollyanna,Euridice,Pollyanna.Euridice@yopmail.com,worker +119,Cathie,Halsey,Cathie.Halsey@yopmail.com,firefighter +120,Rebeca,Quinn,Rebeca.Quinn@yopmail.com,doctor +121,Paulita,Arquit,Paulita.Arquit@yopmail.com,police officer +122,Rebeca,Emanuel,Rebeca.Emanuel@yopmail.com,firefighter +123,Tera,Ilka,Tera.Ilka@yopmail.com,firefighter +124,Orsola,Briney,Orsola.Briney@yopmail.com,doctor +125,Paulita,Wyn,Paulita.Wyn@yopmail.com,doctor +126,Constance,Christine,Constance.Christine@yopmail.com,firefighter +127,Claresta,Kinnard,Claresta.Kinnard@yopmail.com,developer +128,Leanna,Mendez,Leanna.Mendez@yopmail.com,developer +129,Corina,Chabot,Corina.Chabot@yopmail.com,developer +130,Romona,Audly,Romona.Audly@yopmail.com,worker +131,Cordi,Lynn,Cordi.Lynn@yopmail.com,firefighter +132,Sheree,Tyson,Sheree.Tyson@yopmail.com,worker +133,Jinny,Bevin,Jinny.Bevin@yopmail.com,police officer +134,Kassey,Havens,Kassey.Havens@yopmail.com,firefighter +135,Wanda,Thema,Wanda.Thema@yopmail.com,developer +136,Vita,Jagir,Vita.Jagir@yopmail.com,developer +137,Alie,Aprile,Alie.Aprile@yopmail.com,firefighter +138,Modestia,Jena,Modestia.Jena@yopmail.com,doctor +139,Cyndie,Pelagias,Cyndie.Pelagias@yopmail.com,worker +140,Ariela,Lilybelle,Ariela.Lilybelle@yopmail.com,firefighter 
+141,Jan,Parette,Jan.Parette@yopmail.com,firefighter +142,Merry,Horan,Merry.Horan@yopmail.com,developer +143,Katuscha,Candy,Katuscha.Candy@yopmail.com,police officer +144,Kerrin,Heisel,Kerrin.Heisel@yopmail.com,developer +145,Nollie,Magdalen,Nollie.Magdalen@yopmail.com,doctor +146,Karlee,Gordon,Karlee.Gordon@yopmail.com,developer +147,Dolli,Fadiman,Dolli.Fadiman@yopmail.com,firefighter +148,Leontine,Delp,Leontine.Delp@yopmail.com,worker +149,Ricky,Nadia,Ricky.Nadia@yopmail.com,doctor diff --git a/java.header b/java.header index 3a9b503aa2..d0970ba7d3 100644 --- a/java.header +++ b/java.header @@ -1,5 +1,5 @@ ^/\*$ -^ \* Copyright \d\d\d\d,? Google (Inc\.|LLC)( All [rR]ights [rR]eserved\.)?$ +^ \* Copyright \d\d\d\d,? Google (Inc\.|LLC)$ ^ \*$ ^ \* Licensed under the Apache License, Version 2\.0 \(the "License"\);$ ^ \* you may not use this file except in compliance with the License\.$ diff --git a/pom.xml b/pom.xml index 4cd778c24a..5ee48cbd03 100644 --- a/pom.xml +++ b/pom.xml @@ -4,7 +4,7 @@ com.google.cloud google-cloud-bigquery-parent pom - 1.127.5-SNAPSHOT + 2.60.1-SNAPSHOT BigQuery Parent https://github.com/googleapis/java-bigquery @@ -13,8 +13,8 @@ com.google.cloud - google-cloud-shared-config - 0.10.0 + sdk-platform-java-config + 3.57.0 @@ -31,7 +31,8 @@ Google LLC - + scm:git:git@github.com:googleapis/java-bigquery.git scm:git:git@github.com:googleapis/java-bigquery.git https://github.com/googleapis/java-bigquery @@ -41,16 +42,6 @@ https://github.com/googleapis/java-bigquery/issues GitHub Issues - - - sonatype-nexus-snapshots - https://oss.sonatype.org/content/repositories/snapshots - - - sonatype-nexus-staging - https://oss.sonatype.org/service/local/staging/deploy/maven2/ - - Apache-2.0 @@ -63,9 +54,7 @@ UTF-8 github google-cloud-bigquery-parent - v2-rev20210215-1.31.0 - - 0.19.0 + v2-rev20251012-2.0.0 @@ -73,7 +62,24 @@ com.google.cloud google-cloud-shared-dependencies - ${google.cloud.shared-dependencies.version} + ${google-cloud-shared-dependencies.version} + pom + import + + + + + com.google.cloud + google-cloud-bigquerystorage-bom + 3.22.0 + pom + import + + + + com.google.cloud + google-cloud-datacatalog-bom + 1.78.0 pom import @@ -81,13 +87,13 @@ org.checkerframework checker-compat-qual - 2.5.5 + 2.5.6 com.google.cloud google-cloud-bigquery - 1.127.5-SNAPSHOT + 2.60.1-SNAPSHOT @@ -95,52 +101,87 @@ google-api-services-bigquery ${google-api-services-bigquery.version} - - + + - junit - junit - 4.13.2 - test + org.threeten + threeten-extra + 1.8.0 + + com.google.truth truth - 1.1 + 1.4.4 test + + + org.checkerframework + checker-qual + + - org.mockito - mockito-core - 2.28.2 + com.google.cloud + google-cloud-storage + 2.53.3 test com.google.cloud - google-cloud-storage - 1.113.11 + google-cloud-bigqueryconnection + 2.74.0 test - org.assertj - assertj-core - - 2.9.1 + com.google.api.grpc + proto-google-cloud-bigqueryconnection-v1 + 2.70.0 test + + org.mockito + mockito-bom + 4.11.0 + pom + import + google-cloud-bigquery + google-cloud-bigquery-bom + google-cloud-bigquery-jdbc + + + + + org.apache.maven.plugins + maven-dependency-plugin + + + io.netty:netty-buffer + io.netty:netty-common + org.apache.arrow:arrow-memory-netty + com.google.api:gax + + org.junit.jupiter:junit-jupiter-engine + + + + + + org.apache.maven.plugins maven-project-info-reports-plugin - 3.1.1 + 3.9.0 @@ -167,7 +208,6 @@ org.apache.maven.plugins maven-javadoc-plugin - 3.2.0 html diff --git a/renovate.json b/renovate.json index af11317e23..dc6b01e796 100644 --- a/renovate.json +++ b/renovate.json @@ -7,67 
+7,103 @@ ":updateNotScheduled", ":automergeDisabled", ":ignoreModulesAndTests", - ":maintainLockFilesDisabled", - ":autodetectPinVersions" + ":maintainLockFilesDisabled" ], - "packageRules": [ + "ignorePaths": [ + ".kokoro/requirements.txt" + ], + "customManagers": [ { - "packagePatterns": [ - "^com.google.guava:" + "customType": "regex", + "managerFilePatterns": [ + "/^.kokoro/continuous/graalvm-native.*.cfg$/", + "/^.kokoro/presubmit/graalvm-native.*.cfg$/" ], - "versionScheme": "docker" + "matchStrings": [ + "value: \"gcr.io/cloud-devrel-public-resources/graalvm.*:(?<currentValue>.*?)\"" + ], + "depNameTemplate": "com.google.cloud:sdk-platform-java-config", + "datasourceTemplate": "maven" }, { - "packagePatterns": [ - "*" + "customType": "regex", + "managerFilePatterns": [ + "/^.github/workflows/unmanaged_dependency_check.yaml$/" + ], + "matchStrings": [ + "uses: googleapis/sdk-platform-java/java-shared-dependencies/unmanaged-dependency-check@google-cloud-shared-dependencies/v(?<currentValue>.+?)\\n" ], - "semanticCommitType": "deps", - "semanticCommitScope": null + "depNameTemplate": "com.google.cloud:sdk-platform-java-config", + "datasourceTemplate": "maven" + } + ], + "packageRules": [ + { + "versioning": "docker", + "matchPackageNames": [ + "/^com.google.guava:/" + ] }, { - "packagePatterns": [ - "^org.apache.maven", - "^org.jacoco:", - "^org.codehaus.mojo:", - "^org.sonatype.plugins:", - "^com.coveo:", - "^com.google.cloud:google-cloud-shared-config" - ], "semanticCommitType": "build", - "semanticCommitScope": "deps" + "semanticCommitScope": "deps", + "matchPackageNames": [ + "/^org.apache.maven/", + "/^org.jacoco:/", + "/^org.codehaus.mojo:/", + "/^org.sonatype.plugins:/", + "/^com.coveo:/", + "/^com.google.cloud:google-cloud-shared-config/" + ] }, { - "packagePatterns": [ - "^com.google.cloud:google-cloud-bigquery", - "^com.google.cloud:libraries-bom", - "^com.google.cloud.samples:shared-configuration" - ], "semanticCommitType": "chore", - "semanticCommitScope": "deps" + "semanticCommitScope": "deps", + "matchPackageNames": [ + "/^com.google.cloud:google-cloud-bigquery$/", + "/^com.google.cloud:google-cloud-bigtable/", + "/^com.google.cloud:libraries-bom/", + "/^com.google.cloud.samples:shared-configuration/" + ] }, { - "packagePatterns": [ - "^junit:junit", - "^com.google.truth:truth", - "^org.mockito:mockito-core", - "^org.objenesis:objenesis" - ], "semanticCommitType": "test", - "semanticCommitScope": "deps" + "semanticCommitScope": "deps", + "matchPackageNames": [ + "/^junit:junit/", + "/^com.google.truth:truth/", + "/^org.mockito:mockito-core/", + "/^org.objenesis:objenesis/", + "/^com.google.cloud:google-cloud-storage/" + ] }, { - "packagePatterns": [ - "^com.google.cloud:google-cloud-" - ], - "ignoreUnstable": false + "ignoreUnstable": false, + "matchPackageNames": [ + "/^com.google.cloud:google-cloud-/" + ] }, { - "packagePatterns": [ - "^com.fasterxml.jackson.core" + "groupName": "jackson dependencies", + "matchPackageNames": [ + "/^com.fasterxml.jackson.core/" + ] + }, + { + "matchPackageNames": [ + "com.google.cloud:google-cloud-datacatalog-bom", + "com.google.cloud:google-cloud-bigqueryconnection-v1", + "com.google.cloud:google-cloud-bigqueryconnection" ], - "groupName": "jackson dependencies" + "groupName": "Google Cloud Java Monorepo Updates", + "groupSlug": "google-cloud-java-monorepo-release-updates" } ], - "semanticCommits": true, - "masterIssue": true + "semanticCommits": "enabled", + "dependencyDashboard": true, + "dependencyDashboardLabels": [ + "type: process" + ], +
"prConcurrentLimit": 0, + "prHourlyLimit": 0 } diff --git a/samples/install-without-bom/pom.xml b/samples/install-without-bom/pom.xml index 2033fca532..cfcf9c503f 100644 --- a/samples/install-without-bom/pom.xml +++ b/samples/install-without-bom/pom.xml @@ -30,13 +30,14 @@ com.google.cloud.samples shared-configuration - 1.0.21 + 1.2.2 1.8 1.8 UTF-8 + 1.52.0 @@ -45,31 +46,62 @@ com.google.cloud google-cloud-bigquery - 1.127.4 + 2.53.0 com.google.oauth-client google-oauth-client-java6 - 1.31.2 + 1.39.0 com.google.oauth-client google-oauth-client-jetty - 1.31.4 + 1.39.0 + + io.opentelemetry + opentelemetry-api + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-context + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-sdk + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-sdk-common + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-sdk-trace + ${opentelemetry.version} + + + io.opentelemetry + opentelemetry-exporter-logging + ${opentelemetry.version} + + com.google.cloud google-cloud-bigtable - 1.20.1 + 2.62.0 test com.google.cloud google-cloud-bigqueryconnection - 1.0.9 + 2.74.0 test @@ -81,7 +113,7 @@ com.google.truth truth - 1.1 + 1.4.4 test @@ -92,7 +124,7 @@ org.codehaus.mojo build-helper-maven-plugin - 3.2.0 + 3.6.1 add-snippets-source diff --git a/samples/pom.xml b/samples/pom.xml index b2e32cf3fd..c6ac76b1dd 100644 --- a/samples/pom.xml +++ b/samples/pom.xml @@ -34,7 +34,7 @@ com.google.cloud.samples shared-configuration - 1.0.21 + 1.2.2 @@ -54,7 +54,7 @@ org.apache.maven.plugins maven-deploy-plugin - 2.8.2 + 3.1.4 true @@ -62,7 +62,7 @@ org.sonatype.plugins nexus-staging-maven-plugin - 1.6.8 + 1.7.0 true diff --git a/samples/snapshot/pom.xml b/samples/snapshot/pom.xml index fb193c9b52..525e9c671f 100644 --- a/samples/snapshot/pom.xml +++ b/samples/snapshot/pom.xml @@ -30,7 +30,7 @@ com.google.cloud.samples shared-configuration - 1.0.21 + 1.2.2 @@ -39,35 +39,47 @@ UTF-8 + + + + io.opentelemetry + opentelemetry-bom + 1.52.0 + pom + import + + + + com.google.cloud google-cloud-bigquery - 1.127.5-SNAPSHOT + 2.60.1-SNAPSHOT com.google.oauth-client google-oauth-client-java6 - 1.31.2 + 1.39.0 com.google.oauth-client google-oauth-client-jetty - 1.31.4 + 1.39.0 com.google.cloud google-cloud-bigtable - 1.20.1 + 2.62.0 test com.google.cloud google-cloud-bigqueryconnection - 1.0.9 + 2.74.0 test @@ -79,9 +91,33 @@ com.google.truth truth - 1.1 + 1.4.4 test + + io.opentelemetry + opentelemetry-api + + + io.opentelemetry + opentelemetry-context + + + io.opentelemetry + opentelemetry-sdk + + + io.opentelemetry + opentelemetry-sdk-common + + + io.opentelemetry + opentelemetry-sdk-trace + + + io.opentelemetry + opentelemetry-exporter-logging + @@ -90,7 +126,7 @@ org.codehaus.mojo build-helper-maven-plugin - 3.2.0 + 3.6.1 add-snippets-source diff --git a/samples/snippets/pom.xml b/samples/snippets/pom.xml index 95f303a00d..1903381ad7 100644 --- a/samples/snippets/pom.xml +++ b/samples/snippets/pom.xml @@ -1,3 +1,4 @@ + - + 4.0.0 com.example.bigquery bigquery-google-cloud-samples @@ -26,7 +29,7 @@ com.google.cloud.samples shared-configuration - 1.0.21 + 1.2.2 @@ -44,7 +47,14 @@ com.google.cloud libraries-bom - 18.0.0 + 26.73.0 + pom + import + + + io.opentelemetry + opentelemetry-bom + 1.52.0 pom import @@ -56,6 +66,30 @@ com.google.cloud google-cloud-bigquery + + io.opentelemetry + opentelemetry-api + + + io.opentelemetry + opentelemetry-context + + + io.opentelemetry + opentelemetry-sdk + + + io.opentelemetry + opentelemetry-sdk-common + + + 
io.opentelemetry + opentelemetry-sdk-trace + + + io.opentelemetry + opentelemetry-exporter-logging + @@ -63,12 +97,12 @@ com.google.oauth-client google-oauth-client-java6 - 1.31.2 + 1.39.0 com.google.oauth-client google-oauth-client-jetty - 1.31.4 + 1.39.0 @@ -76,13 +110,13 @@ com.google.cloud google-cloud-bigtable - 1.20.1 + 2.62.0 test com.google.cloud google-cloud-bigqueryconnection - 1.0.9 + 2.74.0 test @@ -94,17 +128,19 @@ com.google.truth truth - 1.1 + 1.4.4 test + + org.apache.maven.plugins maven-deploy-plugin - 2.8.2 + 3.1.4 true @@ -112,7 +148,7 @@ org.sonatype.plugins nexus-staging-maven-plugin - 1.6.8 + 1.7.0 true diff --git a/samples/snippets/src/main/java/com/example/bigquery/AuthUserFlow.java b/samples/snippets/src/main/java/com/example/bigquery/AuthUserFlow.java index 0dd37f0d63..606a3916e3 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/AuthUserFlow.java +++ b/samples/snippets/src/main/java/com/example/bigquery/AuthUserFlow.java @@ -24,7 +24,7 @@ import com.google.api.client.googleapis.auth.oauth2.GoogleClientSecrets; import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; import com.google.api.client.json.JsonFactory; -import com.google.api.client.json.jackson2.JacksonFactory; +import com.google.api.client.json.gson.GsonFactory; import com.google.api.client.util.store.FileDataStoreFactory; import com.google.api.gax.paging.Page; import com.google.auth.oauth2.GoogleCredentials; @@ -49,7 +49,7 @@ public class AuthUserFlow { private static final File DATA_STORE_DIR = new File(AuthUserFlow.class.getResource("/").getPath(), "credentials"); - private static final JsonFactory JSON_FACTORY = JacksonFactory.getDefaultInstance(); + private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance(); // i.e redirect_uri http://localhost:61984/Callback private static final int LOCAL_RECEIVER_PORT = 61984; diff --git a/samples/snippets/src/main/java/com/example/bigquery/AuthUserQuery.java b/samples/snippets/src/main/java/com/example/bigquery/AuthUserQuery.java index 508759620c..bed9159c90 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/AuthUserQuery.java +++ b/samples/snippets/src/main/java/com/example/bigquery/AuthUserQuery.java @@ -24,7 +24,7 @@ import com.google.api.client.googleapis.auth.oauth2.GoogleClientSecrets; import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; import com.google.api.client.json.JsonFactory; -import com.google.api.client.json.jackson2.JacksonFactory; +import com.google.api.client.json.gson.GsonFactory; import com.google.api.client.util.store.FileDataStoreFactory; import com.google.auth.oauth2.GoogleCredentials; import com.google.auth.oauth2.UserCredentials; @@ -49,7 +49,7 @@ public class AuthUserQuery { private static final File DATA_STORE_DIR = new File(AuthUserQuery.class.getResource("/").getPath(), "credentials"); - private static final JsonFactory JSON_FACTORY = JacksonFactory.getDefaultInstance(); + private static final JsonFactory JSON_FACTORY = GsonFactory.getDefaultInstance(); // i.e redirect_uri http://localhost:61984/Callback private static final int LOCAL_RECEIVER_PORT = 61984; diff --git a/samples/snippets/src/main/java/com/example/bigquery/AuthorizeDataset.java b/samples/snippets/src/main/java/com/example/bigquery/AuthorizeDataset.java new file mode 100644 index 0000000000..949e583e48 --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquery/AuthorizeDataset.java @@ -0,0 +1,73 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache 
License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +// [START bigquery_authorized_dataset] +import com.google.cloud.bigquery.Acl; +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.Dataset; +import com.google.cloud.bigquery.DatasetId; +import com.google.common.collect.ImmutableList; +import java.util.ArrayList; +import java.util.List; + +public class AuthorizeDataset { + + public static void main(String[] args) { + // TODO(developer): Replace these variables before running the sample. + String projectId = "PROJECT_ID"; + String sourceDatasetName = "BIGQUERY_SOURCE_DATASET_NAME"; + String userDatasetName = "BIGQUERY_USER_DATASET_NAME"; + authorizeDataset( + DatasetId.of(projectId, sourceDatasetName), DatasetId.of(projectId, userDatasetName)); + } + + // This method will update sourceDataset's ACL with userDataset's ACL + public static void authorizeDataset(DatasetId sourceDatasetId, DatasetId userDatasetId) { + try { + // Initialize client that will be used to send requests. This client only needs to be created + // once, and can be reused for multiple requests. + BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); + + // Get both source and user dataset's references + Dataset sourceDataset = bigquery.getDataset(sourceDatasetId); + Dataset userDataset = bigquery.getDataset(userDatasetId); + + // Get the source dataset's ACL + List<Acl> sourceDatasetAcl = new ArrayList<>(sourceDataset.getAcl()); + + // Add the user dataset's DatasetAccessEntry object to the existing sourceDatasetAcl + List<String> targetTypes = ImmutableList.of("VIEWS"); + Acl.DatasetAclEntity userDatasetAclEntity = + new Acl.DatasetAclEntity(userDatasetId, targetTypes); + sourceDatasetAcl.add(Acl.of(userDatasetAclEntity)); + + // Update the source dataset with the user dataset's ACL + Dataset updatedSourceDataset = + sourceDataset.toBuilder().setAcl(sourceDatasetAcl).build().update(); + + System.out.printf( + "Dataset %s updated with the added authorization\n", updatedSourceDataset.getDatasetId()); + + } catch (BigQueryException e) { + System.out.println("Dataset Authorization failed due to error: \n" + e); + } + } +} +// [END bigquery_authorized_dataset] diff --git a/samples/snippets/src/main/java/com/example/bigquery/CopyMultipleTables.java b/samples/snippets/src/main/java/com/example/bigquery/CopyMultipleTables.java index 9c3c736fc3..2a7e023448 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/CopyMultipleTables.java +++ b/samples/snippets/src/main/java/com/example/bigquery/CopyMultipleTables.java @@ -32,25 +32,30 @@ public static void main(String[] args) { // TODO(developer): Replace these variables before running the sample.
String destinationDatasetName = "MY_DATASET_NAME"; String destinationTableId = "MY_TABLE_NAME"; - copyMultipleTables(destinationDatasetName, destinationTableId); + String sourceTable1Id = "MY_SOURCE_TABLE_1"; + String sourceTable2Id = "MY_SOURCE_TABLE_2"; + copyMultipleTables(destinationDatasetName, destinationTableId, sourceTable1Id, sourceTable2Id); } - public static void copyMultipleTables(String destinationDatasetName, String destinationTableId) { + public static void copyMultipleTables( + String destinationDatasetName, + String destinationTableId, + String sourceTable1Id, + String sourceTable2Id) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); TableId destinationTable = TableId.of(destinationDatasetName, destinationTableId); + TableId sourceTable1 = TableId.of(destinationDatasetName, sourceTable1Id); + TableId sourceTable2 = TableId.of(destinationDatasetName, sourceTable2Id); // For more information on CopyJobConfiguration see: // https://googleapis.dev/java/google-cloud-clients/latest/com/google/cloud/bigquery/JobConfiguration.html CopyJobConfiguration configuration = CopyJobConfiguration.newBuilder( - destinationTable, - Arrays.asList( - TableId.of(destinationDatasetName, "table1"), - TableId.of(destinationDatasetName, "table2"))) + destinationTable, Arrays.asList(sourceTable1, sourceTable2)) .build(); // For more information on Job see: diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreateAndQueryRepeatedRecordField.java b/samples/snippets/src/main/java/com/example/bigquery/CreateAndQueryRepeatedRecordField.java new file mode 100644 index 0000000000..2bb13eb121 --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquery/CreateAndQueryRepeatedRecordField.java @@ -0,0 +1,196 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.example.bigquery; + +// [START bigquery_create_and_query_repeated_record] +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.InsertAllRequest; +import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryParameterValue; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.StandardTableDefinition; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TableInfo; +import com.google.cloud.bigquery.TableResult; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +// Create a table with a repeated record field and query it using an array of struct named parameter +public class CreateAndQueryRepeatedRecordField { + + private static final Field REPEATED_RECORD_FIELD_SCHEMA = + Field.newBuilder( + "Addresses", + LegacySQLTypeName.RECORD, + Field.newBuilder("Status", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("Address", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("City", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("State", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("Zip", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("NumberOfYears", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()) + .setMode(Field.Mode.REPEATED) + .build(); + private static final Schema REPEATED_RECORD_TABLE_SCHEMA = + Schema.of( + Field.newBuilder("ID", LegacySQLTypeName.STRING).setMode(Field.Mode.NULLABLE).build(), + Field.newBuilder("FirstName", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("LastName", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("DOB", LegacySQLTypeName.DATE).setMode(Field.Mode.NULLABLE).build(), + REPEATED_RECORD_FIELD_SCHEMA); + + public static void main(String[] args) { + // TODO(developer): Replace these variables before running the sample. + String datasetName = "MY_DATASET_NAME"; + String tableName = "MY_TABLE_NAME"; + createAndQueryRepeatedRecordField(datasetName, tableName); + } + + public static void createAndQueryRepeatedRecordField(String datasetName, String tableName) { + try { + // Initialize client that will be used to send requests. This client only needs to be created + // once, and can be reused for multiple requests. 
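+ // The steps below create a table whose "Addresses" column is a REPEATED RECORD, + // insert two rows whose address lists are given as plain maps, and then filter + // those rows with a named query parameter built as an ARRAY of STRUCT values.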
+ BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); + TableId tableId = TableId.of(datasetName, tableName); + + // Create a table with a repeated record field + StandardTableDefinition tableDefinition = + StandardTableDefinition.of(REPEATED_RECORD_TABLE_SCHEMA); + TableInfo tableInfo = TableInfo.of(tableId, tableDefinition); + bigquery.create(tableInfo); + + // Insert some data + ImmutableMap.Builder<String, Object> builder1 = ImmutableMap.builder(); + builder1.put("ID", "1"); + builder1.put("FirstName", "first_name1"); + builder1.put("LastName", "last_name1"); + builder1.put("DOB", "1995-08-09"); + builder1.put( + "Addresses", + ImmutableList.of( + ImmutableMap.of( + "Status", "single", + "Address", "123 this lane", + "City", "Toronto", + "State", "ON", + "Zip", "1h2j34", + "NumberOfYears", "3"), + ImmutableMap.of( + "Status", "couple", + "Address", "345 that lane", + "City", "Maple", + "State", "ON", + "Zip", "1h2j34", + "NumberOfYears", "5"))); + + ImmutableMap.Builder<String, Object> builder2 = ImmutableMap.builder(); + builder2.put("ID", "2"); + builder2.put("FirstName", "first_name2"); + builder2.put("LastName", "last_name2"); + builder2.put("DOB", "1992-03-19"); + builder2.put( + "Addresses", + ImmutableList.of( + ImmutableMap.of( + "Status", "single", + "Address", "97 Kota lane", + "City", "Ottawa", + "State", "ON", + "Zip", "1h2j34", + "NumberOfYears", "3"), + ImmutableMap.of( + "Status", "couple", + "Address", "75 Malta lane", + "City", "Victoria", + "State", "AL", + "Zip", "1h2j34", + "NumberOfYears", "5"))); + + InsertAllRequest request = + InsertAllRequest.newBuilder(tableInfo.getTableId()) + .addRow(builder1.build()) + .addRow(builder2.build()) + .build(); + bigquery.insertAll(request); + + // Query using a named parameter + QueryParameterValue statusValue = QueryParameterValue.string("single"); + QueryParameterValue addressValue = QueryParameterValue.string("123 this lane"); + QueryParameterValue cityValue = QueryParameterValue.string("Toronto"); + QueryParameterValue stateValue = QueryParameterValue.string("ON"); + QueryParameterValue zipValue = QueryParameterValue.string("1h2j34"); + QueryParameterValue numberOfYearsValue = QueryParameterValue.string("3"); + + Map<String, QueryParameterValue> struct = new LinkedHashMap<>(); + struct.put("statusValue", statusValue); + struct.put("addressValue", addressValue); + struct.put("cityValue", cityValue); + struct.put("stateValue", stateValue); + struct.put("zipValue", zipValue); + struct.put("numberOfYearsValue", numberOfYearsValue); + QueryParameterValue recordValue = QueryParameterValue.struct(struct); + List<QueryParameterValue> tuples = new ArrayList<>(); + tuples.add(recordValue); + + QueryParameterValue repeatedRecord = + QueryParameterValue.array(tuples.toArray(), StandardSQLTypeName.STRUCT); + + String query = + "SELECT * FROM " + + tableId.getTable() + + ", UNNEST(@repeatedRecord) AS TEMP where TEMP IN UNNEST(addresses);"; + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder(query) + .setDefaultDataset(datasetName) + .setUseLegacySql(false) + .addNamedParameter("repeatedRecord", repeatedRecord) + .build(); + TableResult results = bigquery.query(queryConfig); + results + .iterateAll() + .forEach(row -> row.forEach(val -> System.out.printf("%s\n", val.toString()))); + System.out.println("Query with Array of struct parameters performed successfully."); + } catch (BigQueryException | InterruptedException e) { + System.out.println("Query not performed \n" + e.toString()); + } + } +} +// [END bigquery_create_and_query_repeated_record] diff --git
a/samples/snippets/src/main/java/com/example/bigquery/CreateDataset.java b/samples/snippets/src/main/java/com/example/bigquery/CreateDataset.java index 92e73d1131..f567c8df1b 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/CreateDataset.java +++ b/samples/snippets/src/main/java/com/example/bigquery/CreateDataset.java @@ -37,7 +37,9 @@ public static void createDataset(String datasetName) { // once, and can be reused for multiple requests. BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); - DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build(); + String location = "US"; + + DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).setLocation(location).build(); Dataset newDataset = bigquery.create(datasetInfo); String newDatasetName = newDataset.getDatasetId().getDataset(); diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreateDatasetAws.java b/samples/snippets/src/main/java/com/example/bigquery/CreateDatasetAws.java index f5b4f0cdc8..19ef0eca24 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/CreateDatasetAws.java +++ b/samples/snippets/src/main/java/com/example/bigquery/CreateDatasetAws.java @@ -30,7 +30,6 @@ public static void main(String[] args) { // TODO(developer): Replace these variables before running the sample. String projectId = "MY_PROJECT_ID"; String datasetName = "MY_DATASET_NAME"; - // Note: As of now location only supports aws-us-east-1 String location = "aws-us-east-1"; createDatasetAws(projectId, datasetName, location); } diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreateDatasetWithRegionalEndpoint.java b/samples/snippets/src/main/java/com/example/bigquery/CreateDatasetWithRegionalEndpoint.java new file mode 100644 index 0000000000..c19a93b42f --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquery/CreateDatasetWithRegionalEndpoint.java @@ -0,0 +1,49 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +// [START bigquery_create_dataset_with_regional_endpoint] +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.Dataset; +import com.google.cloud.bigquery.DatasetInfo; + +public class CreateDatasetWithRegionalEndpoint { + public static void createDatasetWithRegionalEndpoint() { + BigQuery bigquery; + try { + // Initialize client that will be used to send requests. This client only needs to be created + // once, and can be reused for multiple requests. 
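+ // Pointing the client at a regional endpoint (https://LOCATION-bigquery.googleapis.com/) + // routes requests to that region only; the dataset created below is therefore + // expected to land in us-east4, which the sample prints to verify.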
+ bigquery = + BigQueryOptions.newBuilder() + .setHost("https://us-east4-bigquery.googleapis.com/") + .build() + .getService(); + String datasetName = "MyRegionalDataset"; + + DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build(); + + Dataset newDataset = bigquery.create(datasetInfo); + System.out.println("Region of dataset: " + newDataset.getLocation()); + bigquery.delete("MyRegionalDataset"); + } catch (BigQueryException e) { + System.out.println("Dataset was not created. \n" + e); + } + } +} +// [END bigquery_create_dataset_with_regional_endpoint] diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreateIamPolicy.java b/samples/snippets/src/main/java/com/example/bigquery/CreateIamPolicy.java index 960f280717..96f0c3f16a 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/CreateIamPolicy.java +++ b/samples/snippets/src/main/java/com/example/bigquery/CreateIamPolicy.java @@ -46,7 +46,9 @@ public static void createIamPolicy(String datasetName, String tableName) { Policy policy = bigquery.getIamPolicy(tableId); policy .toBuilder() - .addIdentity(Role.of("roles/bigquery.dataViewer"), Identity.allUsers()) + .addIdentity( + Role.of("roles/bigquery.dataViewer"), + Identity.user("example-analyst-group@google.com")) .build(); bigquery.setIamPolicy(tableId, policy); System.out.println("Iam policy created successfully"); diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreateModel.java b/samples/snippets/src/main/java/com/example/bigquery/CreateModel.java index 6b45718ce7..dfc5467120 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/CreateModel.java +++ b/samples/snippets/src/main/java/com/example/bigquery/CreateModel.java @@ -39,7 +39,7 @@ public static void main(String[] args) { + "`" + "OPTIONS ( " + "model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreateTableTimestamp.java b/samples/snippets/src/main/java/com/example/bigquery/CreateTableTimestamp.java new file mode 100644 index 0000000000..b1336aaff4 --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquery/CreateTableTimestamp.java @@ -0,0 +1,59 @@ +/* + * Copyright 2026 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.example.bigquery; + +// [START bigquery_create_table_timestamp] +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import com.google.cloud.bigquery.StandardTableDefinition; +import com.google.cloud.bigquery.TableDefinition; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TableInfo; + +public class CreateTableTimestamp { + + public static void main(String[] args) { + // TODO(developer): Replace these variables before running the sample. + String datasetName = "MY_DATASET_NAME"; + String tableName = "MY_TABLE_NAME"; + Schema schema = + Schema.of(Field.newBuilder("timestampField", StandardSQLTypeName.TIMESTAMP).build()); + createTable(datasetName, tableName, schema); + } + + public static void createTable(String datasetName, String tableName, Schema schema) { + try { + // Initialize client that will be used to send requests. This client only needs to be created + // once, and can be reused for multiple requests. + BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); + + TableId tableId = TableId.of(datasetName, tableName); + TableDefinition tableDefinition = StandardTableDefinition.of(schema); + TableInfo tableInfo = TableInfo.newBuilder(tableId, tableDefinition).build(); + + bigquery.create(tableInfo); + System.out.println("Table created successfully"); + } catch (BigQueryException e) { + System.out.println("Table was not created. \n" + e); + } + } +} +// [END bigquery_create_table_timestamp] diff --git a/samples/snippets/src/main/java/com/example/bigquery/CreateTablesWithPrimaryAndForeignKeys.java b/samples/snippets/src/main/java/com/example/bigquery/CreateTablesWithPrimaryAndForeignKeys.java new file mode 100644 index 0000000000..36e32bc216 --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquery/CreateTablesWithPrimaryAndForeignKeys.java @@ -0,0 +1,109 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.example.bigquery; + +// [START bigquery_create_tables_with_primary_and_foreign_keys] +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.ColumnReference; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.ForeignKey; +import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.PrimaryKey; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardTableDefinition; +import com.google.cloud.bigquery.TableConstraints; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.TableInfo; +import java.util.Arrays; +import java.util.Collections; + +// Create tables with primary/foreign key columns +public class CreateTablesWithPrimaryAndForeignKeys { + + private static final Schema PK_FK_SCHEMA = + Schema.of( + Field.newBuilder("ID", LegacySQLTypeName.STRING).setMode(Field.Mode.NULLABLE).build(), + Field.newBuilder("FirstName", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build(), + Field.newBuilder("LastName", LegacySQLTypeName.STRING) + .setMode(Field.Mode.NULLABLE) + .build()); + + public static void main(String[] args) { + // TODO(developer): Replace these variables before running the sample. + String datasetName = "MY_DATASET_NAME"; + String tableNamePk = "PK_TABLE"; + String tableNameFk = "FK_TABLE"; + createTablesWithPrimaryAndForeignKeys(datasetName, tableNamePk, tableNameFk); + } + + public static void createTablesWithPrimaryAndForeignKeys( + String datasetName, String tableNamePk, String tableNameFk) { + try { + // Initialize client that will be used to send requests. This client only needs to be created + // once, and can be reused for multiple requests. 
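+ // Note: BigQuery primary and foreign keys are declared as unenforced metadata + // (useful to query planning and tooling); inserts that violate these + // constraints are not rejected by the service.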
+ BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); + + // TableIds referenced by foreign keys need project id to be set + TableId tableIdPk = + TableId.of(bigquery.getOptions().getProjectId(), datasetName, tableNamePk); + TableId tableIdFk = TableId.of(datasetName, tableNameFk); + + PrimaryKey primaryKey = + PrimaryKey.newBuilder().setColumns(Collections.singletonList("ID")).build(); + TableConstraints tableConstraintsPk = + TableConstraints.newBuilder().setPrimaryKey(primaryKey).build(); + + ColumnReference columnReference = + ColumnReference.newBuilder().setReferencingColumn("ID").setReferencedColumn("ID").build(); + ForeignKey foreignKey = + ForeignKey.newBuilder() + .setName("foreign_key") + .setColumnReferences(Collections.singletonList(columnReference)) + .setReferencedTable(tableIdPk) + .build(); + TableConstraints tableConstraintsFk = + TableConstraints.newBuilder().setForeignKeys(Arrays.asList(foreignKey)).build(); + + // Create a table with a primary key + StandardTableDefinition tableDefinitionPk = + StandardTableDefinition.newBuilder() + .setSchema(PK_FK_SCHEMA) + .setTableConstraints(tableConstraintsPk) + .build(); + TableInfo tableInfoPk = TableInfo.of(tableIdPk, tableDefinitionPk); + bigquery.create(tableInfoPk); + + // Create a table with a foreign key + StandardTableDefinition tableDefinitionFk = + StandardTableDefinition.newBuilder() + .setSchema(PK_FK_SCHEMA) + .setTableConstraints(tableConstraintsFk) + .build(); + TableInfo tableInfoFk = TableInfo.of(tableIdFk, tableDefinitionFk); + bigquery.create(tableInfoFk); + + System.out.println("Tables with primary and foreign keys created successfully."); + } catch (BigQueryException e) { + System.out.println("Tables not created \n" + e.toString()); + } + } +} +// [END bigquery_create_tables_with_primary_and_foreign_keys] diff --git a/samples/snippets/src/main/java/com/example/bigquery/EnableOpenTelemetryTracing.java b/samples/snippets/src/main/java/com/example/bigquery/EnableOpenTelemetryTracing.java new file mode 100644 index 0000000000..57ec7eb71d --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquery/EnableOpenTelemetryTracing.java @@ -0,0 +1,85 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.example.bigquery; + +// [START bigquery_enable_otel_tracing] +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.Dataset; +import com.google.cloud.bigquery.DatasetInfo; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.exporter.logging.LoggingSpanExporter; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import java.util.logging.ConsoleHandler; +import java.util.logging.Logger; + +public class EnableOpenTelemetryTracing { + private static final Logger log = Logger.getLogger(EnableOpenTelemetryTracing.class.getName()); + + public static void main(String[] args) { + // Set logging to System.err. + ConsoleHandler ch = new ConsoleHandler(); + log.addHandler(ch); + + // TODO(developer): Replace values before running the sample. + final String tracerName = "Sample Tracer"; + final String datasetId = "sampleDatasetId"; + + // Create TracerProvider that exports to a logger. + SdkTracerProvider tracerProvider = + SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.builder(LoggingSpanExporter.create()).build()) + .setSampler(Sampler.alwaysOn()) + .build(); + + // Create global OpenTelemetry instance using the TracerProvider. + OpenTelemetry otel = OpenTelemetrySdk.builder().setTracerProvider(tracerProvider).build(); + + // Create Tracer instance from the OpenTelemetry object. Tracers are used to create + // Spans. There can be multiple Tracers in an OpenTelemetry instance. + Tracer tracer = otel.getTracer(tracerName); + + enableOpenTelemetry(tracer, datasetId); + } + + public static void enableOpenTelemetry(Tracer tracer, String datasetId) { + // Create BigQuery client to trace. EnableOpenTelemetryTracing and OpenTelemetryTracer must + // be set to enable tracing. + BigQueryOptions otelOptions = + BigQueryOptions.newBuilder() + .setEnableOpenTelemetryTracing(true) + .setOpenTelemetryTracer(tracer) + .build(); + BigQuery bigquery = otelOptions.getService(); + + try { + // Create dataset. + DatasetInfo info = DatasetInfo.newBuilder(datasetId).build(); + Dataset dataset = bigquery.create(info); + } catch (Exception e) { + System.out.println( + String.format("Failed to create dataset: %s: %s", e.toString(), e.getMessage())); + } finally { + bigquery.delete(datasetId); + } + } +} +// [END bigquery_enable_otel_tracing] diff --git a/samples/snippets/src/main/java/com/example/bigquery/EnableOpenTelemetryTracingWithParentSpan.java b/samples/snippets/src/main/java/com/example/bigquery/EnableOpenTelemetryTracingWithParentSpan.java new file mode 100644 index 0000000000..af69df10ba --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquery/EnableOpenTelemetryTracingWithParentSpan.java @@ -0,0 +1,105 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +// [START bigquery_enable_otel_tracing_with_parent_span] +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.Dataset; +import com.google.cloud.bigquery.DatasetInfo; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Scope; +import io.opentelemetry.exporter.logging.LoggingSpanExporter; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import java.time.LocalDate; +import java.util.logging.ConsoleHandler; +import java.util.logging.Logger; + +public class EnableOpenTelemetryTracingWithParentSpan { + private static final Logger log = + Logger.getLogger(EnableOpenTelemetryTracingWithParentSpan.class.getName()); + + public static void main(String[] args) { + // Set logging to System.err. + ConsoleHandler ch = new ConsoleHandler(); + log.addHandler(ch); + + // TODO(developer): Replace values before running the sample. + final String tracerName = "Sample Tracer"; + final String parentSpanName = "Sample Parent Span"; + final String datasetId = "sampleDatasetId"; + + // Create TracerProvider that exports to a logger. + SdkTracerProvider tracerProvider = + SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.builder(LoggingSpanExporter.create()).build()) + .setSampler(Sampler.alwaysOn()) + .build(); + + // Create OpenTelemetry instance using the TracerProvider. + OpenTelemetry otel = OpenTelemetrySdk.builder().setTracerProvider(tracerProvider).build(); + + // Create Tracer instance from the global OpenTelemetry object. Tracers are used to create + // Spans. There can be multiple Tracers in a global OpenTelemetry instance. + final Tracer tracer = otel.getTracer(tracerName); + enableOpenTelemetryWithParentSpan(tracer, parentSpanName, datasetId); + } + + public static void enableOpenTelemetryWithParentSpan( + Tracer tracer, String parentSpanName, String datasetId) { + // Create BigQuery client to trace. EnableOpenTelemetryTracing and OpenTelemetryTracer must + // be set to enable tracing. + BigQueryOptions otelOptions = + BigQueryOptions.newBuilder() + .setEnableOpenTelemetryTracing(true) + .setOpenTelemetryTracer(tracer) + .build(); + BigQuery bigquery = otelOptions.getService(); + + LocalDate currentDate = LocalDate.now(); + + // Create the root parent Span. setNoParent() ensures that it is a parent Span with a Span ID + // of 0. + Span parentSpan = + tracer + .spanBuilder(parentSpanName) + .setNoParent() + .setAttribute("current_date", currentDate.toString()) + .startSpan(); + + // The Span Context is automatically passed on to any functions called within the scope of the + // try block. parentSpan.makeCurrent() sets parentSpan to be the parent of any Spans created in + // this scope, or the scope of any functions called within this scope. 
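+ // Scope is AutoCloseable: when the try block below exits, the previous context + // is restored, so parentSpan does not remain current for unrelated work.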
+ try (Scope parentScope = parentSpan.makeCurrent()) { + DatasetInfo info = DatasetInfo.newBuilder(datasetId).build(); + Dataset dataset = bigquery.create(info); + } catch (Exception e) { + System.out.println( + String.format("Failed to create dataset: %s: %s", e.toString(), e.getMessage())); + } finally { + // finally block ensures that Spans are cleaned up properly. + parentSpan.end(); + bigquery.delete(datasetId); + } + } +} +// [END bigquery_enable_otel_tracing_with_parent_span] diff --git a/samples/snippets/src/main/java/com/example/bigquery/GrantViewAccess.java b/samples/snippets/src/main/java/com/example/bigquery/GrantViewAccess.java index 4a989f0247..00f5ccb1c4 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/GrantViewAccess.java +++ b/samples/snippets/src/main/java/com/example/bigquery/GrantViewAccess.java @@ -22,7 +22,6 @@ import com.google.cloud.bigquery.BigQueryException; import com.google.cloud.bigquery.BigQueryOptions; import com.google.cloud.bigquery.Dataset; -import com.google.cloud.bigquery.DatasetId; import com.google.cloud.bigquery.Table; import java.util.ArrayList; import java.util.List; @@ -44,8 +43,8 @@ public static void grantViewAccess(String srcDatasetId, String viewDatasetId, St // once, and can be reused for multiple requests. BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); - Dataset srcDataset = bigquery.getDataset(DatasetId.of(srcDatasetId)); - Dataset viewDataset = bigquery.getDataset(DatasetId.of(viewDatasetId)); + Dataset srcDataset = bigquery.getDataset(srcDatasetId); + Dataset viewDataset = bigquery.getDataset(viewDatasetId); Table view = viewDataset.get(viewId); // First, we'll add a group to the ACL for the dataset containing the view. This will allow // users within that group to query the view, but they must have direct access to any tables diff --git a/samples/snippets/src/main/java/com/example/bigquery/InsertingDataTypes.java b/samples/snippets/src/main/java/com/example/bigquery/InsertingDataTypes.java index 3d721e727d..0f2946488e 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/InsertingDataTypes.java +++ b/samples/snippets/src/main/java/com/example/bigquery/InsertingDataTypes.java @@ -30,6 +30,7 @@ import com.google.cloud.bigquery.TableDefinition; import com.google.cloud.bigquery.TableId; import com.google.cloud.bigquery.TableInfo; +import java.util.Base64; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -53,10 +54,7 @@ public static void insertingDataTypes(String datasetName, String tableName) { // Inserting data types Field name = Field.of("name", StandardSQLTypeName.STRING); Field age = Field.of("age", StandardSQLTypeName.INT64); - Field school = - Field.newBuilder("school", StandardSQLTypeName.BYTES) - .setMode(Field.Mode.REPEATED) - .build(); + Field school = Field.of("school", StandardSQLTypeName.BYTES); Field location = Field.of("location", StandardSQLTypeName.GEOGRAPHY); Field measurements = Field.newBuilder("measurements", StandardSQLTypeName.FLOAT64) @@ -86,7 +84,7 @@ public static void insertingDataTypes(String datasetName, String tableName) { Map rowContent = new HashMap<>(); rowContent.put("name", "Tom"); rowContent.put("age", 30); - rowContent.put("school", "Test University".getBytes()); + rowContent.put("school", Base64.getEncoder().encodeToString("Test University".getBytes())); rowContent.put("location", "POINT(1 2)"); rowContent.put("measurements", new Float[] {50.05f, 100.5f}); rowContent.put("datesTime", datesTimeContent); diff --git 
a/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFileInSession.java b/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFileInSession.java new file mode 100644 index 0000000000..8efef5c405 --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquery/LoadLocalFileInSession.java @@ -0,0 +1,163 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.ConnectionProperty; +import com.google.cloud.bigquery.CsvOptions; +import com.google.cloud.bigquery.FormatOptions; +import com.google.cloud.bigquery.Job; +import com.google.cloud.bigquery.JobId; +import com.google.cloud.bigquery.JobStatistics.LoadStatistics; +import com.google.cloud.bigquery.TableDataWriteChannel; +import com.google.cloud.bigquery.TableId; +import com.google.cloud.bigquery.WriteChannelConfiguration; +import com.google.common.collect.ImmutableList; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.channels.Channels; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.UUID; + +public class LoadLocalFileInSession { + + public static void main(String[] args) throws IOException, InterruptedException { + // Use _SESSION if the table is a temporary table + String datasetName = "MY_DATASET_NAME"; + String tableName = "MY_TABLE_NAME"; + Path csvPath = FileSystems.getDefault().getPath(".", "my-data.csv"); + String sessionId = + createSessionForLoading(datasetName, tableName, csvPath, FormatOptions.csv()); + loadLocalFileInSession(datasetName, tableName, csvPath, FormatOptions.csv(), sessionId); + } + + // [START bigquery_load_from_file_create_session] + + public static String createSessionForLoading( + String datasetName, String tableName, Path csvPath, CsvOptions formatOptions) + throws IOException, InterruptedException { + LoadStatistics loadStatistics = null; + try { + + // Initialize client that will be used to send requests. This client only needs to be created + // once, and can be reused for multiple requests. + BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); + + TableId tableId = TableId.of(datasetName, tableName); + + // Enable createSession in the configuration + WriteChannelConfiguration writeChannelConfiguration = + WriteChannelConfiguration.newBuilder(tableId) + .setFormatOptions(formatOptions) + .setCreateSession(true) + .build(); + + // The location and JobName must be specified; other fields can be auto-detected. + String jobName = "jobId_" + UUID.randomUUID().toString(); + JobId jobId = JobId.newBuilder().setLocation("us").setJob(jobName).build(); + + // Imports a local file into a table. 
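+      // (Illustrative note: closing the TableDataWriteChannel is what commits the upload and
+      // starts the load job; the try-with-resources block below closes both the channel and
+      // the wrapping stream.)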
+      try (TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
+          OutputStream stream = Channels.newOutputStream(writer)) {
+        Files.copy(csvPath, stream);
+      }
+
+      // Get the Job created by the TableDataWriteChannel and wait for it to complete.
+      // Then retrieve the session ID.
+      Job job = bigquery.getJob(jobId);
+      Job completedJob = job.waitFor();
+      if (completedJob == null) {
+        System.out.println("Job not executed since it no longer exists.");
+        return "";
+      } else if (completedJob.getStatus().getError() != null) {
+        System.out.println(
+            "BigQuery was unable to load local file to the table due to an error: \n"
+                + completedJob.getStatus().getError());
+        return "";
+      }
+      // Read the statistics only after the job is known to have completed successfully;
+      // dereferencing completedJob before the null check above would risk a NullPointerException.
+      loadStatistics = completedJob.getStatistics();
+
+    } catch (BigQueryException e) {
+      System.out.println("Local file not loaded. \n" + e.toString());
+    }
+    return loadStatistics != null ? loadStatistics.getSessionInfo().getSessionId() : "";
+  }
+
+  // [END bigquery_load_from_file_create_session]
+
+  // [START bigquery_load_from_file_with_session]
+  public static void loadLocalFileInSession(
+      String datasetName,
+      String tableName,
+      Path csvPath,
+      FormatOptions formatOptions,
+      String sessionId)
+      throws IOException, InterruptedException {
+    try {
+      // Initialize client that will be used to send requests. This client only needs to be created
+      // once, and can be reused for multiple requests.
+      BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+      TableId tableId = TableId.of(datasetName, tableName);
+
+      // Create a ConnectionProperty carrying the session ID.
+      ConnectionProperty sessionConnectionProperty =
+          ConnectionProperty.newBuilder().setKey("session_id").setValue(sessionId).build();
+
+      // Set the connection property with the session ID in the configuration.
+      WriteChannelConfiguration writeChannelConfiguration =
+          WriteChannelConfiguration.newBuilder(tableId)
+              .setFormatOptions(formatOptions)
+              .setConnectionProperties(ImmutableList.of(sessionConnectionProperty))
+              .build();
+
+      // The location and JobName must be specified; other fields can be auto-detected.
+      String jobName = "jobId_" + UUID.randomUUID().toString();
+      JobId jobId = JobId.newBuilder().setLocation("us").setJob(jobName).build();
+
+      // Imports a local file into a table.
+      try (TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
+          OutputStream stream = Channels.newOutputStream(writer)) {
+        Files.copy(csvPath, stream);
+      }
+
+      // Get the Job created by the TableDataWriteChannel and wait for it to complete.
+      Job job = bigquery.getJob(jobId);
+      Job completedJob = job.waitFor();
+      if (completedJob == null) {
+        System.out.println("Job not executed since it no longer exists.");
+        return;
+      } else if (completedJob.getStatus().getError() != null) {
+        System.out.println(
+            "BigQuery was unable to load local file to the table due to an error: \n"
+                + completedJob.getStatus().getError());
+        return;
+      }
+
+      // Get output status
+      LoadStatistics stats = completedJob.getStatistics();
+      System.out.printf(
+          "Successfully loaded to Session %s. \n", stats.getSessionInfo().getSessionId());
+    } catch (BigQueryException e) {
+      System.out.println("Local file not loaded. \n" + e.toString());
\n" + e.toString()); + } + } + // [END bigquery_load_from_file_with_session] +} diff --git a/samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtablePerm.java b/samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtablePerm.java index 9162697075..fa5c30c16b 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtablePerm.java +++ b/samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtablePerm.java @@ -17,6 +17,8 @@ package com.example.bigquery; // [START bigquery_query_external_bigtable_perm] + +import com.google.api.client.util.Base64; import com.google.cloud.bigquery.BigQuery; import com.google.cloud.bigquery.BigQueryException; import com.google.cloud.bigquery.BigQueryOptions; @@ -29,7 +31,6 @@ import com.google.cloud.bigquery.TableInfo; import com.google.cloud.bigquery.TableResult; import com.google.common.collect.ImmutableList; -import org.apache.commons.codec.binary.Base64; // Sample to queries an external bigtable data source using a permanent table public class QueryExternalBigtablePerm { diff --git a/samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtableTemp.java b/samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtableTemp.java index 51646fcb67..6a04631074 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtableTemp.java +++ b/samples/snippets/src/main/java/com/example/bigquery/QueryExternalBigtableTemp.java @@ -17,6 +17,8 @@ package com.example.bigquery; // [START bigquery_query_external_bigtable_temp] + +import com.google.api.client.util.Base64; import com.google.cloud.bigquery.BigQuery; import com.google.cloud.bigquery.BigQueryException; import com.google.cloud.bigquery.BigQueryOptions; @@ -27,7 +29,6 @@ import com.google.cloud.bigquery.QueryJobConfiguration; import com.google.cloud.bigquery.TableResult; import com.google.common.collect.ImmutableList; -import org.apache.commons.codec.binary.Base64; // Sample to queries an external bigtable data source using a temporary table public class QueryExternalBigtableTemp { diff --git a/samples/snippets/src/main/java/com/example/bigquery/QueryJobOptional.java b/samples/snippets/src/main/java/com/example/bigquery/QueryJobOptional.java new file mode 100644 index 0000000000..1abf580671 --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquery/QueryJobOptional.java @@ -0,0 +1,78 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +// [START bigquery_query_job_optional] +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.JobId; +import com.google.cloud.bigquery.QueryJobConfiguration; +import com.google.cloud.bigquery.QueryJobConfiguration.JobCreationMode; +import com.google.cloud.bigquery.TableResult; + +// Sample demonstrating short mode query execution. 
+//
+// This feature is controlled by setting the defaultJobCreationMode
+// field in the BigQueryOptions used for the client. JOB_CREATION_OPTIONAL
+// allows for the execution of queries without creating a job.
+public class QueryJobOptional {
+
+  public static void main(String[] args) {
+    String query =
+        "SELECT name, gender, SUM(number) AS total FROM "
+            + "bigquery-public-data.usa_names.usa_1910_2013 GROUP BY "
+            + "name, gender ORDER BY total DESC LIMIT 10";
+    queryJobOptional(query);
+  }
+
+  public static void queryJobOptional(String query) {
+    try {
+      // Initialize client that will be used to send requests. This client only needs
+      // to be created once, and can be reused for multiple requests.
+      BigQueryOptions options = BigQueryOptions.getDefaultInstance();
+      options.setDefaultJobCreationMode(JobCreationMode.JOB_CREATION_OPTIONAL);
+      BigQuery bigquery = options.getService();
+
+      // Execute the query. The returned TableResult provides access to information
+      // about the query execution as well as the query results.
+      TableResult results = bigquery.query(QueryJobConfiguration.of(query));
+
+      JobId jobId = results.getJobId();
+      if (jobId != null) {
+        System.out.println("Query was run with job state. Job ID: " + jobId.toString());
+      } else {
+        System.out.println("Query was run in short mode. Query ID: " + results.getQueryId());
+      }
+
+      // Print the results.
+      results
+          .iterateAll()
+          .forEach(
+              row -> {
+                System.out.print("name:" + row.get("name").getStringValue());
+                System.out.print(", gender: " + row.get("gender").getStringValue());
+                System.out.print(", total: " + row.get("total").getLongValue());
+                System.out.println();
+              });
+
+    } catch (BigQueryException | InterruptedException e) {
+      System.out.println("Query not performed \n" + e.toString());
+    }
+  }
+}
+// [END bigquery_query_job_optional]
diff --git a/samples/snippets/src/main/java/com/example/bigquery/QueryPagination.java b/samples/snippets/src/main/java/com/example/bigquery/QueryPagination.java
index 190c9bcdef..86915bab77 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/QueryPagination.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/QueryPagination.java
@@ -59,15 +59,15 @@
 
       // First Page
       results
-          .iterateAll()
-          .forEach(row -> row.forEach(val -> System.out.printf("%s,", val.toString())));
+          .getValues()
+          .forEach(row -> row.forEach(val -> System.out.printf("%s,\n", val.toString())));
 
-      if (results.hasNextPage()) {
-        // Next Page
+      while (results.hasNextPage()) {
+        // Remaining Pages
+        results = results.getNextPage();
         results
-            .getNextPage()
-            .iterateAll()
-            .forEach(row -> row.forEach(val -> System.out.printf("%s,", val.toString())));
+            .getValues()
+            .forEach(row -> row.forEach(val -> System.out.printf("%s,\n", val.toString())));
       }
 
       System.out.println("Query pagination performed successfully.");
diff --git a/samples/snippets/src/main/java/com/example/bigquery/Query.java b/samples/snippets/src/main/java/com/example/bigquery/QueryWithArrayOfStructsNamedParameters.java
similarity index 51%
rename from samples/snippets/src/main/java/com/example/bigquery/Query.java
rename to samples/snippets/src/main/java/com/example/bigquery/QueryWithArrayOfStructsNamedParameters.java
index 8b293cd0b5..3a2074f9ac 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/Query.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/QueryWithArrayOfStructsNamedParameters.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2020 Google
LLC
+ * Copyright 2023 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
@@ -16,55 +16,53 @@
 
 package com.example.bigquery;
 
-// [START bigquery_query]
+// [START bigquery_query_array_structs_params_named]
 import com.google.cloud.bigquery.BigQuery;
 import com.google.cloud.bigquery.BigQueryException;
 import com.google.cloud.bigquery.BigQueryOptions;
 import com.google.cloud.bigquery.QueryJobConfiguration;
+import com.google.cloud.bigquery.QueryParameterValue;
+import com.google.cloud.bigquery.StandardSQLTypeName;
 import com.google.cloud.bigquery.TableResult;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
-// Sample to query in a table
-public class Query {
+public class QueryWithArrayOfStructsNamedParameters {
 
   public static void main(String[] args) {
-    // TODO(developer): Replace these variables before running the sample.
-    String projectId = "MY_PROJECT_ID";
-    String datasetName = "MY_DATASET_NAME";
-    String tableName = "MY_TABLE_NAME";
-    String query =
-        "SELECT name, SUM(number) as total_people\n"
-            + " FROM `"
-            + projectId
-            + "."
-            + datasetName
-            + "."
-            + tableName
-            + "`"
-            + " WHERE state = 'TX'"
-            + " GROUP BY name, state"
-            + " ORDER BY total_people DESC"
-            + " LIMIT 20";
-    query(query);
+    queryWithArrayOfStructsNamedParameters();
   }
 
-  public static void query(String query) {
+  public static void queryWithArrayOfStructsNamedParameters() {
     try {
       // Initialize client that will be used to send requests. This client only needs to be created
       // once, and can be reused for multiple requests.
       BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
 
-      QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
+      Map<String, QueryParameterValue> structMap = new HashMap<>();
+      structMap.put("stringField", QueryParameterValue.string("test-stringField"));
+      QueryParameterValue structQueryParam = QueryParameterValue.struct(structMap);
+      List<QueryParameterValue> arrayOfStructs = new ArrayList<>();
+      arrayOfStructs.add(structQueryParam);
+      String query = "SELECT (@arrayOfStructField) AS record";
+      QueryJobConfiguration queryConfig =
+          QueryJobConfiguration.newBuilder(query)
+              .setUseLegacySql(false)
+              .addNamedParameter(
+                  "arrayOfStructField",
+                  QueryParameterValue.array(arrayOfStructs.toArray(), StandardSQLTypeName.STRUCT))
+              .build();
 
       TableResult results = bigquery.query(queryConfig);
-
       results
           .iterateAll()
-          .forEach(row -> row.forEach(val -> System.out.printf("%s,", val.toString())));
-
-      System.out.println("Query performed successfully.");
+          .forEach(row -> row.forEach(val -> System.out.printf("%s", val.toString())));
+      System.out.println("Query with Array of struct parameters performed successfully.");
     } catch (BigQueryException | InterruptedException e) {
       System.out.println("Query not performed \n" + e.toString());
     }
   }
 }
-// [END bigquery_query]
+// [END bigquery_query_array_structs_params_named]
diff --git a/samples/snippets/src/main/java/com/example/bigquery/QueryWithStructsParameters.java b/samples/snippets/src/main/java/com/example/bigquery/QueryWithStructsParameters.java
index 10ef6c316c..5cf3ebeb69 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/QueryWithStructsParameters.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/QueryWithStructsParameters.java
@@ -40,15 +40,13 @@
 
     // Create struct
     Map<String, QueryParameterValue> struct = new HashMap<>();
-    struct.put("booleanField",
QueryParameterValue.bool(true)); - struct.put("integerField", QueryParameterValue.string("test-stringField")); - struct.put("stringField", QueryParameterValue.int64(10)); + struct.put("x", QueryParameterValue.int64(1)); + struct.put("y", QueryParameterValue.string("foo")); QueryParameterValue recordValue = QueryParameterValue.struct(struct); - String query = "SELECT STRUCT(@recordField) AS record"; + String query = "SELECT STRUCT(@recordField) AS s"; QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query) - .setUseLegacySql(false) .addNamedParameter("recordField", recordValue) .build(); diff --git a/samples/snippets/src/main/java/com/example/bigquery/QueryWithTimestampParameters.java b/samples/snippets/src/main/java/com/example/bigquery/QueryWithTimestampParameters.java index 56a3fcea91..6f20b9801d 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/QueryWithTimestampParameters.java +++ b/samples/snippets/src/main/java/com/example/bigquery/QueryWithTimestampParameters.java @@ -30,8 +30,36 @@ // Sample to running a query with timestamp query parameters. public class QueryWithTimestampParameters { - public static void main(String[] args) { - queryWithTimestampParameters(); + public static void queryFromTableTimestampParameters() { + try { + // Initialize client that will be used to send requests. This client only needs to be created + // once, and can be reused for multiple requests. + BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); + + ZonedDateTime timestamp = LocalDateTime.of(2016, 12, 7, 8, 0, 0).atZone(ZoneOffset.UTC); + String query = "SELECT last_reported FROM " + + "`bigquery-public-data`.new_york_citibike.citibike_stations" + + " WHERE last_reported >= @ts_value LIMIT 5"; + // Note: Standard SQL is required to use query parameters. 
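+      // (Alternative sketch: QueryParameterValue.timestamp also accepts a canonical timestamp
+      // string, e.g. QueryParameterValue.timestamp("2016-12-07 08:00:00.000000+00:00"), which
+      // avoids the manual millis-to-micros conversion below.)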
+      QueryJobConfiguration queryConfig =
+          QueryJobConfiguration.newBuilder(query)
+              .addNamedParameter(
+                  "ts_value",
+                  QueryParameterValue.timestamp(
+                      // Timestamp takes microseconds since 1970-01-01T00:00:00 UTC
+                      timestamp.toInstant().toEpochMilli() * 1000))
+              .build();
+
+      TableResult results = bigquery.query(queryConfig);
+
+      results
+          .iterateAll()
+          .forEach(row -> row.forEach(val -> System.out.printf("%s\n", val.toString())));
+
+      System.out.println("Query with timestamp parameter performed successfully.");
+    } catch (BigQueryException | InterruptedException e) {
+      System.out.println("Query not performed \n" + e);
+    }
   }
 
   public static void queryWithTimestampParameters() {
@@ -60,7 +88,7 @@
 
       System.out.println("Query with timestamp parameter performed successfully.");
     } catch (BigQueryException | InterruptedException e) {
-      System.out.println("Query not performed \n" + e.toString());
+      System.out.println("Query not performed \n" + e);
     }
   }
 }
diff --git a/samples/snippets/src/main/java/com/example/bigquery/ResourceCleanUp.java b/samples/snippets/src/main/java/com/example/bigquery/ResourceCleanUp.java
index cc294f7a5f..a530885cf5 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/ResourceCleanUp.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/ResourceCleanUp.java
@@ -54,6 +54,7 @@
           || datasetName.contains("gcloud_test_")
           || datasetName.contains("SHARED_DATASET_TEST_")
-          || datasetName.contains("WRITE_STREAM_TEST"))
+          || datasetName.contains("WRITE_STREAM_TEST")
+          || datasetName.contains("MY_VIEW_DATASET_NAME_TEST_"))
           && dataset.getCreationTime() > sixHourAgo) {
         System.out.format("\tDeleting Dataset: %s\n", datasetName);
         bigquery.delete(
diff --git a/samples/snippets/src/main/java/com/example/bigquery/SetUserAgent.java b/samples/snippets/src/main/java/com/example/bigquery/SetUserAgent.java
new file mode 100644
index 0000000000..2794305f6a
--- /dev/null
+++ b/samples/snippets/src/main/java/com/example/bigquery/SetUserAgent.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2022 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.bigquery;
+
+// [START bigquery_set_user_agent]
+import com.google.api.gax.rpc.FixedHeaderProvider;
+import com.google.api.gax.rpc.HeaderProvider;
+import com.google.auth.oauth2.GoogleCredentials;
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.common.collect.ImmutableMap;
+import java.io.IOException;
+
+public class SetUserAgent {
+
+  private static final String USER_AGENT_HEADER = "user-agent";
+
+  public static void main(String[] args) throws IOException {
+    // TODO(developer): Replace these variables before running the sample.
+ String projectId = "my-project-id"; + String customUserAgentValue = "my-custom-user-agent-value"; + setUserAgent(projectId, customUserAgentValue); + } + + public static void setUserAgent(String projectId, String customUserAgentValue) + throws IOException { + // Setup the credentials + GoogleCredentials googleCredentials = GoogleCredentials.getApplicationDefault(); + + // Initialize the HeaderProvider object with custom user agent value + HeaderProvider headerProvider = + FixedHeaderProvider.create(ImmutableMap.of(USER_AGENT_HEADER, customUserAgentValue)); + + // Initialize client that will be used to send requests. This client only needs to be created + // once, and can be reused for multiple requests. + BigQuery bigQuery = + BigQueryOptions.newBuilder() + .setProjectId(projectId) + .setCredentials(googleCredentials) + .setHeaderProvider(headerProvider) + .build() + .getService(); + + System.out.println(bigQuery.getOptions().getUserAgent()); + } +} +// [END bigquery_set_user_agent] diff --git a/samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java b/samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java index dfabf71f30..7b72bb0f94 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java +++ b/samples/snippets/src/main/java/com/example/bigquery/SimpleApp.java @@ -20,6 +20,7 @@ // [START bigquery_simple_app_deps] import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryException; import com.google.cloud.bigquery.BigQueryOptions; import com.google.cloud.bigquery.FieldValueList; import com.google.cloud.bigquery.Job; @@ -27,60 +28,67 @@ import com.google.cloud.bigquery.JobInfo; import com.google.cloud.bigquery.QueryJobConfiguration; import com.google.cloud.bigquery.TableResult; -import java.util.UUID; // [END bigquery_simple_app_deps] public class SimpleApp { + public static void main(String... args) throws Exception { - // [START bigquery_simple_app_client] - BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); - // [END bigquery_simple_app_client] - // [START bigquery_simple_app_query] - QueryJobConfiguration queryConfig = - QueryJobConfiguration.newBuilder( - "SELECT commit, author, repo_name " - + "FROM `bigquery-public-data.github_repos.commits` " - + "WHERE subject like '%bigquery%' " - + "ORDER BY subject DESC LIMIT 10") - // Use standard SQL syntax for queries. - // See: https://cloud.google.com/bigquery/sql-reference/ - .setUseLegacySql(false) - .build(); + // TODO(developer): Replace these variables before running the app. + String projectId = "MY_PROJECT_ID"; + simpleApp(projectId); + } - // Create a job ID so that we can safely retry. - JobId jobId = JobId.of(UUID.randomUUID().toString()); - Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build()); + public static void simpleApp(String projectId) { + try { + // [START bigquery_simple_app_client] + BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); + // [END bigquery_simple_app_client] + // [START bigquery_simple_app_query] + QueryJobConfiguration queryConfig = + QueryJobConfiguration.newBuilder( + "SELECT CONCAT('https://stackoverflow.com/questions/', " + + "CAST(id as STRING)) as url, view_count " + + "FROM `bigquery-public-data.stackoverflow.posts_questions` " + + "WHERE tags like '%google-bigquery%' " + + "ORDER BY view_count DESC " + + "LIMIT 10") + // Use standard SQL syntax for queries. 
+              // See: https://cloud.google.com/bigquery/sql-reference/
+              .setUseLegacySql(false)
+              .build();
 
-    // Create a job ID so that we can safely retry.
-    JobId jobId = JobId.of(UUID.randomUUID().toString());
-    Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build());
+      JobId jobId = JobId.newBuilder().setProject(projectId).build();
+      Job queryJob = bigquery.create(JobInfo.newBuilder(queryConfig).setJobId(jobId).build());
 
-    // Wait for the query to complete.
-    queryJob = queryJob.waitFor();
+      // Wait for the query to complete.
+      queryJob = queryJob.waitFor();
 
-    // Check for errors
-    if (queryJob == null) {
-      throw new RuntimeException("Job no longer exists");
-    } else if (queryJob.getStatus().getError() != null) {
-      // You can also look at queryJob.getStatus().getExecutionErrors() for all
-      // errors, not just the latest one.
-      throw new RuntimeException(queryJob.getStatus().getError().toString());
-    }
-    // [END bigquery_simple_app_query]
+      // Check for errors
+      if (queryJob == null) {
+        throw new RuntimeException("Job no longer exists");
+      } else if (queryJob.getStatus().getExecutionErrors() != null
+          && queryJob.getStatus().getExecutionErrors().size() > 0) {
+        // TODO(developer): Handle errors here. An error here does not necessarily mean that
+        // the job failed or was unsuccessful; execution errors can be non-fatal.
+        // For more details: https://cloud.google.com/bigquery/troubleshooting-errors
+        throw new RuntimeException("An unhandled error has occurred");
+      }
+      // [END bigquery_simple_app_query]
 
-    // [START bigquery_simple_app_print]
-    // Get the results.
-    TableResult result = queryJob.getQueryResults();
+      // [START bigquery_simple_app_print]
+      // Get the results.
+      TableResult result = queryJob.getQueryResults();
 
-    // Print all pages of the results.
-    for (FieldValueList row : result.iterateAll()) {
-      // String type
-      String commit = row.get("commit").getStringValue();
-      // Record type
-      FieldValueList author = row.get("author").getRecordValue();
-      String name = author.get("name").getStringValue();
-      String email = author.get("email").getStringValue();
-      // String Repeated type
-      String repoName = row.get("repo_name").getRecordValue().get(0).getStringValue();
-      System.out.printf(
-          "Repo name: %s Author name: %s email: %s commit: %s\n", repoName, name, email, commit);
+      // Print all pages of the results.
+      for (FieldValueList row : result.iterateAll()) {
+        // String type
+        String url = row.get("url").getStringValue();
+        String viewCount = row.get("view_count").getStringValue();
+        System.out.printf("%s : %s views\n", url, viewCount);
+      }
+    } catch (BigQueryException | InterruptedException e) {
+      System.out.println("Simple App failed due to error: \n" + e.toString());
     }
     // [END bigquery_simple_app_print]
   }
diff --git a/samples/snippets/src/main/java/com/example/bigquery/SimpleQuery.java b/samples/snippets/src/main/java/com/example/bigquery/SimpleQuery.java
index 1aeaff1955..429a339cf5 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/SimpleQuery.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/SimpleQuery.java
@@ -17,6 +17,7 @@
 package com.example.bigquery;
 
 // [START bigquery_query]
+
 import com.google.cloud.bigquery.BigQuery;
 import com.google.cloud.bigquery.BigQueryException;
 import com.google.cloud.bigquery.BigQueryOptions;
@@ -27,7 +28,9 @@
 public class SimpleQuery {
 
   public static void main(String[] args) {
     // TODO(developer): Replace this query before running the sample.
- String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;"; + String query = + "SELECT corpus, count(*) as corpus_count " + + "FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;"; simpleQuery(query); } @@ -44,7 +47,14 @@ public static void simpleQuery(String query) { TableResult result = bigquery.query(queryConfig); // Print the results. - result.iterateAll().forEach(rows -> rows.forEach(row -> System.out.println(row.getValue()))); + result + .iterateAll() + .forEach( + row -> { + System.out.print("corpus:" + row.get("corpus").getStringValue()); + System.out.print(", count:" + row.get("corpus_count").getLongValue()); + System.out.println(); + }); System.out.println("Query ran successfully"); } catch (BigQueryException | InterruptedException e) { diff --git a/samples/snippets/src/main/java/com/example/bigquery/SimpleQueryConnectionReadApi.java b/samples/snippets/src/main/java/com/example/bigquery/SimpleQueryConnectionReadApi.java new file mode 100644 index 0000000000..970c29a2ef --- /dev/null +++ b/samples/snippets/src/main/java/com/example/bigquery/SimpleQueryConnectionReadApi.java @@ -0,0 +1,66 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +// [START bigquery_simple_query_connection_read_api] + +import com.google.cloud.bigquery.BigQuery; +import com.google.cloud.bigquery.BigQueryOptions; +import com.google.cloud.bigquery.BigQueryResult; +import com.google.cloud.bigquery.Connection; +import com.google.cloud.bigquery.ConnectionSettings; +import java.sql.ResultSet; +import java.sql.SQLException; + +public class SimpleQueryConnectionReadApi { + + public static void main(String[] args) { + String query = + "SELECT corpus, count(*) as corpus_count " + + "FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;"; + simpleQueryConnectionReadApi(query); + } + + public static void simpleQueryConnectionReadApi(String query) { + + try { + // Initialize client and create a Connection session. + BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService(); + ConnectionSettings connectionSettings = + ConnectionSettings.newBuilder() + .setRequestTimeout(10L) + .setMaxResults(100L) + .setUseQueryCache(true) + .build(); + Connection connection = bigquery.createConnection(connectionSettings); + + // Execute the query using the Connection session. 
+      BigQueryResult bigQueryResult = connection.executeSelect(query);
+      ResultSet resultSet = bigQueryResult.getResultSet();
+
+      while (resultSet.next()) {
+        System.out.print("corpus:" + resultSet.getString("corpus"));
+        System.out.print(", count:" + resultSet.getLong("corpus_count"));
+        System.out.println();
+      }
+      System.out.println("Query ran successfully");
+    } catch (SQLException e) {
+      System.out.println("Query did not run \n" + e.toString());
+    }
+  }
+}
+// [END bigquery_simple_query_connection_read_api]
diff --git a/samples/snippets/src/main/java/com/example/bigquery/TableExists.java b/samples/snippets/src/main/java/com/example/bigquery/TableExists.java
index 0775c2548b..0447fe3de1 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/TableExists.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/TableExists.java
@@ -40,7 +40,8 @@
       BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
 
       Table table = bigquery.getTable(TableId.of(datasetName, tableName));
-      if (table.exists()) {
+      // table will be null if it is not found and setThrowNotFound is not set to `true`.
+      if (table != null && table.exists()) {
         System.out.println("Table already exist");
       } else {
         System.out.println("Table not found");
diff --git a/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java b/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java
index b5363d47bb..05bcee6d9a 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/TableInsertRows.java
@@ -39,12 +39,13 @@ public static void main(String[] args) {
     Map<String, Object> rowContent = new HashMap<>();
     rowContent.put("booleanField", true);
     rowContent.put("numericField", "3.14");
-
-    tableInsertRows(datasetName, tableName, rowContent);
+    // TODO(developer): Replace the row id with a unique value for each row.
+    String rowId = "ROW_ID";
+    tableInsertRows(datasetName, tableName, rowId, rowContent);
   }
 
   public static void tableInsertRows(
-      String datasetName, String tableName, Map<String, Object> rowContent) {
+      String datasetName, String tableName, String rowId, Map<String, Object> rowContent) {
     try {
       // Initialize client that will be used to send requests. This client only needs to be created
       // once, and can be reused for multiple requests.
@@ -58,9 +59,8 @@ public static void tableInsertRows(
       bigquery.insertAll(
           InsertAllRequest.newBuilder(tableId)
               // More rows can be added in the same RPC by invoking .addRow() on the builder.
-              // You can also supply optional unique row keys to support de-duplication
-              // scenarios.
-              .addRow(rowContent)
+              // You can omit the unique row ids to disable de-duplication.
+              .addRow(rowId, rowContent)
              .build());
 
       if (response.hasErrors()) {
diff --git a/samples/snippets/src/main/java/com/example/bigquery/TableInsertRowsWithoutRowIds.java b/samples/snippets/src/main/java/com/example/bigquery/TableInsertRowsWithoutRowIds.java
index 1befb593da..1f4def54c0 100644
--- a/samples/snippets/src/main/java/com/example/bigquery/TableInsertRowsWithoutRowIds.java
+++ b/samples/snippets/src/main/java/com/example/bigquery/TableInsertRowsWithoutRowIds.java
@@ -54,6 +54,10 @@
     InsertAllResponse response =
         bigquery.insertAll(
             InsertAllRequest.newBuilder(TableId.of(datasetName, tableName))
+                // Omitting row ids disables best-effort de-duplication and also disables
+                // retries in the Java library.
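+                // (For comparison: de-duplication-keyed inserts supply an id per row, e.g.
+                // InsertAllRequest.RowToInsert.of("rowId", rowContent1), as in TableInsertRows.)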
.setRows( ImmutableList.of( InsertAllRequest.RowToInsert.of(rowContent1), diff --git a/samples/snippets/src/main/java/com/example/bigquery/AlterMaterializedView.java b/samples/snippets/src/main/java/com/example/bigquery/UpdateMaterializedView.java similarity index 79% rename from samples/snippets/src/main/java/com/example/bigquery/AlterMaterializedView.java rename to samples/snippets/src/main/java/com/example/bigquery/UpdateMaterializedView.java index 8ce25196f3..4de1273443 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/AlterMaterializedView.java +++ b/samples/snippets/src/main/java/com/example/bigquery/UpdateMaterializedView.java @@ -16,7 +16,7 @@ package com.example.bigquery; -// [START bigquery_alter_materialized_view] +// [START bigquery_update_materialized_view] import com.google.cloud.bigquery.BigQuery; import com.google.cloud.bigquery.BigQueryException; import com.google.cloud.bigquery.BigQueryOptions; @@ -24,17 +24,17 @@ import com.google.cloud.bigquery.Table; import com.google.cloud.bigquery.TableId; -// Sample to alter materialized view -public class AlterMaterializedView { +// Sample to update materialized view +public class UpdateMaterializedView { public static void main(String[] args) { // TODO(developer): Replace these variables before running the sample. String datasetName = "MY_DATASET_NAME"; String materializedViewName = "MY_MATERIALIZED_VIEW_NAME"; - alterMaterializedView(datasetName, materializedViewName); + updateMaterializedView(datasetName, materializedViewName); } - public static void alterMaterializedView(String datasetName, String materializedViewName) { + public static void updateMaterializedView(String datasetName, String materializedViewName) { try { // Initialize client that will be used to send requests. This client only needs to be created // once, and can be reused for multiple requests. @@ -45,17 +45,17 @@ public static void alterMaterializedView(String datasetName, String materialized // Get existing materialized view Table table = bigquery.getTable(tableId); MaterializedViewDefinition materializedViewDefinition = table.getDefinition(); - // Alter materialized view + // Update materialized view materializedViewDefinition .toBuilder() .setEnableRefresh(true) .setRefreshIntervalMs(1000L) .build(); table.toBuilder().setDefinition(materializedViewDefinition).build().update(); - System.out.println("Materialized view altered successfully"); + System.out.println("Materialized view updated successfully"); } catch (BigQueryException e) { - System.out.println("Materialized view was not altered. \n" + e.toString()); + System.out.println("Materialized view was not updated. \n" + e.toString()); } } } -// [END bigquery_alter_materialized_view] +// [END bigquery_update_materialized_view] diff --git a/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java b/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java index e66353741e..a0a1ebd770 100644 --- a/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java +++ b/samples/snippets/src/main/java/com/example/bigquery/UpdateTableExpiration.java @@ -30,7 +30,8 @@ public static void main(String[] args) { String datasetName = "MY_DATASET_NAME"; String tableName = "MY_TABLE_NAME"; // Update table expiration to one day. 
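+    // Note: table expiration is an absolute timestamp in epoch millis rather than a duration,
+    // hence the System.currentTimeMillis() offset in the line below.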
- Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS); + Long newExpiration = + TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS) + System.currentTimeMillis(); updateTableExpiration(datasetName, tableName, newExpiration); } diff --git a/samples/snippets/src/test/java/com/example/bigquery/AuthorizeDatasetIT.java b/samples/snippets/src/test/java/com/example/bigquery/AuthorizeDatasetIT.java new file mode 100644 index 0000000000..c4facd5ef4 --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquery/AuthorizeDatasetIT.java @@ -0,0 +1,85 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +import static com.google.common.truth.Truth.assertThat; +import static junit.framework.TestCase.assertNotNull; + +import com.google.cloud.bigquery.DatasetId; +import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +public class AuthorizeDatasetIT { + private final Logger log = Logger.getLogger(this.getClass().getName()); + private String userDatasetName; + private String srcDatasetName; + private ByteArrayOutputStream bout; + private PrintStream out; + private PrintStream originalPrintStream; + private static final String GOOGLE_CLOUD_PROJECT = System.getenv("GOOGLE_CLOUD_PROJECT"); + private DatasetId sourceDatasetId; + private DatasetId userDatasetId; + + private static void requireEnvVar(String varName) { + assertNotNull( + "Environment variable " + varName + " is required to perform these tests.", + System.getenv(varName)); + } + + @BeforeClass + public static void checkRequirements() { + requireEnvVar("GOOGLE_CLOUD_PROJECT"); + } + + @Before + public void setUp() { + userDatasetName = RemoteBigQueryHelper.generateDatasetName(); + srcDatasetName = RemoteBigQueryHelper.generateDatasetName(); + bout = new ByteArrayOutputStream(); + out = new PrintStream(bout); + originalPrintStream = System.out; + System.setOut(out); + CreateDataset.createDataset(userDatasetName); + CreateDataset.createDataset(srcDatasetName); + userDatasetId = DatasetId.of(GOOGLE_CLOUD_PROJECT, userDatasetName); + sourceDatasetId = DatasetId.of(GOOGLE_CLOUD_PROJECT, srcDatasetName); + } + + @After + public void tearDown() { + // Clean up + DeleteDataset.deleteDataset(GOOGLE_CLOUD_PROJECT, userDatasetName); + DeleteDataset.deleteDataset(GOOGLE_CLOUD_PROJECT, srcDatasetName); + // restores print statements in the original method + System.out.flush(); + System.setOut(originalPrintStream); + log.log(Level.INFO, "\n" + bout.toString()); + } + + @Test + public void testCreateDataset() { + AuthorizeDataset.authorizeDataset(sourceDatasetId, userDatasetId); + assertThat(bout.toString()).contains(sourceDatasetId + " updated with the added authorization"); + } +} diff --git 
a/samples/snippets/src/test/java/com/example/bigquery/CopyMultipleTablesIT.java b/samples/snippets/src/test/java/com/example/bigquery/CopyMultipleTablesIT.java index 685db52d1d..e0d175c86e 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/CopyMultipleTablesIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/CopyMultipleTablesIT.java @@ -19,6 +19,9 @@ import static com.google.common.truth.Truth.assertThat; import static junit.framework.TestCase.assertNotNull; +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import java.util.UUID; @@ -32,22 +35,27 @@ public class CopyMultipleTablesIT { private final Logger log = Logger.getLogger(this.getClass().getName()); + private String datasetName; private String tableName; + private String sourceTable1Name; + private String sourceTable2Name; private ByteArrayOutputStream bout; private PrintStream out; private PrintStream originalPrintStream; - private static final String BIGQUERY_DATASET_NAME = System.getenv("BIGQUERY_DATASET_NAME"); + private static final String PROJECT_ID = requireEnvVar("GOOGLE_CLOUD_PROJECT"); - private static void requireEnvVar(String varName) { + private static String requireEnvVar(String varName) { + String value = System.getenv(varName); assertNotNull( "Environment variable " + varName + " is required to perform these tests.", System.getenv(varName)); + return value; } @BeforeClass public static void checkRequirements() { - requireEnvVar("BIGQUERY_DATASET_NAME"); + requireEnvVar("GOOGLE_CLOUD_PROJECT"); } @Before @@ -56,15 +64,30 @@ public void setUp() throws Exception { out = new PrintStream(bout); originalPrintStream = System.out; System.setOut(out); + // Create a new destination table for each test since existing table cannot be overwritten + datasetName = "MY_DATASET_NAME_TEST_" + UUID.randomUUID().toString().substring(0, 8); tableName = "COPY_MULTIPLE_TABLE_TEST" + UUID.randomUUID().toString().substring(0, 8); - CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, null); + sourceTable1Name = + "COPY_MULTIPLE_TABLE_SOURCE1_TEST" + UUID.randomUUID().toString().substring(0, 8); + sourceTable2Name = + "COPY_MULTIPLE_TABLE_SOURCE2_TEST" + UUID.randomUUID().toString().substring(0, 8); + CreateDataset.createDataset(datasetName); + + Schema schema = + Schema.of( + Field.of("timestampField", StandardSQLTypeName.TIMESTAMP), + Field.of("stringField", StandardSQLTypeName.STRING), + Field.of("booleanField", StandardSQLTypeName.BOOL)); + CreateTable.createTable(datasetName, tableName, schema); + CreateTable.createTable(datasetName, sourceTable1Name, schema); + CreateTable.createTable(datasetName, sourceTable2Name, schema); } @After public void tearDown() { // Clean up - DeleteTable.deleteTable(BIGQUERY_DATASET_NAME, tableName); + DeleteDataset.deleteDataset(PROJECT_ID, datasetName); // restores print statements in the original method System.out.flush(); System.setOut(originalPrintStream); @@ -73,7 +96,8 @@ public void tearDown() { @Test public void testCopyMultipleTables() { - CopyMultipleTables.copyMultipleTables(BIGQUERY_DATASET_NAME, tableName); + CopyMultipleTables.copyMultipleTables( + datasetName, tableName, sourceTable1Name, sourceTable2Name); assertThat(bout.toString()).contains("Table copied successfully."); } } diff --git a/samples/snippets/src/test/java/com/example/bigquery/CopyTableCmekIT.java 
b/samples/snippets/src/test/java/com/example/bigquery/CopyTableCmekIT.java index d5b12a0e21..7a601cf951 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/CopyTableCmekIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/CopyTableCmekIT.java @@ -31,8 +31,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class CopyTableCmekIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreateAndQueryRepeatedRecordFieldIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreateAndQueryRepeatedRecordFieldIT.java new file mode 100644 index 0000000000..56ad47a5d8 --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquery/CreateAndQueryRepeatedRecordFieldIT.java @@ -0,0 +1,79 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +import static com.google.common.truth.Truth.assertThat; +import static junit.framework.TestCase.assertNotNull; + +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.util.UUID; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +public class CreateAndQueryRepeatedRecordFieldIT { + + private final Logger log = Logger.getLogger(this.getClass().getName()); + private String tableName; + private ByteArrayOutputStream bout; + private PrintStream out; + private PrintStream originalPrintStream; + + private static final String BIGQUERY_DATASET_NAME = System.getenv("BIGQUERY_DATASET_NAME"); + + private static void requireEnvVar(String varName) { + assertNotNull( + "Environment variable " + varName + " is required to perform these tests.", + System.getenv(varName)); + } + + @BeforeClass + public static void checkRequirements() { + requireEnvVar("BIGQUERY_DATASET_NAME"); + } + + @Before + public void setUp() { + bout = new ByteArrayOutputStream(); + out = new PrintStream(bout); + originalPrintStream = System.out; + System.setOut(out); + tableName = "MY_TABLE_NAME_" + UUID.randomUUID().toString().replace("-", "_"); + } + + @After + public void tearDown() { + // Clean up + DeleteTable.deleteTable(BIGQUERY_DATASET_NAME, tableName); + // restores print statements in the original method + System.out.flush(); + System.setOut(originalPrintStream); + log.log(Level.INFO, "\n" + bout.toString()); + } + + @Test + public void testCreateAndQueryRepeatedRecordField() { + CreateAndQueryRepeatedRecordField.createAndQueryRepeatedRecordField( + BIGQUERY_DATASET_NAME, tableName); + assertThat(bout.toString()) + .contains("Query with Array of struct parameters performed successfully."); + } +} diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreateDatasetAwsIT.java 
b/samples/snippets/src/test/java/com/example/bigquery/CreateDatasetAwsIT.java
index f4b5cf0f6b..6f59f0c49f 100644
--- a/samples/snippets/src/test/java/com/example/bigquery/CreateDatasetAwsIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquery/CreateDatasetAwsIT.java
@@ -27,8 +27,10 @@
 import org.junit.After;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 
+@Ignore
 public class CreateDatasetAwsIT {
 
   private static final String ID = UUID.randomUUID().toString().substring(0, 8);
diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreateDatasetWithRegionalEndpointIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreateDatasetWithRegionalEndpointIT.java
new file mode 100644
index 0000000000..da6f4afd7d
--- /dev/null
+++ b/samples/snippets/src/test/java/com/example/bigquery/CreateDatasetWithRegionalEndpointIT.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2023 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.example.bigquery;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+public class CreateDatasetWithRegionalEndpointIT {
+  private final Logger log = Logger.getLogger(this.getClass().getName());
+  private ByteArrayOutputStream bout;
+  private PrintStream out;
+  private PrintStream originalPrintStream;
+
+  @Before
+  public void setUp() {
+    bout = new ByteArrayOutputStream();
+    out = new PrintStream(bout);
+    originalPrintStream = System.out;
+    System.setOut(out);
+  }
+
+  @After
+  public void tearDown() {
+    // restores print statements in the original method
+    System.out.flush();
+    System.setOut(originalPrintStream);
+    log.log(Level.INFO, "\n" + bout.toString());
+  }
+
+  @Test
+  public void testCreateDatasetWithRegionalEndpoint() {
+    CreateDatasetWithRegionalEndpoint.createDatasetWithRegionalEndpoint();
+    assertThat(bout.toString()).contains("Region of dataset: us-east4");
+  }
+}
diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreateExternalTableAwsIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreateExternalTableAwsIT.java
index 8aac2716e7..80be0bb26f 100644
--- a/samples/snippets/src/test/java/com/example/bigquery/CreateExternalTableAwsIT.java
+++ b/samples/snippets/src/test/java/com/example/bigquery/CreateExternalTableAwsIT.java
@@ -32,8 +32,10 @@
 import org.junit.After;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import org.junit.Test;
 
+@Ignore
 public class CreateExternalTableAwsIT {
 
   private static final String ID = UUID.randomUUID().toString().substring(0, 8);
diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreateModelIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreateModelIT.java
index 52af26303d..dd18a02f1a 100644
---
a/samples/snippets/src/test/java/com/example/bigquery/CreateModelIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/CreateModelIT.java @@ -81,7 +81,7 @@ public void testCreateModel() { + "`" + "OPTIONS ( " + "model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreateTableCmekIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreateTableCmekIT.java index fd35c390e0..7169a8fce1 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/CreateTableCmekIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/CreateTableCmekIT.java @@ -31,8 +31,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class CreateTableCmekIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreateTableIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreateTableIT.java index af5104c1c6..000091a045 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/CreateTableIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/CreateTableIT.java @@ -37,26 +37,25 @@ public class CreateTableIT { private final Logger log = Logger.getLogger(this.getClass().getName()); private String tableName; private ByteArrayOutputStream bout; - private PrintStream out; private PrintStream originalPrintStream; private static final String BIGQUERY_DATASET_NAME = System.getenv("BIGQUERY_DATASET_NAME"); - private static void requireEnvVar(String varName) { + private static void requireEnvVar() { assertNotNull( - "Environment variable " + varName + " is required to perform these tests.", - System.getenv(varName)); + "Environment variable BIGQUERY_DATASET_NAME is required to perform these tests.", + System.getenv("BIGQUERY_DATASET_NAME")); } @BeforeClass public static void checkRequirements() { - requireEnvVar("BIGQUERY_DATASET_NAME"); + requireEnvVar(); } @Before public void setUp() { bout = new ByteArrayOutputStream(); - out = new PrintStream(bout); + PrintStream out = new PrintStream(bout); originalPrintStream = System.out; System.setOut(out); tableName = "MY_TABLE_NAME_" + UUID.randomUUID().toString().replace("-", "_"); diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreateTableTimestampIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreateTableTimestampIT.java new file mode 100644 index 0000000000..b63d6eff63 --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquery/CreateTableTimestampIT.java @@ -0,0 +1,80 @@ +/* + * Copyright 2026 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.example.bigquery; + +import static com.google.common.truth.Truth.assertThat; +import static org.junit.Assert.assertNotNull; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.util.UUID; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +public class CreateTableTimestampIT { + private final Logger log = Logger.getLogger(this.getClass().getName()); + private String tableName; + private ByteArrayOutputStream bout; + private PrintStream originalPrintStream; + + private static final String BIGQUERY_DATASET_NAME = System.getenv("BIGQUERY_DATASET_NAME"); + + private static void requireEnvVar() { + assertNotNull( + "Environment variable BIGQUERY_DATASET_NAME is required to perform these tests.", + System.getenv("BIGQUERY_DATASET_NAME")); + } + + @BeforeClass + public static void checkRequirements() { + requireEnvVar(); + } + + @Before + public void setUp() { + bout = new ByteArrayOutputStream(); + PrintStream out = new PrintStream(bout); + originalPrintStream = System.out; + System.setOut(out); + tableName = "MY_TABLE_NAME_" + UUID.randomUUID().toString().replace("-", "_"); + } + + @After + public void tearDown() { + // Clean up + DeleteTable.deleteTable(BIGQUERY_DATASET_NAME, tableName); + // restores print statements in the original method + System.out.flush(); + System.setOut(originalPrintStream); + log.log(Level.INFO, "\n" + bout.toString()); + } + + @Test + public void testCreateTable() { + Schema schema = + Schema.of(Field.of("timestampField", StandardSQLTypeName.TIMESTAMP)); + CreateTableTimestamp.createTable(BIGQUERY_DATASET_NAME, tableName, schema); + assertThat(bout.toString()).contains("Table created successfully"); + } +} diff --git a/samples/snippets/src/test/java/com/example/bigquery/CreateTablesWithPrimaryAndForeignKeysIT.java b/samples/snippets/src/test/java/com/example/bigquery/CreateTablesWithPrimaryAndForeignKeysIT.java new file mode 100644 index 0000000000..00ba8fa441 --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquery/CreateTablesWithPrimaryAndForeignKeysIT.java @@ -0,0 +1,82 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.example.bigquery; + +import static com.google.common.truth.Truth.assertThat; +import static junit.framework.TestCase.assertNotNull; + +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.util.UUID; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +public class CreateTablesWithPrimaryAndForeignKeysIT { + + private final Logger log = Logger.getLogger(this.getClass().getName()); + private String tableNamePk; + private String tableNameFk; + private ByteArrayOutputStream bout; + private PrintStream out; + private PrintStream originalPrintStream; + + private static final String BIGQUERY_DATASET_NAME = System.getenv("BIGQUERY_DATASET_NAME"); + + private static void requireEnvVar(String varName) { + assertNotNull( + "Environment variable " + varName + " is required to perform these tests.", + System.getenv(varName)); + } + + @BeforeClass + public static void checkRequirements() { + requireEnvVar("BIGQUERY_DATASET_NAME"); + } + + @Before + public void setUp() { + bout = new ByteArrayOutputStream(); + out = new PrintStream(bout); + originalPrintStream = System.out; + System.setOut(out); + tableNamePk = "MY_TABLE_NAME_" + UUID.randomUUID().toString().replace("-", "_"); + tableNameFk = "MY_TABLE_NAME_" + UUID.randomUUID().toString().replace("-", "_"); + } + + @After + public void tearDown() { + // Clean up + DeleteTable.deleteTable(BIGQUERY_DATASET_NAME, tableNamePk); + DeleteTable.deleteTable(BIGQUERY_DATASET_NAME, tableNameFk); + // restores print statements in the original method + System.out.flush(); + System.setOut(originalPrintStream); + log.log(Level.INFO, "\n" + bout.toString()); + } + + @Test + public void testCreateTablesWithPrimaryAndForeignKeys() { + CreateTablesWithPrimaryAndForeignKeys.createTablesWithPrimaryAndForeignKeys( + BIGQUERY_DATASET_NAME, tableNamePk, tableNameFk); + assertThat(bout.toString()) + .contains("Tables with primary and foreign keys created successfully."); + } +} diff --git a/samples/snippets/src/test/java/com/example/bigquery/DeleteModelIT.java b/samples/snippets/src/test/java/com/example/bigquery/DeleteModelIT.java index f33ad1d032..9df25a6581 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/DeleteModelIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/DeleteModelIT.java @@ -69,7 +69,7 @@ public void setUp() { + "`" + "OPTIONS ( " + "model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " diff --git a/samples/snippets/src/test/java/com/example/bigquery/EnableOpenTelemetryTracingIT.java b/samples/snippets/src/test/java/com/example/bigquery/EnableOpenTelemetryTracingIT.java new file mode 100644 index 0000000000..0ad5651018 --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquery/EnableOpenTelemetryTracingIT.java @@ -0,0 +1,105 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +import static com.google.common.truth.Truth.assertThat; + +import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.util.Collection; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class EnableOpenTelemetryTracingIT { + private final Logger log = Logger.getLogger(this.getClass().getName()); + private ByteArrayOutputStream bout; + private PrintStream out; + private PrintStream originalPrintStream; + + private static class ConsoleSpanExporter + implements io.opentelemetry.sdk.trace.export.SpanExporter { + @Override + public CompletableResultCode export(Collection<SpanData> collection) { + if (collection.isEmpty()) { + return CompletableResultCode.ofFailure(); + } + for (SpanData data : collection) { + System.out.println(data); + } + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + return CompletableResultCode.ofSuccess(); + } + } + + @Before + public void setUp() { + bout = new ByteArrayOutputStream(); + out = new PrintStream(bout); + originalPrintStream = System.out; + System.setOut(out); + } + + @After + public void tearDown() { + // restores print statements in the original method + System.out.flush(); + System.setOut(originalPrintStream); + log.log(Level.INFO, "\n" + bout.toString()); + } + + @Test + public void testEnableOpenTelemetryTracing() { + final String tracerName = "testSampleTracer"; + final String datasetId = RemoteBigQueryHelper.generateDatasetName(); + + SdkTracerProvider tracerProvider = + SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.builder(new ConsoleSpanExporter()).build()) + .setSampler(Sampler.alwaysOn()) + .build(); + + OpenTelemetry otel = OpenTelemetrySdk.builder().setTracerProvider(tracerProvider).build(); + + final Tracer tracer = otel.getTracer(tracerName); + + EnableOpenTelemetryTracing.enableOpenTelemetry(tracer, datasetId); + + assertThat(bout.toString()).contains("com.google.cloud.bigquery.BigQuery.createDataset"); + assertThat(bout.toString()).contains("com.google.cloud.bigquery.BigQuery.deleteDataset"); + } +} diff --git a/samples/snippets/src/test/java/com/example/bigquery/EnableOpenTelemetryTracingWithParentSpanIT.java b/samples/snippets/src/test/java/com/example/bigquery/EnableOpenTelemetryTracingWithParentSpanIT.java new file mode 100644 index 0000000000..482915008b --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquery/EnableOpenTelemetryTracingWithParentSpanIT.java @@ -0,0 +1,110 @@ +/* + * Copyright 2025 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +import static com.google.common.truth.Truth.assertThat; + +import com.google.cloud.bigquery.testing.RemoteBigQueryHelper; +import io.opentelemetry.api.OpenTelemetry; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.sdk.OpenTelemetrySdk; +import io.opentelemetry.sdk.common.CompletableResultCode; +import io.opentelemetry.sdk.trace.SdkTracerProvider; +import io.opentelemetry.sdk.trace.data.SpanData; +import io.opentelemetry.sdk.trace.export.SimpleSpanProcessor; +import io.opentelemetry.sdk.trace.samplers.Sampler; +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.time.LocalDate; +import java.util.Collection; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class EnableOpenTelemetryTracingWithParentSpanIT { + private final Logger log = Logger.getLogger(this.getClass().getName()); + private ByteArrayOutputStream bout; + private PrintStream out; + private PrintStream originalPrintStream; + + private static class ConsoleSpanExporter + implements io.opentelemetry.sdk.trace.export.SpanExporter { + @Override + public CompletableResultCode export(Collection<SpanData> collection) { + if (collection.isEmpty()) { + return CompletableResultCode.ofFailure(); + } + for (SpanData data : collection) { + System.out.println(data); + } + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode flush() { + return CompletableResultCode.ofSuccess(); + } + + @Override + public CompletableResultCode shutdown() { + return CompletableResultCode.ofSuccess(); + } + } + + @Before + public void setUp() { + bout = new ByteArrayOutputStream(); + out = new PrintStream(bout); + originalPrintStream = System.out; + System.setOut(out); + } + + @After + public void tearDown() { + // restores print statements in the original method + System.out.flush(); + System.setOut(originalPrintStream); + log.log(Level.INFO, "\n" + bout.toString()); + } + + @Test + public void testEnableOpenTelemetryWithParentSpan() { + final String tracerName = "testSampleTracer"; + final String parentSpanName = "testSampleParentSpan"; + final String datasetId = RemoteBigQueryHelper.generateDatasetName(); + final LocalDate currentDate = LocalDate.now(); + + SdkTracerProvider tracerProvider = + SdkTracerProvider.builder() + .addSpanProcessor(SimpleSpanProcessor.builder(new ConsoleSpanExporter()).build()) + .setSampler(Sampler.alwaysOn()) + .build(); + + OpenTelemetry otel = OpenTelemetrySdk.builder().setTracerProvider(tracerProvider).build(); + + final Tracer tracer = otel.getTracer(tracerName); + + EnableOpenTelemetryTracingWithParentSpan.enableOpenTelemetryWithParentSpan( + tracer, parentSpanName, datasetId); + + assertThat(bout.toString()).contains(parentSpanName); + assertThat(bout.toString()) + .contains(String.format("AttributesMap{data={current_date=%s}", currentDate.toString())); + } +} diff --git a/samples/snippets/src/test/java/com/example/bigquery/ExportQueryResultsToS3IT.java 
b/samples/snippets/src/test/java/com/example/bigquery/ExportQueryResultsToS3IT.java index 61a5438120..b014bde602 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/ExportQueryResultsToS3IT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/ExportQueryResultsToS3IT.java @@ -26,8 +26,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class ExportQueryResultsToS3IT { private final Logger log = Logger.getLogger(this.getClass().getName()); private ByteArrayOutputStream bout; diff --git a/samples/snippets/src/test/java/com/example/bigquery/ExtractModelIT.java b/samples/snippets/src/test/java/com/example/bigquery/ExtractModelIT.java index 3e13e78167..ac0d131113 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/ExtractModelIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/ExtractModelIT.java @@ -26,8 +26,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class ExtractModelIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/ExtractTableCompressedIT.java b/samples/snippets/src/test/java/com/example/bigquery/ExtractTableCompressedIT.java index 8fc5eed6ef..4e551ed818 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/ExtractTableCompressedIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/ExtractTableCompressedIT.java @@ -26,8 +26,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class ExtractTableCompressedIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToCsvIT.java b/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToCsvIT.java index c3b6550bf9..838a989b69 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToCsvIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToCsvIT.java @@ -26,8 +26,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class ExtractTableToCsvIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java b/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java index 68f7903838..393188c7eb 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/ExtractTableToJsonIT.java @@ -27,8 +27,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class ExtractTableToJsonIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/GetModelIT.java b/samples/snippets/src/test/java/com/example/bigquery/GetModelIT.java index b7e1820d74..7a1c931ee6 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/GetModelIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/GetModelIT.java @@ -69,7 +69,7 @@ public void setUp() { + "`" + "OPTIONS ( " + 
"model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " diff --git a/samples/snippets/src/test/java/com/example/bigquery/GrantViewAccessIT.java b/samples/snippets/src/test/java/com/example/bigquery/GrantViewAccessIT.java index f620c781e8..d8655a9d58 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/GrantViewAccessIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/GrantViewAccessIT.java @@ -36,6 +36,7 @@ public class GrantViewAccessIT { private final Logger log = Logger.getLogger(this.getClass().getName()); private String datasetName; + private String viewDatasetName; private String tableName; private String viewName; private ByteArrayOutputStream bout; @@ -43,7 +44,6 @@ public class GrantViewAccessIT { private PrintStream originalPrintStream; private static final String PROJECT_ID = requireEnvVar("GOOGLE_CLOUD_PROJECT"); - private static final String BIGQUERY_DATASET_NAME = requireEnvVar("BIGQUERY_DATASET_NAME"); private static String requireEnvVar(String varName) { String value = System.getenv(varName); @@ -56,7 +56,6 @@ private static String requireEnvVar(String varName) { @BeforeClass public static void checkRequirements() { requireEnvVar("GOOGLE_CLOUD_PROJECT"); - requireEnvVar("BIGQUERY_DATASET_NAME"); } @Before @@ -68,31 +67,34 @@ public void setUp() { // create a temporary dataset, table and view to be deleted. datasetName = "MY_DATASET_NAME_TEST_" + UUID.randomUUID().toString().substring(0, 8); + viewDatasetName = "MY_VIEW_DATASET_NAME_TEST_" + UUID.randomUUID().toString().substring(0, 8); tableName = "MY_TABLE_NAME_TEST_" + UUID.randomUUID().toString().substring(0, 8); viewName = "MY_VIEW_NAME_TEST_" + UUID.randomUUID().toString().substring(0, 8); CreateDataset.createDataset(datasetName); + CreateDataset.createDataset(viewDatasetName); Schema schema = Schema.of( Field.of("timestampField", StandardSQLTypeName.TIMESTAMP), Field.of("stringField", StandardSQLTypeName.STRING), Field.of("booleanField", StandardSQLTypeName.BOOL)); - CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, schema); + CreateTable.createTable(viewDatasetName, tableName, schema); String query = String.format( "SELECT timestampField, stringField, booleanField FROM %s.%s", - BIGQUERY_DATASET_NAME, tableName); - CreateView.createView(BIGQUERY_DATASET_NAME, viewName, query); + viewDatasetName, tableName); + CreateView.createView(viewDatasetName, viewName, query); } @After public void tearDown() { // Clean up - DeleteTable.deleteTable(BIGQUERY_DATASET_NAME, viewName); - DeleteTable.deleteTable(BIGQUERY_DATASET_NAME, tableName); + DeleteTable.deleteTable(viewDatasetName, viewName); + DeleteTable.deleteTable(viewDatasetName, tableName); DeleteDataset.deleteDataset(PROJECT_ID, datasetName); + DeleteDataset.deleteDataset(PROJECT_ID, viewDatasetName); // restores print statements in the original method System.out.flush(); System.setOut(originalPrintStream); @@ -101,7 +103,7 @@ public void tearDown() { @Test public void testGrantViewAccess() { - GrantViewAccess.grantViewAccess(datasetName, BIGQUERY_DATASET_NAME, viewName); + GrantViewAccess.grantViewAccess(datasetName, viewDatasetName, viewName); assertThat(bout.toString()).contains("Grant view access successfully"); } } diff --git a/samples/snippets/src/test/java/com/example/bigquery/ListModelsIT.java b/samples/snippets/src/test/java/com/example/bigquery/ListModelsIT.java index fd99bc24ae..4ddac63bcb 100644 --- 
a/samples/snippets/src/test/java/com/example/bigquery/ListModelsIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/ListModelsIT.java @@ -69,7 +69,7 @@ public void setUp() { + "`" + "OPTIONS ( " + "model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadCsvFromGcsTruncateTest.java b/samples/snippets/src/test/java/com/example/bigquery/LoadCsvFromGcsTruncateTest.java index 09e8d9c50f..6203676496 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/LoadCsvFromGcsTruncateTest.java +++ b/samples/snippets/src/test/java/com/example/bigquery/LoadCsvFromGcsTruncateTest.java @@ -30,8 +30,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class LoadCsvFromGcsTruncateTest { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadJsonFromGcsCmekIT.java b/samples/snippets/src/test/java/com/example/bigquery/LoadJsonFromGcsCmekIT.java index fb7ea8e522..c4507f11d7 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/LoadJsonFromGcsCmekIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/LoadJsonFromGcsCmekIT.java @@ -28,8 +28,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class LoadJsonFromGcsCmekIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/LoadLocalFileInSessionIT.java b/samples/snippets/src/test/java/com/example/bigquery/LoadLocalFileInSessionIT.java new file mode 100644 index 0000000000..1bf6128784 --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquery/LoadLocalFileInSessionIT.java @@ -0,0 +1,97 @@ +/* + * Copyright 2020 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.example.bigquery; + +import static com.google.common.truth.Truth.assertThat; +import static junit.framework.TestCase.assertFalse; +import static junit.framework.TestCase.assertNotNull; + +import com.google.cloud.bigquery.Field; +import com.google.cloud.bigquery.FormatOptions; +import com.google.cloud.bigquery.LegacySQLTypeName; +import com.google.cloud.bigquery.Schema; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.PrintStream; +import java.nio.file.FileSystems; +import java.nio.file.Path; +import java.util.UUID; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +public class LoadLocalFileInSessionIT { + private final Logger log = Logger.getLogger(this.getClass().getName()); + private String tableName; + private ByteArrayOutputStream bout; + private PrintStream out; + private PrintStream originalPrintStream; + + private static final String BIGQUERY_DATASET_NAME = System.getenv("BIGQUERY_DATASET_NAME"); + + private static void requireEnvVar(String varName) { + assertNotNull( + "Environment variable " + varName + " is required to perform these tests.", + System.getenv(varName)); + } + + @BeforeClass + public static void checkRequirements() { + requireEnvVar("BIGQUERY_DATASET_NAME"); + } + + @Before + public void setUp() { + bout = new ByteArrayOutputStream(); + out = new PrintStream(bout); + originalPrintStream = System.out; + System.setOut(out); + tableName = "LOADLOCALFILETESTTABLE_" + UUID.randomUUID().toString().substring(0, 8); + Schema schema = + Schema.of( + Field.of("Name", LegacySQLTypeName.STRING), + Field.of("Age", LegacySQLTypeName.NUMERIC), + Field.of("Weight", LegacySQLTypeName.NUMERIC), + Field.of("IsMagic", LegacySQLTypeName.BOOLEAN)); + CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, schema); + } + + @After + public void tearDown() { + // Clean up + DeleteTable.deleteTable(BIGQUERY_DATASET_NAME, tableName); + // restores print statements in the original method + System.out.flush(); + System.setOut(originalPrintStream); + log.log(Level.INFO, "\n" + bout.toString()); + } + + @Test + public void loadLocalFileInSession() throws IOException, InterruptedException { + Path csvPath = FileSystems.getDefault().getPath("src/test/resources", "bigquery_noheader.csv"); + String sessionId = + LoadLocalFileInSession.createSessionForLoading( + BIGQUERY_DATASET_NAME, tableName, csvPath, FormatOptions.csv()); + assertFalse(sessionId.isEmpty()); + LoadLocalFileInSession.loadLocalFileInSession( + BIGQUERY_DATASET_NAME, tableName, csvPath, FormatOptions.csv(), sessionId); + assertThat(bout.toString()).contains("Successfully loaded to Session"); + } +} diff --git a/samples/snippets/src/test/java/com/example/bigquery/QueryClusteredTableIT.java b/samples/snippets/src/test/java/com/example/bigquery/QueryClusteredTableIT.java index 7b861be5fe..abdaed05f3 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/QueryClusteredTableIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/QueryClusteredTableIT.java @@ -24,8 +24,10 @@ import java.util.logging.Logger; import org.junit.After; import org.junit.Before; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class QueryClusteredTableIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/QueryDestinationTableCmekIT.java 
b/samples/snippets/src/test/java/com/example/bigquery/QueryDestinationTableCmekIT.java index df7d3ec747..e4f21760d8 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/QueryDestinationTableCmekIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/QueryDestinationTableCmekIT.java @@ -31,8 +31,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class QueryDestinationTableCmekIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/QueryExternalBigtablePermIT.java b/samples/snippets/src/test/java/com/example/bigquery/QueryExternalBigtablePermIT.java index 5e0fd3caba..7f14a28719 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/QueryExternalBigtablePermIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/QueryExternalBigtablePermIT.java @@ -34,8 +34,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class QueryExternalBigtablePermIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/QueryExternalBigtableTempIT.java b/samples/snippets/src/test/java/com/example/bigquery/QueryExternalBigtableTempIT.java index 27208b196e..dce81d6374 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/QueryExternalBigtableTempIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/QueryExternalBigtableTempIT.java @@ -34,8 +34,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class QueryExternalBigtableTempIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/QueryExternalSheetsPermIT.java b/samples/snippets/src/test/java/com/example/bigquery/QueryExternalSheetsPermIT.java index 1eb72c4d94..fc16a4f8ea 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/QueryExternalSheetsPermIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/QueryExternalSheetsPermIT.java @@ -30,8 +30,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class QueryExternalSheetsPermIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/QueryExternalSheetsTempIT.java b/samples/snippets/src/test/java/com/example/bigquery/QueryExternalSheetsTempIT.java index 6a167d47a6..2098cce6ee 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/QueryExternalSheetsTempIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/QueryExternalSheetsTempIT.java @@ -28,8 +28,10 @@ import java.util.logging.Logger; import org.junit.After; import org.junit.Before; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class QueryExternalSheetsTempIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/QueryExternalTableAwsIT.java b/samples/snippets/src/test/java/com/example/bigquery/QueryExternalTableAwsIT.java index 4759a4eb6d..504a72f45a 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/QueryExternalTableAwsIT.java 
+++ b/samples/snippets/src/test/java/com/example/bigquery/QueryExternalTableAwsIT.java @@ -26,8 +26,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class QueryExternalTableAwsIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/QueryIT.java b/samples/snippets/src/test/java/com/example/bigquery/QueryJobOptionalIT.java similarity index 78% rename from samples/snippets/src/test/java/com/example/bigquery/QueryIT.java rename to samples/snippets/src/test/java/com/example/bigquery/QueryJobOptionalIT.java index 23aa8d409c..8e3e979098 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/QueryIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/QueryJobOptionalIT.java @@ -26,7 +26,7 @@ import org.junit.Before; import org.junit.Test; -public class QueryIT { +public class QueryJobOptionalIT { private final Logger log = Logger.getLogger(this.getClass().getName()); private ByteArrayOutputStream bout; @@ -50,16 +50,13 @@ public void tearDown() { } @Test - public void testQuery() { + public void testQueryJobOptional() { String query = - "SELECT name, SUM(number) as total_people" - + " FROM `bigquery-public-data.usa_names.usa_1910_2013`" - + " WHERE state = 'TX'" - + " GROUP BY name, state" - + " ORDER BY total_people DESC" - + " LIMIT 20"; + "SELECT name, gender, SUM(number) AS total FROM " + + "bigquery-public-data.usa_names.usa_1910_2013 GROUP BY " + + "name, gender ORDER BY total DESC LIMIT 10"; - Query.query(query); - assertThat(bout.toString()).contains("Query performed successfully."); + QueryJobOptional.queryJobOptional(query); + assertThat(bout.toString()).contains("Query was run"); } } diff --git a/samples/snippets/src/test/java/com/example/bigquery/QueryWithArrayOfStructsNamedParametersIT.java b/samples/snippets/src/test/java/com/example/bigquery/QueryWithArrayOfStructsNamedParametersIT.java new file mode 100644 index 0000000000..f0b524dda4 --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquery/QueryWithArrayOfStructsNamedParametersIT.java @@ -0,0 +1,58 @@ +/* + * Copyright 2023 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.example.bigquery; + +import static com.google.common.truth.Truth.assertThat; + +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class QueryWithArrayOfStructsNamedParametersIT { + + private final Logger log = Logger.getLogger(this.getClass().getName()); + private ByteArrayOutputStream bout; + private PrintStream out; + private PrintStream originalPrintStream; + + @Before + public void setUp() { + bout = new ByteArrayOutputStream(); + out = new PrintStream(bout); + originalPrintStream = System.out; + System.setOut(out); + } + + @After + public void tearDown() { + // restores print statements in the original method + System.out.flush(); + System.setOut(originalPrintStream); + log.log(Level.INFO, "\n" + bout.toString()); + } + + @Test + public void testQueryWithNamedParameters() { + QueryWithArrayOfStructsNamedParameters.queryWithArrayOfStructsNamedParameters(); + assertThat(bout.toString()) + .contains("Query with Array of struct parameters performed successfully."); + } +} diff --git a/samples/snippets/src/test/java/com/example/bigquery/QueryWithTimestampParametersIT.java b/samples/snippets/src/test/java/com/example/bigquery/QueryWithTimestampParametersIT.java index eb6f79cb7b..32d707c307 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/QueryWithTimestampParametersIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/QueryWithTimestampParametersIT.java @@ -30,13 +30,12 @@ public class QueryWithTimestampParametersIT { private final Logger log = Logger.getLogger(this.getClass().getName()); private ByteArrayOutputStream bout; - private PrintStream out; private PrintStream originalPrintStream; @Before public void setUp() { bout = new ByteArrayOutputStream(); - out = new PrintStream(bout); + PrintStream out = new PrintStream(bout); originalPrintStream = System.out; System.setOut(out); } @@ -54,4 +53,10 @@ public void testQueryWithTimestampParameters() { QueryWithTimestampParameters.queryWithTimestampParameters(); assertThat(bout.toString()).contains("Query with timestamp parameter performed successfully."); } + + @Test + public void testQueryFromTableTimestampParameters() { + QueryWithTimestampParameters.queryFromTableTimestampParameters(); + assertThat(bout.toString()).contains("Query with timestamp parameter performed successfully."); + } } diff --git a/samples/snippets/src/test/java/com/example/bigquery/SetUserAgentTest.java b/samples/snippets/src/test/java/com/example/bigquery/SetUserAgentTest.java new file mode 100644 index 0000000000..6c5c9cf6d3 --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquery/SetUserAgentTest.java @@ -0,0 +1,79 @@ +/* + * Copyright 2022 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.example.bigquery; + +import static com.google.common.truth.Truth.assertThat; +import static junit.framework.TestCase.assertNotNull; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.PrintStream; +import java.util.UUID; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +public class SetUserAgentTest { + + private final Logger log = Logger.getLogger(this.getClass().getName()); + private String customUserAgentValue; + private ByteArrayOutputStream bout; + private PrintStream out; + private PrintStream originalPrintStream; + + private static final String PROJECT_ID = requireEnvVar("GOOGLE_CLOUD_PROJECT"); + + private static String requireEnvVar(String varName) { + String value = System.getenv(varName); + assertNotNull( + "Environment variable " + varName + " is required to perform these tests.", + System.getenv(varName)); + return value; + } + + @BeforeClass + public static void checkRequirements() { + requireEnvVar("GOOGLE_CLOUD_PROJECT"); + } + + @Before + public void setUp() { + customUserAgentValue = "CUSTOM_USER_AGENT_" + UUID.randomUUID().toString().substring(0, 8); + bout = new ByteArrayOutputStream(); + out = new PrintStream(bout); + originalPrintStream = System.out; + System.setOut(out); + } + + @After + public void tearDown() { + // Clean up + // restores print statements in the original method + System.out.flush(); + System.setOut(originalPrintStream); + log.log(Level.INFO, "\n" + bout.toString()); + } + + @Test + public void setUserAgentTest() throws IOException { + SetUserAgent.setUserAgent(PROJECT_ID, customUserAgentValue); + assertThat(bout.toString()).contains("CUSTOM_USER_AGENT_"); + } +} diff --git a/samples/snippets/src/test/java/com/example/bigquery/SimpleAppIT.java b/samples/snippets/src/test/java/com/example/bigquery/SimpleAppIT.java index 5ac54beebb..4c4030c7dc 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/SimpleAppIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/SimpleAppIT.java @@ -17,6 +17,7 @@ package com.example.bigquery; import static com.google.common.truth.Truth.assertThat; +import static junit.framework.TestCase.assertNotNull; import java.io.ByteArrayOutputStream; import java.io.PrintStream; @@ -24,6 +25,7 @@ import java.util.logging.Logger; import org.junit.After; import org.junit.Before; +import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; @@ -37,6 +39,20 @@ public class SimpleAppIT { private ByteArrayOutputStream bout; private PrintStream out; private PrintStream originalPrintStream; + private static final String PROJECT_ID = requireEnvVar("GOOGLE_CLOUD_PROJECT"); + + private static String requireEnvVar(String varName) { + String value = System.getenv(varName); + assertNotNull( + "Environment variable " + varName + " is required to perform these tests.", + System.getenv(varName)); + return value; + } + + @BeforeClass + public static void checkRequirements() { + requireEnvVar("GOOGLE_CLOUD_PROJECT"); + } @Before public void setUp() { @@ -56,8 +72,8 @@ public void tearDown() { @Test public void testQuickstart() throws Exception { - SimpleApp.main(); + SimpleApp.simpleApp(PROJECT_ID); String got = bout.toString(); - assertThat(got).contains("Repo name:"); + assertThat(got).contains("https://stackoverflow.com/questions/"); } } diff --git 
a/samples/snippets/src/test/java/com/example/bigquery/SimpleQueryConnectionReadApiIT.java b/samples/snippets/src/test/java/com/example/bigquery/SimpleQueryConnectionReadApiIT.java new file mode 100644 index 0000000000..b7cb109c77 --- /dev/null +++ b/samples/snippets/src/test/java/com/example/bigquery/SimpleQueryConnectionReadApiIT.java @@ -0,0 +1,61 @@ +/* + * Copyright 2024 Google LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.example.bigquery; + +import static com.google.common.truth.Truth.assertThat; + +import java.io.ByteArrayOutputStream; +import java.io.PrintStream; +import java.util.logging.Level; +import java.util.logging.Logger; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +public class SimpleQueryConnectionReadApiIT { + + private final Logger log = Logger.getLogger(this.getClass().getName()); + private ByteArrayOutputStream bout; + private PrintStream out; + private PrintStream originalPrintStream; + + @Before + public void setUp() { + bout = new ByteArrayOutputStream(); + out = new PrintStream(bout); + originalPrintStream = System.out; + System.setOut(out); + } + + @After + public void tearDown() { + // restores print statements in the original method + System.out.flush(); + System.setOut(originalPrintStream); + log.log(Level.INFO, "\n" + bout.toString()); + } + + @Test + public void testSimpleQueryConnectionReadApi() { + String query = + "SELECT corpus, count(*) as corpus_count " + + "FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;"; + + SimpleQueryConnectionReadApi.simpleQueryConnectionReadApi(query); + assertThat(bout.toString()).contains("Query ran successfully"); + } +} diff --git a/samples/snippets/src/test/java/com/example/bigquery/SimpleQueryIT.java b/samples/snippets/src/test/java/com/example/bigquery/SimpleQueryIT.java index b750c31464..5441ffccad 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/SimpleQueryIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/SimpleQueryIT.java @@ -51,7 +51,9 @@ public void tearDown() { @Test public void testSimpleQuery() { - String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;"; + String query = + "SELECT corpus, count(*) as corpus_count " + + "FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;"; SimpleQuery.simpleQuery(query); assertThat(bout.toString()).contains("Query ran successfully"); diff --git a/samples/snippets/src/test/java/com/example/bigquery/TableExistsIT.java b/samples/snippets/src/test/java/com/example/bigquery/TableExistsIT.java index 4ff04e4efa..4573648a61 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/TableExistsIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/TableExistsIT.java @@ -61,7 +61,6 @@ public void setUp() { System.setOut(out); // create a temporary table tableName = "MY_TABLE_NAME_TEST_" + UUID.randomUUID().toString().substring(0, 8); - CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, Schema.of()); } 
@After @@ -76,6 +75,9 @@ public void tearDown() { @Test public void testTableExists() { + TableExists.tableExists(BIGQUERY_DATASET_NAME, tableName); + assertThat(bout.toString()).contains("Table not found"); + CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, Schema.of()); TableExists.tableExists(BIGQUERY_DATASET_NAME, tableName); assertThat(bout.toString()).contains("Table already exist"); } diff --git a/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java b/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java index 84a344cd26..d479530120 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/TableInsertRowsIT.java @@ -88,8 +88,9 @@ public void testTableInsertRows() { Map<String, Object> rowContent = new HashMap<>(); rowContent.put("booleanField", true); rowContent.put("numericField", "3.14"); + String rowId = "ROW_ID"; // Testing - TableInsertRows.tableInsertRows(BIGQUERY_DATASET_NAME, tableName, rowContent); + TableInsertRows.tableInsertRows(BIGQUERY_DATASET_NAME, tableName, rowId, rowContent); assertThat(bout.toString()).contains("Rows successfully inserted into table"); } } diff --git a/samples/snippets/src/test/java/com/example/bigquery/UndeleteTableIT.java b/samples/snippets/src/test/java/com/example/bigquery/UndeleteTableIT.java index f6c725db6b..f53b0e41fe 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/UndeleteTableIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/UndeleteTableIT.java @@ -19,7 +19,9 @@ import static com.google.common.truth.Truth.assertThat; import static junit.framework.TestCase.assertNotNull; +import com.google.cloud.bigquery.Field; import com.google.cloud.bigquery.Schema; +import com.google.cloud.bigquery.StandardSQLTypeName; import java.io.ByteArrayOutputStream; import java.io.PrintStream; import java.util.UUID; @@ -62,7 +64,11 @@ public void setUp() { tableName = "UNDELETE_TABLE_TEST_" + UUID.randomUUID().toString().substring(0, 8); recoverTableName = "RECOVER_DELETE_TABLE_TEST_" + UUID.randomUUID().toString().substring(0, 8); // Create table in dataset for testing - CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, Schema.of()); + Schema schema = + Schema.of( + Field.of("stringField", StandardSQLTypeName.STRING), + Field.of("booleanField", StandardSQLTypeName.BOOL)); + CreateTable.createTable(BIGQUERY_DATASET_NAME, tableName, schema); } @After diff --git a/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java b/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java index d4ec62df2b..2a1ed26f9d 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateDatasetAccessIT.java @@ -30,8 +30,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class UpdateDatasetAccessIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/AlterMaterializedViewIT.java b/samples/snippets/src/test/java/com/example/bigquery/UpdateMaterializedViewIT.java similarity index 90% rename from samples/snippets/src/test/java/com/example/bigquery/AlterMaterializedViewIT.java rename to samples/snippets/src/test/java/com/example/bigquery/UpdateMaterializedViewIT.java index 3cc5dfa7c4..54ed2194a4 100644 --- 
a/samples/snippets/src/test/java/com/example/bigquery/AlterMaterializedViewIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateMaterializedViewIT.java @@ -32,7 +32,7 @@ import org.junit.BeforeClass; import org.junit.Test; -public class AlterMaterializedViewIT { +public class UpdateMaterializedViewIT { private static final String ID = UUID.randomUUID().toString().substring(0, 8); private final Logger log = Logger.getLogger(this.getClass().getName()); @@ -65,7 +65,7 @@ public void setUp() { System.setOut(out); tableName = "MY_TABLE_NAME_TEST_" + ID; - materializedViewName = "MY_ALTER_MATERIALIZED_VIEW_NAME_TEST_" + ID; + materializedViewName = "MY_UPDATE_MATERIALIZED_VIEW_NAME_TEST_" + ID; Schema schema = Schema.of( @@ -95,8 +95,8 @@ public void tearDown() { } @Test - public void testAlterMaterializedView() { - AlterMaterializedView.alterMaterializedView(BIGQUERY_DATASET_NAME, materializedViewName); - assertThat(bout.toString()).contains("Materialized view altered successfully"); + public void testUpdateMaterializedView() { + UpdateMaterializedView.updateMaterializedView(BIGQUERY_DATASET_NAME, materializedViewName); + assertThat(bout.toString()).contains("Materialized view updated successfully"); } } diff --git a/samples/snippets/src/test/java/com/example/bigquery/UpdateModelDescriptionIT.java b/samples/snippets/src/test/java/com/example/bigquery/UpdateModelDescriptionIT.java index 156e83d170..478cfec972 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/UpdateModelDescriptionIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateModelDescriptionIT.java @@ -68,7 +68,7 @@ public void setUp() { + "`" + "OPTIONS ( " + "model_type='linear_reg', " - + "max_iteration=1, " + + "max_iterations=1, " + "learn_rate=0.4, " + "learn_rate_strategy='constant' " + ") AS ( " diff --git a/samples/snippets/src/test/java/com/example/bigquery/UpdateTableCmekIT.java b/samples/snippets/src/test/java/com/example/bigquery/UpdateTableCmekIT.java index 3c6417f44f..ddeb7a0220 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/UpdateTableCmekIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateTableCmekIT.java @@ -31,8 +31,10 @@ import org.junit.After; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.Ignore; import org.junit.Test; +@Ignore public class UpdateTableCmekIT { private final Logger log = Logger.getLogger(this.getClass().getName()); diff --git a/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java b/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java index e9f03e87e5..1e3b8980f9 100644 --- a/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java +++ b/samples/snippets/src/test/java/com/example/bigquery/UpdateTableExpirationIT.java @@ -24,8 +24,9 @@ import com.google.cloud.bigquery.StandardSQLTypeName; import java.io.ByteArrayOutputStream; import java.io.PrintStream; +import java.time.Instant; +import java.time.temporal.ChronoUnit; import java.util.UUID; -import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; import org.junit.After; @@ -81,7 +82,8 @@ public void tearDown() { @Test public void testUpdateTableExpiration() { - Long newExpiration = TimeUnit.MILLISECONDS.convert(1, TimeUnit.DAYS); + // Set new expiration to a week from now + Long newExpiration = Instant.now().plus(7, ChronoUnit.DAYS).toEpochMilli(); UpdateTableExpiration.updateTableExpiration(BIGQUERY_DATASET_NAME, 
tableName, newExpiration); assertThat(bout.toString()).contains("Table expiration updated successfully"); } diff --git a/synth.metadata b/synth.metadata deleted file mode 100644 index f445777bbe..0000000000 --- a/synth.metadata +++ /dev/null @@ -1,91 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/java-bigquery.git", - "sha": "5ab251eed173bd3554c444c394d088923255b29a" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "6946fd71ae9215b0e7ae188f5057df765ee6d7d2" - } - } - ], - "generatedFiles": [ - ".github/CODEOWNERS", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/blunderbuss.yml", - ".github/generated-files-bot.yml", - ".github/readme/synth.py", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".github/trusted-contribution.yml", - ".github/workflows/approve-readme.yaml", - ".github/workflows/auto-release.yaml", - ".github/workflows/ci.yaml", - ".github/workflows/samples.yaml", - ".kokoro/build.bat", - ".kokoro/build.sh", - ".kokoro/coerce_logs.sh", - ".kokoro/common.cfg", - ".kokoro/common.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/java8.cfg", - ".kokoro/continuous/readme.cfg", - ".kokoro/dependencies.sh", - ".kokoro/nightly/common.cfg", - ".kokoro/nightly/integration.cfg", - ".kokoro/nightly/java11.cfg", - ".kokoro/nightly/java7.cfg", - ".kokoro/nightly/java8-osx.cfg", - ".kokoro/nightly/java8-win.cfg", - ".kokoro/nightly/java8.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/clirr.cfg", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/dependencies.cfg", - ".kokoro/presubmit/integration.cfg", - ".kokoro/presubmit/java11.cfg", - ".kokoro/presubmit/java7.cfg", - ".kokoro/presubmit/java8-osx.cfg", - ".kokoro/presubmit/java8-win.cfg", - ".kokoro/presubmit/java8.cfg", - ".kokoro/presubmit/linkage-monitor.cfg", - ".kokoro/presubmit/lint.cfg", - ".kokoro/presubmit/samples.cfg", - ".kokoro/readme.sh", - ".kokoro/release/bump_snapshot.cfg", - ".kokoro/release/common.cfg", - ".kokoro/release/common.sh", - ".kokoro/release/drop.cfg", - ".kokoro/release/drop.sh", - ".kokoro/release/promote.cfg", - ".kokoro/release/promote.sh", - ".kokoro/release/publish_javadoc.cfg", - ".kokoro/release/publish_javadoc.sh", - ".kokoro/release/publish_javadoc11.cfg", - ".kokoro/release/publish_javadoc11.sh", - ".kokoro/release/snapshot.cfg", - ".kokoro/release/snapshot.sh", - ".kokoro/release/stage.cfg", - ".kokoro/release/stage.sh", - ".kokoro/trampoline.sh", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.md", - "LICENSE", - "java.header", - "license-checks.xml", - "renovate.json", - "samples/install-without-bom/pom.xml", - "samples/pom.xml", - "samples/snapshot/pom.xml", - "samples/snippets/pom.xml" - ] -} \ No newline at end of file diff --git a/synth.py b/synth.py deleted file mode 100644 index 8d5aa0e5b7..0000000000 --- a/synth.py +++ /dev/null @@ -1,30 +0,0 @@ -# Copyright 2019 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This script is used to synthesize generated parts of this library.""" - -import synthtool.languages.java as java - -AUTOSYNTH_MULTIPLE_COMMITS = True - -java.common_templates(excludes=[ - '.kokoro/continuous/java8-samples.cfg', - '.kokoro/continuous/java11-samples.cfg', - '.kokoro/nightly/java8-samples.cfg', - '.kokoro/nightly/java11-samples.cfg', - '.kokoro/nightly/samples.cfg', - '.kokoro/presubmit/java8-samples.cfg', - '.kokoro/presubmit/java11-samples.cfg', - 'codecov.yaml' -]) diff --git a/versions.txt b/versions.txt index 3089393371..c52d24dc4e 100644 --- a/versions.txt +++ b/versions.txt @@ -1,4 +1,5 @@ # Format: # module:released-version:current-version -google-cloud-bigquery:1.127.4:1.127.5-SNAPSHOT \ No newline at end of file +google-cloud-bigquery:2.60.0:2.60.1-SNAPSHOT +google-cloud-bigquery-jdbc:0.4.0:0.4.1-SNAPSHOT
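
Nearly every integration test in this change uses the same stdout-capture harness: swap System.out for an in-memory stream in @Before, run the sample, assert on the captured text with Truth, then restore the real stream and log the capture in @After. A minimal, self-contained sketch of that pattern, assuming only JUnit 4 and Truth on the classpath (the class and test names here are illustrative, not part of the patch):

import static com.google.common.truth.Truth.assertThat;

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class StdoutCaptureHarnessTest {
  private ByteArrayOutputStream bout;
  private PrintStream originalPrintStream;

  @Before
  public void setUp() {
    // Redirect System.out into an in-memory buffer so sample output can be asserted on.
    bout = new ByteArrayOutputStream();
    originalPrintStream = System.out;
    System.setOut(new PrintStream(bout));
  }

  @After
  public void tearDown() {
    // Always restore the real stdout, even when the assertion fails.
    System.out.flush();
    System.setOut(originalPrintStream);
  }

  @Test
  public void capturesSampleOutput() {
    // Stand-in for a sample that prints a success message.
    System.out.println("Table created successfully");
    assertThat(bout.toString()).contains("Table created successfully");
  }
}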