diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
deleted file mode 100644
index 508ba98efe..0000000000
--- a/.github/.OwlBot.lock.yaml
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2025 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-docker:
- image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
- digest: sha256:25de45b58e52021d3a24a6273964371a97a4efeefe6ad3845a64e697c63b6447
-# created: 2025-04-14T14:34:43.260858345Z
diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml
deleted file mode 100644
index 5db16e2a9d..0000000000
--- a/.github/.OwlBot.yaml
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-docker:
- image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-
-deep-remove-regex:
- - /owl-bot-staging
-
-deep-copy-regex:
- - source: /google/spanner/(v.*)/.*-py/(.*)
- dest: /owl-bot-staging/spanner/$1/$2
- - source: /google/spanner/admin/instance/(v.*)/.*-py/(.*)
- dest: /owl-bot-staging/spanner_admin_instance/$1/$2
- - source: /google/spanner/admin/database/(v.*)/.*-py/(.*)
- dest: /owl-bot-staging/spanner_admin_database/$1/$2
-
-begin-after-commit-hash: b154da710c5c9eedee127c07f74b6158c9c22382
-
diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml
deleted file mode 100644
index 311ebbb853..0000000000
--- a/.github/auto-approve.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve
-processes:
- - "OwlBotTemplateChanges"
diff --git a/.github/release-please.yml b/.github/release-please.yml
deleted file mode 100644
index dbd2cc9deb..0000000000
--- a/.github/release-please.yml
+++ /dev/null
@@ -1,15 +0,0 @@
-releaseType: python
-handleGHRelease: true
-manifest: true
-# NOTE: this section is generated by synthtool.languages.python
-# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py
-branches:
-- branch: v2
- handleGHRelease: true
- releaseType: python
-- branch: v1
- handleGHRelease: true
- releaseType: python
-- branch: v0
- handleGHRelease: true
- releaseType: python
diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml
deleted file mode 100644
index 3c0f1bfc7e..0000000000
--- a/.github/release-trigger.yml
+++ /dev/null
@@ -1,2 +0,0 @@
-enabled: true
-multiScmName: python-spanner
diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
deleted file mode 100644
index d726d1193d..0000000000
--- a/.github/sync-repo-settings.yaml
+++ /dev/null
@@ -1,15 +0,0 @@
-# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings
-# Rules for main branch protection
-branchProtectionRules:
-# Identifies the protection rule pattern. Name of the branch to be protected.
-# Defaults to `main`
-- pattern: main
- requiresCodeOwnerReviews: true
- requiresStrictStatusChecks: true
- requiredStatusCheckContexts:
- - 'Kokoro'
- - 'Kokoro system-3.12'
- - 'cla/google'
- - 'Samples - Lint'
- - 'Samples - Python 3.9'
- - 'Samples - Python 3.12'
diff --git a/.github/workflows/integration-tests-against-emulator-with-regular-session.yaml b/.github/workflows/integration-tests-against-emulator-with-regular-session.yaml
index 826a3b7629..3f2d3b7ba2 100644
--- a/.github/workflows/integration-tests-against-emulator-with-regular-session.yaml
+++ b/.github/workflows/integration-tests-against-emulator-with-regular-session.yaml
@@ -21,7 +21,7 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v6
with:
- python-version: 3.13
+ python-version: 3.14
- name: Install nox
run: python -m pip install nox
- name: Run system tests
diff --git a/.github/workflows/integration-tests-against-emulator.yaml b/.github/workflows/integration-tests-against-emulator.yaml
index e7158307b8..e8078107bc 100644
--- a/.github/workflows/integration-tests-against-emulator.yaml
+++ b/.github/workflows/integration-tests-against-emulator.yaml
@@ -21,7 +21,7 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v6
with:
- python-version: 3.13
+ python-version: 3.14
- name: Install nox
run: python -m pip install nox
- name: Run system tests
diff --git a/.github/workflows/mock_server_tests.yaml b/.github/workflows/mock_server_tests.yaml
index b705c98191..d16feac517 100644
--- a/.github/workflows/mock_server_tests.yaml
+++ b/.github/workflows/mock_server_tests.yaml
@@ -14,7 +14,7 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v6
with:
- python-version: 3.13
+ python-version: 3.14
- name: Install nox
run: python -m pip install nox
- name: Run mock server tests
diff --git a/.github/workflows/presubmit.yaml b/.github/workflows/presubmit.yaml
index 67db6136d1..56386a746c 100644
--- a/.github/workflows/presubmit.yaml
+++ b/.github/workflows/presubmit.yaml
@@ -17,7 +17,7 @@ jobs:
- name: Setup Python
uses: actions/setup-python@v6
with:
- python-version: 3.13
+ python-version: 3.14
- name: Install nox
run: python -m pip install nox
- name: Check formatting
@@ -27,7 +27,7 @@ jobs:
strategy:
fail-fast: false
matrix:
- python: ["3.9", "3.10", "3.11", "3.12", "3.13"]
+ python: ["3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
steps:
- name: Checkout code
diff --git a/.kokoro/presubmit/integration-regular-sessions-enabled.cfg b/.kokoro/presubmit/integration-regular-sessions-enabled.cfg
index 1f646bebf2..439abd4ba5 100644
--- a/.kokoro/presubmit/integration-regular-sessions-enabled.cfg
+++ b/.kokoro/presubmit/integration-regular-sessions-enabled.cfg
@@ -3,7 +3,7 @@
# Only run a subset of all nox sessions
env_vars: {
key: "NOX_SESSION"
- value: "unit-3.9 unit-3.12 system-3.12"
+ value: "unit-3.9 unit-3.14 system-3.14"
}
env_vars: {
diff --git a/.kokoro/presubmit/system-3.12.cfg b/.kokoro/presubmit/system-3.14.cfg
similarity index 81%
rename from .kokoro/presubmit/system-3.12.cfg
rename to .kokoro/presubmit/system-3.14.cfg
index 78cdc5e851..73904141ba 100644
--- a/.kokoro/presubmit/system-3.12.cfg
+++ b/.kokoro/presubmit/system-3.14.cfg
@@ -3,5 +3,5 @@
# Only run this nox session.
env_vars: {
key: "NOX_SESSION"
- value: "system-3.12"
-}
\ No newline at end of file
+ value: "system-3.14"
+}
diff --git a/.kokoro/samples/python3.14/common.cfg b/.kokoro/samples/python3.14/common.cfg
new file mode 100644
index 0000000000..35d0991eff
--- /dev/null
+++ b/.kokoro/samples/python3.14/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.14"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-314"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-spanner/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-spanner/.kokoro/trampoline_v2.sh"
diff --git a/.kokoro/samples/python3.14/continuous.cfg b/.kokoro/samples/python3.14/continuous.cfg
new file mode 100644
index 0000000000..a1c8d9759c
--- /dev/null
+++ b/.kokoro/samples/python3.14/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.14/periodic-head.cfg b/.kokoro/samples/python3.14/periodic-head.cfg
new file mode 100644
index 0000000000..b6133a1180
--- /dev/null
+++ b/.kokoro/samples/python3.14/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-spanner/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.14/periodic.cfg b/.kokoro/samples/python3.14/periodic.cfg
new file mode 100644
index 0000000000..71cd1e597e
--- /dev/null
+++ b/.kokoro/samples/python3.14/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
diff --git a/.kokoro/samples/python3.14/presubmit.cfg b/.kokoro/samples/python3.14/presubmit.cfg
new file mode 100644
index 0000000000..a1c8d9759c
--- /dev/null
+++ b/.kokoro/samples/python3.14/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.librarian/generator-input/.repo-metadata.json b/.librarian/generator-input/.repo-metadata.json
new file mode 100644
index 0000000000..9569af6e31
--- /dev/null
+++ b/.librarian/generator-input/.repo-metadata.json
@@ -0,0 +1,18 @@
+{
+ "name": "spanner",
+ "name_pretty": "Cloud Spanner",
+ "product_documentation": "https://cloud.google.com/spanner/docs/",
+ "client_documentation": "https://cloud.google.com/python/docs/reference/spanner/latest",
+ "issue_tracker": "https://issuetracker.google.com/issues?q=componentid:190851%2B%20status:open",
+ "release_level": "stable",
+ "language": "python",
+ "library_type": "GAPIC_COMBO",
+ "repo": "googleapis/python-spanner",
+ "distribution_name": "google-cloud-spanner",
+ "api_id": "spanner.googleapis.com",
+ "requires_billing": true,
+ "default_version": "v1",
+ "codeowner_team": "@googleapis/spanner-client-libraries-python",
+ "api_shortname": "spanner",
+ "api_description": "is a fully managed, mission-critical, \nrelational database service that offers transactional consistency at global scale, \nschemas, SQL (ANSI 2011 with extensions), and automatic, synchronous replication \nfor high availability.\n\nBe sure to activate the Cloud Spanner API on the Developer's Console to\nuse Cloud Spanner from your project."
+}
diff --git a/owlbot.py b/.librarian/generator-input/librarian.py
similarity index 68%
rename from owlbot.py
rename to .librarian/generator-input/librarian.py
index cf460877a3..46c2e8dbb4 100644
--- a/owlbot.py
+++ b/.librarian/generator-input/librarian.py
@@ -17,6 +17,7 @@
from pathlib import Path
import shutil
from typing import List, Optional
+import re
import synthtool as s
from synthtool import gcp
@@ -24,58 +25,9 @@
common = gcp.CommonTemplates()
-
-def get_staging_dirs(
- # This is a customized version of the s.get_staging_dirs() function
- # from synthtool to # cater for copying 3 different folders from
- # googleapis-gen:
- # spanner, spanner/admin/instance and spanner/admin/database.
- # Source:
- # https://github.com/googleapis/synthtool/blob/master/synthtool/transforms.py#L280
- default_version: Optional[str] = None,
- sub_directory: Optional[str] = None,
-) -> List[Path]:
- """Returns the list of directories, one per version, copied from
- https://github.com/googleapis/googleapis-gen. Will return in lexical sorting
- order with the exception of the default_version which will be last (if specified).
-
- Args:
- default_version (str): the default version of the API. The directory for this version
- will be the last item in the returned list if specified.
- sub_directory (str): if a `sub_directory` is provided, only the directories within the
- specified `sub_directory` will be returned.
-
- Returns: the empty list if no file were copied.
- """
-
- staging = Path("owl-bot-staging")
-
- if sub_directory:
- staging /= sub_directory
-
- if staging.is_dir():
- # Collect the subdirectories of the staging directory.
- versions = [v.name for v in staging.iterdir() if v.is_dir()]
- # Reorder the versions so the default version always comes last.
- versions = [v for v in versions if v != default_version]
- versions.sort()
- if default_version is not None:
- versions += [default_version]
- dirs = [staging / v for v in versions]
- for dir in dirs:
- s._tracked_paths.add(dir)
- return dirs
- else:
- return []
-
-
-spanner_default_version = "v1"
-spanner_admin_instance_default_version = "v1"
-spanner_admin_database_default_version = "v1"
-
clean_up_generated_samples = True
-for library in get_staging_dirs(spanner_default_version, "spanner"):
+for library in s.get_staging_dirs("v1"):
if clean_up_generated_samples:
shutil.rmtree("samples/generated_samples", ignore_errors=True)
clean_up_generated_samples = False
@@ -185,6 +137,53 @@ def get_staging_dirs(
)"""
)
+ count = s.replace(
+ [
+ library / "google/cloud/spanner_v1/services/*/transports/grpc*",
+ library / "tests/unit/gapic/spanner_v1/*",
+ ],
+    "^\\s+options=\\[.*?\\]",
+ """options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
+ ]""",
+ flags=re.MULTILINE | re.DOTALL,
+ )
+ if count < 1:
+ raise Exception("Expected replacements for gRPC channel options not made.")
+
+ count = s.replace(
+ [
+ library / "google/cloud/spanner_admin_instance_v1/services/*/transports/grpc*",
+ library / "tests/unit/gapic/spanner_admin_instance_v1/*",
+ ],
+    "^\\s+options=\\[.*?\\]",
+ """options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
+ ]""",
+ flags=re.MULTILINE | re.DOTALL,
+ )
+ if count < 1:
+ raise Exception("Expected replacements for gRPC channel options not made.")
+
+ count = s.replace(
+ [
+ library / "google/cloud/spanner_admin_database_v1/services/*/transports/grpc*",
+ library / "tests/unit/gapic/spanner_admin_database_v1/*",
+ ],
+    "^\\s+options=\\[.*?\\]",
+ """options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
+ ]""",
+ flags=re.MULTILINE | re.DOTALL,
+ )
+ if count < 1:
+ raise Exception("Expected replacements for gRPC channel options not made.")
s.move(
library,
excludes=[
@@ -193,27 +192,12 @@ def get_staging_dirs(
"noxfile.py",
"docs/index.rst",
"google/cloud/spanner_v1/__init__.py",
- "**/gapic_version.py",
"testing/constraints-3.7.txt",
+ "google/cloud/spanner_admin_instance/**",
+ "google/cloud/spanner_admin_database/**"
],
)
-for library in get_staging_dirs(
- spanner_admin_instance_default_version, "spanner_admin_instance"
-):
- s.move(
- library,
- excludes=["google/cloud/spanner_admin_instance/**", "*.*", "docs/index.rst", "noxfile.py", "**/gapic_version.py", "testing/constraints-3.7.txt",],
- )
-
-for library in get_staging_dirs(
- spanner_admin_database_default_version, "spanner_admin_database"
-):
- s.move(
- library,
- excludes=["google/cloud/spanner_admin_database/**", "*.*", "docs/index.rst", "noxfile.py", "**/gapic_version.py", "testing/constraints-3.7.txt",],
- )
-
s.remove_staging_dirs()
# ----------------------------------------------------------------------------
@@ -225,36 +209,28 @@ def get_staging_dirs(
cov_level=98,
split_system_tests=True,
system_test_extras=["tracing"],
- system_test_python_versions=["3.12"]
+    system_test_python_versions=["3.12", "3.14"],
+ unit_test_python_versions=["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
)
s.move(
templated_files,
excludes=[
".coveragerc",
- ".github/workflows", # exclude gh actions as credentials are needed for tests
+ ".github/**",
+ ".kokoro/**",
"README.rst",
- ".github/release-please.yml",
- ".kokoro/test-samples-impl.sh",
- ".kokoro/presubmit/presubmit.cfg",
- ".kokoro/samples/python3.7/**",
- ".kokoro/samples/python3.8/**",
],
)
-# Ensure CI runs on a new instance each time
-s.replace(
- ".kokoro/build.sh",
- "# Setup project id.",
- """\
-# Set up creating a new instance for each system test run
-export GOOGLE_CLOUD_TESTS_CREATE_SPANNER_INSTANCE=true
-
-# Setup project id.""",
-)
-
# Update samples folder in CONTRIBUTING.rst
s.replace("CONTRIBUTING.rst", "samples/snippets", "samples/samples")
+s.replace(
+ "noxfile.py",
+    r'''session.python in \("3.11", "3.12", "3.13"\)''',
+ '''session.python in ("3.11", "3.12", "3.13", "3.14")'''
+)
+
# ----------------------------------------------------------------------------
# Samples templates
# ----------------------------------------------------------------------------
@@ -274,4 +250,4 @@ def get_staging_dirs(
# Use a python runtime which is available in the owlbot post processor here
# https://github.com/googleapis/synthtool/blob/master/docker/owlbot/python/Dockerfile
-s.shell.run(["nox", "-s", "blacken-3.10"], hide_output=False)
+s.shell.run(["nox", "-s", "blacken-3.14"], hide_output=False)
diff --git a/.librarian/generator-input/noxfile.py b/.librarian/generator-input/noxfile.py
new file mode 100644
index 0000000000..81c522d0d5
--- /dev/null
+++ b/.librarian/generator-input/noxfile.py
@@ -0,0 +1,595 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by synthtool. DO NOT EDIT!
+
+from __future__ import absolute_import
+
+import os
+import pathlib
+import re
+import shutil
+from typing import Dict, List
+import warnings
+
+import nox
+
+FLAKE8_VERSION = "flake8==6.1.0"
+BLACK_VERSION = "black[jupyter]==23.7.0"
+ISORT_VERSION = "isort==5.11.0"
+LINT_PATHS = ["google", "tests", "noxfile.py", "setup.py"]
+
+DEFAULT_PYTHON_VERSION = "3.14"
+
+DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION = "3.12"
+SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.12", "3.14"]
+
+UNIT_TEST_PYTHON_VERSIONS: List[str] = [
+ "3.9",
+ "3.10",
+ "3.11",
+ "3.12",
+    "3.13", "3.14",
+]
+UNIT_TEST_STANDARD_DEPENDENCIES = [
+ "mock",
+ "asyncmock",
+ "pytest",
+ "pytest-cov",
+ "pytest-asyncio",
+]
+MOCK_SERVER_ADDITIONAL_DEPENDENCIES = [
+ "google-cloud-testutils",
+]
+UNIT_TEST_EXTERNAL_DEPENDENCIES: List[str] = []
+UNIT_TEST_LOCAL_DEPENDENCIES: List[str] = []
+UNIT_TEST_DEPENDENCIES: List[str] = []
+UNIT_TEST_EXTRAS: List[str] = []
+UNIT_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {}
+
+SYSTEM_TEST_STANDARD_DEPENDENCIES: List[str] = [
+ "mock",
+ "pytest",
+ "google-cloud-testutils",
+]
+SYSTEM_TEST_EXTERNAL_DEPENDENCIES: List[str] = []
+SYSTEM_TEST_LOCAL_DEPENDENCIES: List[str] = []
+SYSTEM_TEST_DEPENDENCIES: List[str] = []
+SYSTEM_TEST_EXTRAS: List[str] = [
+ "tracing",
+]
+SYSTEM_TEST_EXTRAS_BY_PYTHON: Dict[str, List[str]] = {}
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+nox.options.sessions = [
+ "unit-3.9",
+ "unit-3.10",
+ "unit-3.11",
+ "unit-3.12",
+    "unit-3.13", "unit-3.14",
+ "system",
+ "cover",
+ "lint",
+ "lint_setup_py",
+ "blacken",
+ "docs",
+ "docfx",
+ "format",
+]
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def lint(session):
+ """Run linters.
+
+ Returns a failure if the linters find linting errors or sufficiently
+ serious code quality issues.
+ """
+ session.install(FLAKE8_VERSION, BLACK_VERSION)
+ session.run(
+ "black",
+ "--check",
+ *LINT_PATHS,
+ )
+ session.run("flake8", "google", "tests")
+
+
+# Use a python runtime which is available in the owlbot post processor here
+# https://github.com/googleapis/synthtool/blob/master/docker/owlbot/python/Dockerfile
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def blacken(session):
+ """Run black. Format code to uniform standard."""
+ session.install(BLACK_VERSION)
+ session.run(
+ "black",
+ *LINT_PATHS,
+ )
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def format(session):
+ """
+ Run isort to sort imports. Then run black
+ to format code to uniform standard.
+ """
+ session.install(BLACK_VERSION, ISORT_VERSION)
+ # Use the --fss option to sort imports using strict alphabetical order.
+ # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections
+ session.run(
+ "isort",
+ "--fss",
+ *LINT_PATHS,
+ )
+ session.run(
+ "black",
+ *LINT_PATHS,
+ )
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def lint_setup_py(session):
+ """Verify that setup.py is valid (including RST check)."""
+ session.install("docutils", "pygments", "setuptools>=79.0.1")
+ session.run("python", "setup.py", "check", "--restructuredtext", "--strict")
+
+
+def install_unittest_dependencies(session, *constraints):
+ standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES
+ session.install(*standard_deps, *constraints)
+
+ if UNIT_TEST_EXTERNAL_DEPENDENCIES:
+ warnings.warn(
+ "'unit_test_external_dependencies' is deprecated. Instead, please "
+ "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.",
+ DeprecationWarning,
+ )
+ session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints)
+
+ if UNIT_TEST_LOCAL_DEPENDENCIES:
+ session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints)
+
+ if UNIT_TEST_EXTRAS_BY_PYTHON:
+ extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, [])
+ elif UNIT_TEST_EXTRAS:
+ extras = UNIT_TEST_EXTRAS
+ else:
+ extras = []
+
+ if extras:
+ session.install("-e", f".[{','.join(extras)}]", *constraints)
+ else:
+ session.install("-e", ".", *constraints)
+
+ # XXX Work around Kokoro image's older pip, which borks the OT install.
+ session.run("pip", "install", "--upgrade", "pip")
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
+ session.install("-e", ".[tracing]", "-c", constraints_path)
+ # XXX: Dump installed versions to debug OT issue
+ session.run("pip", "list")
+
+
+@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
+@nox.parametrize(
+ "protobuf_implementation",
+ ["python", "upb", "cpp"],
+)
+def unit(session, protobuf_implementation):
+ # Install all test dependencies, then install this package in-place.
+
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
+ session.skip("cpp implementation is not supported in python 3.11+")
+
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
+ install_unittest_dependencies(session, "-c", constraints_path)
+
+ # TODO(https://github.com/googleapis/synthtool/issues/1976):
+ # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
+ # The 'cpp' implementation requires Protobuf<4.
+ if protobuf_implementation == "cpp":
+ session.install("protobuf<4")
+
+ # Run py.test against the unit tests.
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=unit_{session.python}_sponge_log.xml",
+ "--cov=google",
+ "--cov=tests/unit",
+ "--cov-append",
+ "--cov-config=.coveragerc",
+ "--cov-report=",
+ "--cov-fail-under=0",
+ os.path.join("tests", "unit"),
+ *session.posargs,
+ env={
+ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+ },
+ )
+
+
+@nox.session(python=DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION)
+def mockserver(session):
+ # Install all test dependencies, then install this package in-place.
+
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
+ standard_deps = (
+ UNIT_TEST_STANDARD_DEPENDENCIES
+ + UNIT_TEST_DEPENDENCIES
+ + MOCK_SERVER_ADDITIONAL_DEPENDENCIES
+ )
+ session.install(*standard_deps, "-c", constraints_path)
+ session.install("-e", ".", "-c", constraints_path)
+
+ # Run py.test against the mockserver tests.
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=unit_{session.python}_sponge_log.xml",
+ "--cov=google",
+ "--cov=tests/unit",
+ "--cov-append",
+ "--cov-config=.coveragerc",
+ "--cov-report=",
+ "--cov-fail-under=0",
+ os.path.join("tests", "mockserver_tests"),
+ *session.posargs,
+ )
+
+
+def install_systemtest_dependencies(session, *constraints):
+ # Use pre-release gRPC for system tests.
+ # Exclude version 1.52.0rc1 which has a known issue.
+ # See https://github.com/grpc/grpc/issues/32163
+ session.install("--pre", "grpcio!=1.52.0rc1")
+
+ session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints)
+
+ if SYSTEM_TEST_EXTERNAL_DEPENDENCIES:
+ session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints)
+
+ if SYSTEM_TEST_LOCAL_DEPENDENCIES:
+ session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints)
+
+ if SYSTEM_TEST_DEPENDENCIES:
+ session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints)
+
+ if SYSTEM_TEST_EXTRAS_BY_PYTHON:
+ extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, [])
+ elif SYSTEM_TEST_EXTRAS:
+ extras = SYSTEM_TEST_EXTRAS
+ else:
+ extras = []
+
+ if extras:
+ session.install("-e", f".[{','.join(extras)}]", *constraints)
+ else:
+ session.install("-e", ".", *constraints)
+
+
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
+@nox.parametrize(
+ "protobuf_implementation,database_dialect",
+ [
+ ("python", "GOOGLE_STANDARD_SQL"),
+ ("python", "POSTGRESQL"),
+ ("upb", "GOOGLE_STANDARD_SQL"),
+ ("upb", "POSTGRESQL"),
+ ("cpp", "GOOGLE_STANDARD_SQL"),
+ ("cpp", "POSTGRESQL"),
+ ],
+)
+def system(session, protobuf_implementation, database_dialect):
+ """Run the system test suite."""
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
+ system_test_path = os.path.join("tests", "system.py")
+ system_test_folder_path = os.path.join("tests", "system")
+
+ # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
+ session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
+ # Sanity check: Only run tests if the environment variable is set.
+ if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", "") and not os.environ.get(
+ "SPANNER_EMULATOR_HOST", ""
+ ):
+ session.skip(
+ "Credentials or emulator host must be set via environment variable"
+ )
+ if not (
+ os.environ.get("SPANNER_EMULATOR_HOST") or protobuf_implementation == "python"
+ ):
+ session.skip(
+ "Only run system tests on real Spanner with one protobuf implementation to speed up the build"
+ )
+
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
+ session.skip("cpp implementation is not supported in python 3.11+")
+
+ # Install pyopenssl for mTLS testing.
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
+ session.install("pyopenssl")
+
+ system_test_exists = os.path.exists(system_test_path)
+ system_test_folder_exists = os.path.exists(system_test_folder_path)
+ # Sanity check: only run tests if found.
+ if not system_test_exists and not system_test_folder_exists:
+ session.skip("System tests were not found")
+
+ install_systemtest_dependencies(session, "-c", constraints_path)
+
+ # TODO(https://github.com/googleapis/synthtool/issues/1976):
+ # Remove the 'cpp' implementation once support for Protobuf 3.x is dropped.
+ # The 'cpp' implementation requires Protobuf<4.
+ if protobuf_implementation == "cpp":
+ session.install("protobuf<4")
+
+ # Run py.test against the system tests.
+ if system_test_exists:
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_path,
+ *session.posargs,
+ env={
+ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+ "SPANNER_DATABASE_DIALECT": database_dialect,
+ "SKIP_BACKUP_TESTS": "true",
+ },
+ )
+ elif system_test_folder_exists:
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_folder_path,
+ *session.posargs,
+ env={
+ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+ "SPANNER_DATABASE_DIALECT": database_dialect,
+ "SKIP_BACKUP_TESTS": "true",
+ },
+ )
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def cover(session):
+ """Run the final coverage report.
+
+ This outputs the coverage report aggregating coverage from the unit
+ test runs (not system test runs), and then erases coverage data.
+ """
+ session.install("coverage", "pytest-cov")
+ session.run("coverage", "report", "--show-missing", "--fail-under=98")
+
+ session.run("coverage", "erase")
+
+
+@nox.session(python="3.10")
+def docs(session):
+ """Build the docs for this library."""
+
+ session.install("-e", ".[tracing]")
+ session.install(
+ # We need to pin to specific versions of the `sphinxcontrib-*` packages
+ # which still support sphinx 4.x.
+ # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344
+ # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345.
+ "sphinxcontrib-applehelp==1.0.4",
+ "sphinxcontrib-devhelp==1.0.2",
+ "sphinxcontrib-htmlhelp==2.0.1",
+ "sphinxcontrib-qthelp==1.0.3",
+ "sphinxcontrib-serializinghtml==1.1.5",
+ "sphinx==4.5.0",
+ "alabaster",
+ "recommonmark",
+ )
+
+ shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+ session.run(
+ "sphinx-build",
+ "-W", # warnings as errors
+ "-T", # show full traceback on exception
+ "-N", # no colors
+ "-b",
+ "html",
+ "-d",
+ os.path.join("docs", "_build", "doctrees", ""),
+ os.path.join("docs", ""),
+ os.path.join("docs", "_build", "html", ""),
+ )
+
+
+@nox.session(python="3.10")
+def docfx(session):
+ """Build the docfx yaml files for this library."""
+
+ session.install("-e", ".[tracing]")
+ session.install(
+ # We need to pin to specific versions of the `sphinxcontrib-*` packages
+ # which still support sphinx 4.x.
+ # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344
+ # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345.
+ "sphinxcontrib-applehelp==1.0.4",
+ "sphinxcontrib-devhelp==1.0.2",
+ "sphinxcontrib-htmlhelp==2.0.1",
+ "sphinxcontrib-qthelp==1.0.3",
+ "sphinxcontrib-serializinghtml==1.1.5",
+ "gcp-sphinx-docfx-yaml",
+ "alabaster",
+ "recommonmark",
+ )
+
+ shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+ session.run(
+ "sphinx-build",
+ "-T", # show full traceback on exception
+ "-N", # no colors
+ "-D",
+ (
+ "extensions=sphinx.ext.autodoc,"
+ "sphinx.ext.autosummary,"
+ "docfx_yaml.extension,"
+ "sphinx.ext.intersphinx,"
+ "sphinx.ext.coverage,"
+ "sphinx.ext.napoleon,"
+ "sphinx.ext.todo,"
+ "sphinx.ext.viewcode,"
+ "recommonmark"
+ ),
+ "-b",
+ "html",
+ "-d",
+ os.path.join("docs", "_build", "doctrees", ""),
+ os.path.join("docs", ""),
+ os.path.join("docs", "_build", "html", ""),
+ )
+
+
+@nox.session(python="3.13")
+@nox.parametrize(
+ "protobuf_implementation,database_dialect",
+ [
+ ("python", "GOOGLE_STANDARD_SQL"),
+ ("python", "POSTGRESQL"),
+ ("upb", "GOOGLE_STANDARD_SQL"),
+ ("upb", "POSTGRESQL"),
+ ("cpp", "GOOGLE_STANDARD_SQL"),
+ ("cpp", "POSTGRESQL"),
+ ],
+)
+def prerelease_deps(session, protobuf_implementation, database_dialect):
+ """Run all tests with prerelease versions of dependencies installed."""
+
+ if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
+ session.skip("cpp implementation is not supported in python 3.11+")
+
+ # Install all dependencies
+ session.install("-e", ".[all, tests, tracing]")
+ unit_deps_all = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_EXTERNAL_DEPENDENCIES
+ session.install(*unit_deps_all)
+ system_deps_all = (
+ SYSTEM_TEST_STANDARD_DEPENDENCIES + SYSTEM_TEST_EXTERNAL_DEPENDENCIES
+ )
+ session.install(*system_deps_all)
+
+ # Because we test minimum dependency versions on the minimum Python
+ # version, the first version we test with in the unit tests sessions has a
+ # constraints file containing all dependencies and extras.
+ with open(
+ CURRENT_DIRECTORY
+ / "testing"
+ / f"constraints-{UNIT_TEST_PYTHON_VERSIONS[0]}.txt",
+ encoding="utf-8",
+ ) as constraints_file:
+ constraints_text = constraints_file.read()
+
+ # Ignore leading whitespace and comment lines.
+ constraints_deps = [
+ match.group(1)
+ for match in re.finditer(
+ r"^\s*([a-zA-Z0-9._-]+)", constraints_text, flags=re.MULTILINE
+ )
+ ]
+
+ if constraints_deps:
+ session.install(*constraints_deps)
+
+ prerel_deps = [
+ "protobuf",
+ # dependency of grpc
+ "six",
+ "grpc-google-iam-v1",
+ "googleapis-common-protos",
+ "grpcio",
+ "grpcio-status",
+ "google-api-core",
+ "google-auth",
+ "proto-plus",
+ "google-cloud-testutils",
+    # dependencies of google-cloud-testutils
+ "click",
+ ]
+
+ for dep in prerel_deps:
+ session.install("--pre", "--no-deps", "--upgrade", dep)
+
+ # Remaining dependencies
+ other_deps = [
+ "requests",
+ ]
+ session.install(*other_deps)
+
+ # Print out prerelease package versions
+ session.run(
+ "python", "-c", "import google.protobuf; print(google.protobuf.__version__)"
+ )
+ session.run("python", "-c", "import grpc; print(grpc.__version__)")
+ session.run("python", "-c", "import google.auth; print(google.auth.__version__)")
+
+ session.run(
+ "py.test",
+ "tests/unit",
+ env={
+ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+ "SPANNER_DATABASE_DIALECT": database_dialect,
+ "SKIP_BACKUP_TESTS": "true",
+ },
+ )
+
+ system_test_path = os.path.join("tests", "system.py")
+ system_test_folder_path = os.path.join("tests", "system")
+
+ # Only run system tests for one protobuf implementation on real Spanner to speed up the build.
+ if os.environ.get("SPANNER_EMULATOR_HOST") or protobuf_implementation == "python":
+ # Only run system tests if found.
+ if os.path.exists(system_test_path):
+ session.run(
+ "py.test",
+ "--verbose",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_path,
+ *session.posargs,
+ env={
+ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+ "SPANNER_DATABASE_DIALECT": database_dialect,
+ "SKIP_BACKUP_TESTS": "true",
+ },
+ )
+ elif os.path.exists(system_test_folder_path):
+ session.run(
+ "py.test",
+ "--verbose",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_folder_path,
+ *session.posargs,
+ env={
+ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": protobuf_implementation,
+ "SPANNER_DATABASE_DIALECT": database_dialect,
+ "SKIP_BACKUP_TESTS": "true",
+ },
+ )
diff --git a/.librarian/generator-input/setup.py b/.librarian/generator-input/setup.py
new file mode 100644
index 0000000000..858982f783
--- /dev/null
+++ b/.librarian/generator-input/setup.py
@@ -0,0 +1,103 @@
+# -*- coding: utf-8 -*-
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import io
+import os
+
+import setuptools # type: ignore
+
+package_root = os.path.abspath(os.path.dirname(__file__))
+
+name = "google-cloud-spanner"
+
+
+description = "Google Cloud Spanner API client library"
+
+version = {}
+with open(os.path.join(package_root, "google/cloud/spanner_v1/gapic_version.py")) as fp:
+ exec(fp.read(), version)
+version = version["__version__"]
+
+if version[0] == "0":
+ release_status = "Development Status :: 4 - Beta"
+else:
+ release_status = "Development Status :: 5 - Production/Stable"
+
+dependencies = [
+ "google-api-core[grpc] >= 1.34.0, <3.0.0,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
+ "google-cloud-core >= 1.4.4, < 3.0.0",
+ "grpc-google-iam-v1 >= 0.12.4, <1.0.0",
+ "proto-plus >= 1.22.0, <2.0.0",
+ "sqlparse >= 0.4.4",
+ "proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'",
+ "protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
+ "grpc-interceptor >= 0.15.4",
+]
+extras = {
+ "tracing": [
+ "opentelemetry-api >= 1.22.0",
+ "opentelemetry-sdk >= 1.22.0",
+ "opentelemetry-semantic-conventions >= 0.43b0",
+ "opentelemetry-resourcedetector-gcp >= 1.8.0a0",
+ "google-cloud-monitoring >= 2.16.0",
+ "mmh3 >= 4.1.0 ",
+ ],
+ "libcst": "libcst >= 0.2.5",
+}
+
+url = "https://github.com/googleapis/python-spanner"
+
+package_root = os.path.abspath(os.path.dirname(__file__))
+
+readme_filename = os.path.join(package_root, "README.rst")
+with io.open(readme_filename, encoding="utf-8") as readme_file:
+ readme = readme_file.read()
+
+packages = [
+ package
+ for package in setuptools.find_namespace_packages()
+ if package.startswith("google")
+]
+
+setuptools.setup(
+ name=name,
+ version=version,
+ description=description,
+ long_description=readme,
+ author="Google LLC",
+ author_email="googleapis-packages@google.com",
+ license="Apache 2.0",
+ url=url,
+ classifiers=[
+ release_status,
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+ "Operating System :: OS Independent",
+ "Topic :: Internet",
+ ],
+ platforms="Posix; MacOS X; Windows",
+ packages=packages,
+ install_requires=dependencies,
+ extras_require=extras,
+ python_requires=">=3.9",
+ include_package_data=True,
+ zip_safe=False,
+)
diff --git a/.librarian/state.yaml b/.librarian/state.yaml
new file mode 100644
index 0000000000..2b8a475a0a
--- /dev/null
+++ b/.librarian/state.yaml
@@ -0,0 +1,54 @@
+image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:8e2c32496077054105bd06c54a59d6a6694287bc053588e24debe6da6920ad91
+libraries:
+ - id: google-cloud-spanner
+ version: 3.60.0
+ last_generated_commit: a17b84add8318f780fcc8a027815d5fee644b9f7
+ apis:
+ - path: google/spanner/admin/instance/v1
+ service_config: spanner.yaml
+ - path: google/spanner/admin/database/v1
+ service_config: spanner.yaml
+ - path: google/spanner/v1
+ service_config: spanner.yaml
+ source_roots:
+ - .
+ preserve_regex: []
+ remove_regex:
+ - ^google/cloud/spanner_v1/gapic_metadata.json
+ - ^google/cloud/spanner_v1/gapic_version.py
+ - ^google/cloud/spanner_v1/py.typed
+ - ^google/cloud/spanner_v1/services
+ - ^google/cloud/spanner_v1/types
+ - ^google/cloud/spanner_admin_database_v1
+ - ^google/cloud/spanner_admin_instance_v1
+ - ^tests/unit/gapic
+ - ^tests/__init__.py
+ - ^tests/unit/__init__.py
+ - ^.pre-commit-config.yaml
+ - ^.repo-metadata.json
+ - ^.trampolinerc
+ - ^LICENSE
+ - ^SECURITY.md
+ - ^mypy.ini
+ - ^noxfile.py
+ - ^renovate.json
+ - ^samples/AUTHORING_GUIDE.md
+ - ^samples/CONTRIBUTING.md
+ - ^samples/generated_samples
+ - ^scripts/fixup_
+ - ^setup.py
+ - ^testing/constraints-3.8
+ - ^testing/constraints-3.9
+ - ^testing/constraints-3.1
+ - ^docs/conf.py
+ - ^docs/_static
+ - ^docs/spanner_v1/types_.rst
+ - ^docs/_templates
+ - ^docs/spanner_v1/services_.rst
+ - ^docs/spanner_v1/spanner.rst
+ - ^docs/spanner_v1/types.rst
+ - ^docs/spanner_admin_database_v1
+ - ^docs/spanner_admin_instance_v1
+ - ^docs/multiprocessing.rst
+ - ^docs/summary_overview.md
+ tag_format: v{version}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b5bbe07325..0a5a487e85 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,33 @@
[1]: https://pypi.org/project/google-cloud-spanner/#history
+## [3.60.0](https://github.com/googleapis/python-spanner/compare/v3.59.0...v3.60.0) (2025-12-10)
+
+
+### Documentation
+
+* Update description for the BatchCreateSessionsRequest and Session ([e08260fe24b62313d7964572eeb963eb8c3c923f](https://github.com/googleapis/python-spanner/commit/e08260fe24b62313d7964572eeb963eb8c3c923f))
+* Update description for the IsolationLevel ([e08260fe24b62313d7964572eeb963eb8c3c923f](https://github.com/googleapis/python-spanner/commit/e08260fe24b62313d7964572eeb963eb8c3c923f))
+
+
+### Features
+
+* make built-in metrics enabled by default (#1459) ([64aebe7e3ecfec756435f7d102b36f5a41f7cc52](https://github.com/googleapis/python-spanner/commit/64aebe7e3ecfec756435f7d102b36f5a41f7cc52))
+* Add Spanner location API (#1457) ([e08260fe24b62313d7964572eeb963eb8c3c923f](https://github.com/googleapis/python-spanner/commit/e08260fe24b62313d7964572eeb963eb8c3c923f))
+* Add Send and Ack mutations for Queues ([e08260fe24b62313d7964572eeb963eb8c3c923f](https://github.com/googleapis/python-spanner/commit/e08260fe24b62313d7964572eeb963eb8c3c923f))
+* Add QueryAdvisorResult for query plan ([e08260fe24b62313d7964572eeb963eb8c3c923f](https://github.com/googleapis/python-spanner/commit/e08260fe24b62313d7964572eeb963eb8c3c923f))
+* add cloud.region, request_tag and transaction_tag in span attributes (#1449) ([d37fb80a39aea859059ae7d85adc75095a6e14e6](https://github.com/googleapis/python-spanner/commit/d37fb80a39aea859059ae7d85adc75095a6e14e6))
+* Exposing AutoscalingConfig in InstancePartition ([8b6f154085543953556acde161a739414988b7f0](https://github.com/googleapis/python-spanner/commit/8b6f154085543953556acde161a739414988b7f0))
+* enable OpenTelemetry metrics and tracing by default (#1410) ([bb5095dfb615159a575933a332382ba93ba4bbd1](https://github.com/googleapis/python-spanner/commit/bb5095dfb615159a575933a332382ba93ba4bbd1))
+* add support for experimental host (#1452) ([9535e5e096f6ab53f2817af4fd7ac1fa2ca71660](https://github.com/googleapis/python-spanner/commit/9535e5e096f6ab53f2817af4fd7ac1fa2ca71660))
+
+
+### Bug Fixes
+
+* Provide Spanner Option to disable metrics (#1460) ([f1ebc43ba4c1ee3a8ee77ae4b0b2468937f06b71](https://github.com/googleapis/python-spanner/commit/f1ebc43ba4c1ee3a8ee77ae4b0b2468937f06b71))
+* Deprecate credentials_file argument ([8b6f154085543953556acde161a739414988b7f0](https://github.com/googleapis/python-spanner/commit/8b6f154085543953556acde161a739414988b7f0))
+* configure keepAlive time for gRPC TCP connections (#1448) ([efb2833e52e54b096e552a4d91f94b017ac733bb](https://github.com/googleapis/python-spanner/commit/efb2833e52e54b096e552a4d91f94b017ac733bb))
+
## [3.59.0](https://github.com/googleapis/python-spanner/compare/v3.58.0...v3.59.0) (2025-10-18)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 76e9061cd2..60be7c4f93 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -226,12 +226,14 @@ We support:
- `Python 3.11`_
- `Python 3.12`_
- `Python 3.13`_
+- `Python 3.14`_
.. _Python 3.9: https://docs.python.org/3.9/
.. _Python 3.10: https://docs.python.org/3.10/
.. _Python 3.11: https://docs.python.org/3.11/
.. _Python 3.12: https://docs.python.org/3.12/
.. _Python 3.13: https://docs.python.org/3.13/
+.. _Python 3.14: https://docs.python.org/3.14/
Supported versions can be found in our ``noxfile.py`` `config`_.
diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html
index 6316a537f7..95e9c77fcf 100644
--- a/docs/_templates/layout.html
+++ b/docs/_templates/layout.html
@@ -20,8 +20,8 @@
{% endblock %}
-
- As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
Library versions released prior to that date will continue to be available. For more information please
visit
Python 2 support on Google Cloud.
diff --git a/docs/conf.py b/docs/conf.py
index 78e49ed55c..010a6b6cda 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,11 +1,11 @@
# -*- coding: utf-8 -*-
-# Copyright 2024 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
-# http://www.apache.org/licenses/LICENSE-2.0
+# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
@@ -13,6 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+#
# google-cloud-spanner documentation build configuration file
#
# This file is execfile()d with the current directory set to its
@@ -42,7 +43,7 @@
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "1.5.5"
+needs_sphinx = "4.5.0"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -80,9 +81,9 @@
root_doc = "index"
# General information about the project.
-project = "google-cloud-spanner"
-copyright = "2019, Google"
-author = "Google APIs"
+project = u"google-cloud-spanner"
+copyright = u"2025, Google, LLC"
+author = u"Google APIs"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -156,7 +157,7 @@
html_theme_options = {
"description": "Google Cloud Client Libraries for google-cloud-spanner",
"github_user": "googleapis",
- "github_repo": "python-spanner",
+ "github_repo": "google-cloud-python",
"github_banner": True,
"font_family": "'Roboto', Georgia, sans",
"head_font_family": "'Roboto', Georgia, serif",
@@ -266,13 +267,13 @@
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
- #'papersize': 'letterpaper',
+ # 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
- #'pointsize': '10pt',
+ # 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
- #'preamble': '',
+ # 'preamble': '',
# Latex figure (float) alignment
- #'figure_align': 'htbp',
+ # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
@@ -282,7 +283,7 @@
(
root_doc,
"google-cloud-spanner.tex",
- "google-cloud-spanner Documentation",
+ u"google-cloud-spanner Documentation",
author,
"manual",
)
diff --git a/google/cloud/spanner_admin_database_v1/gapic_version.py b/google/cloud/spanner_admin_database_v1/gapic_version.py
index 17acb3026a..992322a033 100644
--- a/google/cloud/spanner_admin_database_v1/gapic_version.py
+++ b/google/cloud/spanner_admin_database_v1/gapic_version.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,4 +13,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-__version__ = "3.59.0" # {x-release-please-version}
+__version__ = "3.60.0" # {x-release-please-version}
diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py
index 689f6afe96..16a075d983 100644
--- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py
+++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py
@@ -81,9 +81,10 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is mutually exclusive with credentials.
+ This argument is mutually exclusive with credentials. This argument will be
+ removed in the next major version of this library.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py
index 8f31a1fb98..0888d9af16 100644
--- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py
+++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py
@@ -169,9 +169,10 @@ def __init__(
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if a ``channel`` instance is provided.
- credentials_file (Optional[str]): A file with credentials that can
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if a ``channel`` instance is provided.
+ This argument will be removed in the next major version of this library.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if a ``channel`` instance is provided.
channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
@@ -276,6 +277,7 @@ def __init__(
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
@@ -305,9 +307,10 @@ def create_channel(
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is mutually exclusive with credentials.
+ This argument is mutually exclusive with credentials. This argument will be
+ removed in the next major version of this library.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py
index 5171d84d40..145c6ebf03 100644
--- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py
+++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py
@@ -166,8 +166,9 @@ def create_channel(
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be
+ removed in the next major version of this library.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
@@ -218,9 +219,10 @@ def __init__(
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if a ``channel`` instance is provided.
- credentials_file (Optional[str]): A file with credentials that can
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if a ``channel`` instance is provided.
+ This argument will be removed in the next major version of this library.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
@@ -325,6 +327,7 @@ def __init__(
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py
index df70fc5636..dfec442041 100644
--- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py
+++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/rest.py
@@ -1622,9 +1622,10 @@ def __init__(
are specified, the client will attempt to ascertain the
credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided.
+ This argument is ignored if ``channel`` is provided. This argument will be
+ removed in the next major version of this library.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
diff --git a/google/cloud/spanner_admin_instance_v1/gapic_version.py b/google/cloud/spanner_admin_instance_v1/gapic_version.py
index 17acb3026a..992322a033 100644
--- a/google/cloud/spanner_admin_instance_v1/gapic_version.py
+++ b/google/cloud/spanner_admin_instance_v1/gapic_version.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,4 +13,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-__version__ = "3.59.0" # {x-release-please-version}
+__version__ = "3.60.0" # {x-release-please-version}
diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py
index 5a737b69f7..d8c055d60e 100644
--- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py
+++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py
@@ -75,9 +75,10 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is mutually exclusive with credentials.
+ This argument is mutually exclusive with credentials. This argument will be
+ removed in the next major version of this library.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py
index ee5b765210..844a86fcc0 100644
--- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py
+++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py
@@ -178,9 +178,10 @@ def __init__(
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if a ``channel`` instance is provided.
- credentials_file (Optional[str]): A file with credentials that can
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if a ``channel`` instance is provided.
+ This argument will be removed in the next major version of this library.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if a ``channel`` instance is provided.
channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
@@ -285,6 +286,7 @@ def __init__(
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
@@ -314,9 +316,10 @@ def create_channel(
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is mutually exclusive with credentials.
+ This argument is mutually exclusive with credentials. This argument will be
+ removed in the next major version of this library.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py
index f2df40d1f2..e6d2e48cb3 100644
--- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py
+++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py
@@ -175,8 +175,9 @@ def create_channel(
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be
+ removed in the next major version of this library.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
@@ -227,9 +228,10 @@ def __init__(
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if a ``channel`` instance is provided.
- credentials_file (Optional[str]): A file with credentials that can
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if a ``channel`` instance is provided.
+ This argument will be removed in the next major version of this library.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
@@ -334,6 +336,7 @@ def __init__(
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py
index ca32cafa99..feef4e8048 100644
--- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py
+++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/rest.py
@@ -1355,9 +1355,10 @@ def __init__(
are specified, the client will attempt to ascertain the
credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided.
+ This argument is ignored if ``channel`` is provided. This argument will be
+ removed in the next major version of this library.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
diff --git a/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py b/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py
index 1e1509d1c4..be1822b33c 100644
--- a/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py
+++ b/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py
@@ -1765,6 +1765,12 @@ class InstancePartition(proto.Message):
that are not yet in the ``READY`` state.
This field is a member of `oneof`_ ``compute_capacity``.
+ autoscaling_config (google.cloud.spanner_admin_instance_v1.types.AutoscalingConfig):
+ Optional. The autoscaling configuration. Autoscaling is
+ enabled if this field is set. When autoscaling is enabled,
+ fields in compute_capacity are treated as OUTPUT_ONLY fields
+ and reflect the current compute capacity allocated to the
+ instance partition.
state (google.cloud.spanner_admin_instance_v1.types.InstancePartition.State):
Output only. The current instance partition
state.
@@ -1848,6 +1854,11 @@ class State(proto.Enum):
number=6,
oneof="compute_capacity",
)
+ autoscaling_config: "AutoscalingConfig" = proto.Field(
+ proto.MESSAGE,
+ number=13,
+ message="AutoscalingConfig",
+ )
state: State = proto.Field(
proto.ENUM,
number=7,
diff --git a/google/cloud/spanner_dbapi/connection.py b/google/cloud/spanner_dbapi/connection.py
index db18f44067..111bc4cc1b 100644
--- a/google/cloud/spanner_dbapi/connection.py
+++ b/google/cloud/spanner_dbapi/connection.py
@@ -15,6 +15,7 @@
"""DB-API Connection for the Google Cloud Spanner."""
import warnings
+from google.api_core.client_options import ClientOptions
from google.api_core.exceptions import Aborted
from google.api_core.gapic_v1.client_info import ClientInfo
from google.auth.credentials import AnonymousCredentials
@@ -734,6 +735,7 @@ def connect(
client=None,
route_to_leader_enabled=True,
database_role=None,
+ experimental_host=None,
**kwargs,
):
"""Creates a connection to a Google Cloud Spanner database.
@@ -805,6 +807,10 @@ def connect(
client_options = None
if isinstance(credentials, AnonymousCredentials):
client_options = kwargs.get("client_options")
+ if experimental_host is not None:
+ project = "default"
+ credentials = AnonymousCredentials()
+ client_options = ClientOptions(api_endpoint=experimental_host)
client = spanner.Client(
project=project,
credentials=credentials,
diff --git a/google/cloud/spanner_dbapi/version.py b/google/cloud/spanner_dbapi/version.py
index 6fbb80eb90..ee7431572b 100644
--- a/google/cloud/spanner_dbapi/version.py
+++ b/google/cloud/spanner_dbapi/version.py
@@ -13,8 +13,8 @@
# limitations under the License.
import platform
-from google.cloud.spanner_v1 import gapic_version as package_version
PY_VERSION = platform.python_version()
-VERSION = package_version.__version__
+__version__ = "3.60.0"
+VERSION = __version__
DEFAULT_USER_AGENT = "gl-dbapi/" + VERSION
diff --git a/google/cloud/spanner_v1/_helpers.py b/google/cloud/spanner_v1/_helpers.py
index 00a69d462b..aa58c59199 100644
--- a/google/cloud/spanner_v1/_helpers.py
+++ b/google/cloud/spanner_v1/_helpers.py
@@ -20,6 +20,7 @@
import time
import base64
import threading
+import logging
from google.protobuf.struct_pb2 import ListValue
from google.protobuf.struct_pb2 import Value
@@ -29,16 +30,27 @@
from google.api_core import datetime_helpers
from google.api_core.exceptions import Aborted
from google.cloud._helpers import _date_from_iso8601_date
-from google.cloud.spanner_v1 import TypeCode
-from google.cloud.spanner_v1 import ExecuteSqlRequest
-from google.cloud.spanner_v1 import JsonObject, Interval
-from google.cloud.spanner_v1 import TransactionOptions
+from google.cloud.spanner_v1.types import ExecuteSqlRequest
+from google.cloud.spanner_v1.types import TransactionOptions
+from google.cloud.spanner_v1.data_types import JsonObject, Interval
from google.cloud.spanner_v1.request_id_header import with_request_id
+from google.cloud.spanner_v1.types import TypeCode
+
from google.rpc.error_details_pb2 import RetryInfo
try:
from opentelemetry.propagate import inject
from opentelemetry.propagators.textmap import Setter
+ from opentelemetry.semconv.resource import ResourceAttributes
+ from opentelemetry.resourcedetector import gcp_resource_detector
+ from opentelemetry.resourcedetector.gcp_resource_detector import (
+ GoogleCloudResourceDetector,
+ )
+
+ # Overwrite the requests timeout for the detector.
+ # This is necessary as the client will wait the full timeout if the
+ # code is not run in a GCP environment, with the location endpoints available.
+ gcp_resource_detector._TIMEOUT_SEC = 0.2
HAS_OPENTELEMETRY_INSTALLED = True
except ImportError:
@@ -55,6 +67,12 @@
+ "numeric has a whole component with precision {}"
)
+GOOGLE_CLOUD_REGION_GLOBAL = "global"
+
+log = logging.getLogger(__name__)
+
+_cloud_region: str = None
+
if HAS_OPENTELEMETRY_INSTALLED:
@@ -79,6 +97,33 @@ def set(self, carrier: List[Tuple[str, str]], key: str, value: str) -> None:
carrier.append((key, value))
+def _get_cloud_region() -> str:
+ """Get the location of the resource, caching the result.
+
+ Returns:
+ str: The location of the resource. If OpenTelemetry is not installed, returns a global region.
+ """
+ global _cloud_region
+ if _cloud_region is not None:
+ return _cloud_region
+
+ try:
+ detector = GoogleCloudResourceDetector()
+ resources = detector.detect()
+ if ResourceAttributes.CLOUD_REGION in resources.attributes:
+ _cloud_region = resources.attributes[ResourceAttributes.CLOUD_REGION]
+ else:
+ _cloud_region = GOOGLE_CLOUD_REGION_GLOBAL
+ except Exception as e:
+ log.warning(
+ "Failed to detect GCP resource location for Spanner metrics, defaulting to 'global'. Error: %s",
+ e,
+ )
+ _cloud_region = GOOGLE_CLOUD_REGION_GLOBAL
+
+ return _cloud_region
+
+
def _try_to_coerce_bytes(bytestring):
"""Try to coerce a byte string into the right thing based on Python
version and whether or not it is base64 encoded.
diff --git a/google/cloud/spanner_v1/_opentelemetry_tracing.py b/google/cloud/spanner_v1/_opentelemetry_tracing.py
index eafc983850..c95f896298 100644
--- a/google/cloud/spanner_v1/_opentelemetry_tracing.py
+++ b/google/cloud/spanner_v1/_opentelemetry_tracing.py
@@ -21,20 +21,16 @@
from google.cloud.spanner_v1 import SpannerClient
from google.cloud.spanner_v1 import gapic_version
from google.cloud.spanner_v1._helpers import (
+ _get_cloud_region,
_metadata_with_span_context,
)
-try:
- from opentelemetry import trace
- from opentelemetry.trace.status import Status, StatusCode
- from opentelemetry.semconv.attributes.otel_attributes import (
- OTEL_SCOPE_NAME,
- OTEL_SCOPE_VERSION,
- )
-
- HAS_OPENTELEMETRY_INSTALLED = True
-except ImportError:
- HAS_OPENTELEMETRY_INSTALLED = False
+from opentelemetry import trace
+from opentelemetry.trace.status import Status, StatusCode
+from opentelemetry.semconv.attributes.otel_attributes import (
+ OTEL_SCOPE_NAME,
+ OTEL_SCOPE_VERSION,
+)
from google.cloud.spanner_v1.metrics.metrics_capture import MetricsCapture
@@ -70,11 +66,6 @@ def trace_call(
if session:
session._last_use_time = datetime.now()
- if not (HAS_OPENTELEMETRY_INSTALLED and name):
- # Empty context manager. Users will have to check if the generated value is None or a span
- yield None
- return
-
tracer_provider = None
# By default enable_extended_tracing=True because in a bid to minimize
@@ -85,6 +76,7 @@ def trace_call(
enable_end_to_end_tracing = False
db_name = ""
+ cloud_region = None
if session and getattr(session, "_database", None):
db_name = session._database.name
@@ -98,6 +90,7 @@ def trace_call(
)
db_name = observability_options.get("db_name", db_name)
+ cloud_region = _get_cloud_region()
tracer = get_tracer(tracer_provider)
# Set base attributes that we know for every trace created
@@ -107,6 +100,7 @@ def trace_call(
"db.instance": db_name,
"net.host.name": SpannerClient.DEFAULT_ENDPOINT,
OTEL_SCOPE_NAME: TRACER_NAME,
+ "cloud.region": cloud_region,
OTEL_SCOPE_VERSION: TRACER_VERSION,
# Standard GCP attributes for OTel, attributes are used for internal purpose and are subjected to change
"gcp.client.service": "spanner",
@@ -117,6 +111,11 @@ def trace_call(
if extra_attributes:
attributes.update(extra_attributes)
+ if "request_options" in attributes:
+ request_options = attributes.pop("request_options")
+ if request_options and request_options.request_tag:
+ attributes["request.tag"] = request_options.request_tag
+
if extended_tracing_globally_disabled:
enable_extended_tracing = False
@@ -155,11 +154,8 @@ def trace_call(
def get_current_span():
- if not HAS_OPENTELEMETRY_INSTALLED:
- return None
return trace.get_current_span()
def add_span_event(span, event_name, event_attributes=None):
- if span:
- span.add_event(event_name, event_attributes)
+ span.add_event(event_name, event_attributes)
diff --git a/google/cloud/spanner_v1/client.py b/google/cloud/spanner_v1/client.py
index 6ebabbb34e..5f72905616 100644
--- a/google/cloud/spanner_v1/client.py
+++ b/google/cloud/spanner_v1/client.py
@@ -52,7 +52,6 @@
from google.cloud.spanner_v1._helpers import _metadata_with_prefix
from google.cloud.spanner_v1.instance import Instance
from google.cloud.spanner_v1.metrics.constants import (
- ENABLE_SPANNER_METRICS_ENV_VAR,
METRIC_EXPORT_INTERVAL_MS,
)
from google.cloud.spanner_v1.metrics.spanner_metrics_tracer_factory import (
@@ -75,7 +74,7 @@
_CLIENT_INFO = client_info.ClientInfo(client_library_version=__version__)
EMULATOR_ENV_VAR = "SPANNER_EMULATOR_HOST"
-ENABLE_BUILTIN_METRICS_ENV_VAR = "SPANNER_ENABLE_BUILTIN_METRICS"
+SPANNER_DISABLE_BUILTIN_METRICS_ENV_VAR = "SPANNER_DISABLE_BUILTIN_METRICS"
_EMULATOR_HOST_HTTP_SCHEME = (
"%s contains a http scheme. When used with a scheme it may cause gRPC's "
"DNS resolver to endlessly attempt to resolve. %s is intended to be used "
@@ -101,8 +100,8 @@ def _get_spanner_optimizer_statistics_package():
log = logging.getLogger(__name__)
-def _get_spanner_enable_builtin_metrics():
- return os.getenv(ENABLE_SPANNER_METRICS_ENV_VAR) == "true"
+def _get_spanner_enable_builtin_metrics_env():
+ return os.getenv(SPANNER_DISABLE_BUILTIN_METRICS_ENV_VAR) != "true"
class Client(ClientWithProject):
@@ -176,6 +175,15 @@ class Client(ClientWithProject):
or :class:`dict`
:param default_transaction_options: (Optional) Default options to use for all transactions.
+ :type experimental_host: str
+    :param experimental_host: (Optional) The endpoint for a Spanner experimental host deployment.
+ This is intended only for experimental host spanner endpoints.
+ If set, this will override the `api_endpoint` in `client_options`.
+
+ :type disable_builtin_metrics: bool
+ :param disable_builtin_metrics: (Optional) Default False. Set to True to disable
+ the Spanner built-in metrics collection and exporting.
+
:raises: :class:`ValueError
` if both ``read_only``
and ``admin`` are :data:`True`
"""
@@ -200,8 +208,11 @@ def __init__(
directed_read_options=None,
observability_options=None,
default_transaction_options: Optional[DefaultTransactionOptions] = None,
+ experimental_host=None,
+ disable_builtin_metrics=False,
):
self._emulator_host = _get_spanner_emulator_host()
+ self._experimental_host = experimental_host
if client_options and type(client_options) is dict:
self._client_options = google.api_core.client_options.from_dict(
@@ -212,6 +223,8 @@ def __init__(
if self._emulator_host:
credentials = AnonymousCredentials()
+ elif self._experimental_host:
+ credentials = AnonymousCredentials()
elif isinstance(credentials, AnonymousCredentials):
self._emulator_host = self._client_options.api_endpoint
@@ -240,7 +253,8 @@ def __init__(
warnings.warn(_EMULATOR_HOST_HTTP_SCHEME)
# Check flag to enable Spanner builtin metrics
if (
- _get_spanner_enable_builtin_metrics()
+ _get_spanner_enable_builtin_metrics_env()
+ and not disable_builtin_metrics
and HAS_GOOGLE_CLOUD_MONITORING_INSTALLED
):
meter_provider = metrics.NoOpMeterProvider()
@@ -324,6 +338,15 @@ def instance_admin_api(self):
client_options=self._client_options,
transport=transport,
)
+ elif self._experimental_host:
+ transport = InstanceAdminGrpcTransport(
+ channel=grpc.insecure_channel(target=self._experimental_host)
+ )
+ self._instance_admin_api = InstanceAdminClient(
+ client_info=self._client_info,
+ client_options=self._client_options,
+ transport=transport,
+ )
else:
self._instance_admin_api = InstanceAdminClient(
credentials=self.credentials,
@@ -345,6 +368,15 @@ def database_admin_api(self):
client_options=self._client_options,
transport=transport,
)
+ elif self._experimental_host:
+ transport = DatabaseAdminGrpcTransport(
+ channel=grpc.insecure_channel(target=self._experimental_host)
+ )
+ self._database_admin_api = DatabaseAdminClient(
+ client_info=self._client_info,
+ client_options=self._client_options,
+ transport=transport,
+ )
else:
self._database_admin_api = DatabaseAdminClient(
credentials=self.credentials,
@@ -485,6 +517,7 @@ def instance(
self._emulator_host,
labels,
processing_units,
+ self._experimental_host,
)
def list_instances(self, filter_="", page_size=None):
diff --git a/google/cloud/spanner_v1/database.py b/google/cloud/spanner_v1/database.py
index c5fc56bcc9..33c442602c 100644
--- a/google/cloud/spanner_v1/database.py
+++ b/google/cloud/spanner_v1/database.py
@@ -203,8 +203,11 @@ def __init__(
self._pool = pool
pool.bind(self)
+ is_experimental_host = self._instance.experimental_host is not None
- self._sessions_manager = DatabaseSessionsManager(self, pool)
+ self._sessions_manager = DatabaseSessionsManager(
+ self, pool, is_experimental_host
+ )
@classmethod
def from_pb(cls, database_pb, instance, pool=None):
@@ -449,6 +452,16 @@ def spanner_api(self):
client_info=client_info, transport=transport
)
return self._spanner_api
+ if self._instance.experimental_host is not None:
+ transport = SpannerGrpcTransport(
+ channel=grpc.insecure_channel(self._instance.experimental_host)
+ )
+ self._spanner_api = SpannerClient(
+ client_info=client_info,
+ transport=transport,
+ client_options=client_options,
+ )
+ return self._spanner_api
credentials = self._instance._client.credentials
if isinstance(credentials, google.auth.credentials.Scoped):
credentials = credentials.with_scopes((SPANNER_DATA_SCOPE,))
@@ -1012,8 +1025,14 @@ def run_in_transaction(self, func, *args, **kw):
reraises any non-ABORT exceptions raised by ``func``.
"""
observability_options = getattr(self, "observability_options", None)
+ transaction_tag = kw.get("transaction_tag")
+ extra_attributes = {}
+ if transaction_tag:
+ extra_attributes["transaction.tag"] = transaction_tag
+
with trace_call(
"CloudSpanner.Database.run_in_transaction",
+ extra_attributes=extra_attributes,
observability_options=observability_options,
), MetricsCapture():
# Sanity check: Is there a transaction already running?
diff --git a/google/cloud/spanner_v1/database_sessions_manager.py b/google/cloud/spanner_v1/database_sessions_manager.py
index aba32f21bd..bc0db1577c 100644
--- a/google/cloud/spanner_v1/database_sessions_manager.py
+++ b/google/cloud/spanner_v1/database_sessions_manager.py
@@ -62,9 +62,10 @@ class DatabaseSessionsManager(object):
_MAINTENANCE_THREAD_POLLING_INTERVAL = timedelta(minutes=10)
_MAINTENANCE_THREAD_REFRESH_INTERVAL = timedelta(days=7)
- def __init__(self, database, pool):
+ def __init__(self, database, pool, is_experimental_host: bool = False):
self._database = database
self._pool = pool
+ self._is_experimental_host = is_experimental_host
# Declare multiplexed session attributes. When a multiplexed session for the
# database session manager is created, a maintenance thread is initialized to
@@ -88,7 +89,7 @@ def get_session(self, transaction_type: TransactionType) -> Session:
session = (
self._get_multiplexed_session()
- if self._use_multiplexed(transaction_type)
+ if self._use_multiplexed(transaction_type) or self._is_experimental_host
else self._pool.get()
)
diff --git a/google/cloud/spanner_v1/gapic_version.py b/google/cloud/spanner_v1/gapic_version.py
index 17acb3026a..992322a033 100644
--- a/google/cloud/spanner_v1/gapic_version.py
+++ b/google/cloud/spanner_v1/gapic_version.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright 2022 Google LLC
+# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,4 +13,4 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-__version__ = "3.59.0" # {x-release-please-version}
+__version__ = "3.60.0" # {x-release-please-version}
diff --git a/google/cloud/spanner_v1/instance.py b/google/cloud/spanner_v1/instance.py
index a67e0e630b..0d05699728 100644
--- a/google/cloud/spanner_v1/instance.py
+++ b/google/cloud/spanner_v1/instance.py
@@ -122,6 +122,7 @@ def __init__(
emulator_host=None,
labels=None,
processing_units=None,
+ experimental_host=None,
):
self.instance_id = instance_id
self._client = client
@@ -142,6 +143,7 @@ def __init__(
self._node_count = processing_units // PROCESSING_UNITS_PER_NODE
self.display_name = display_name or instance_id
self.emulator_host = emulator_host
+ self.experimental_host = experimental_host
if labels is None:
labels = {}
self.labels = labels
diff --git a/google/cloud/spanner_v1/metrics/constants.py b/google/cloud/spanner_v1/metrics/constants.py
index a47aecc9ed..a5f709881b 100644
--- a/google/cloud/spanner_v1/metrics/constants.py
+++ b/google/cloud/spanner_v1/metrics/constants.py
@@ -20,7 +20,6 @@
GOOGLE_CLOUD_REGION_KEY = "cloud.region"
GOOGLE_CLOUD_REGION_GLOBAL = "global"
SPANNER_METHOD_PREFIX = "/google.spanner.v1."
-ENABLE_SPANNER_METRICS_ENV_VAR = "SPANNER_ENABLE_BUILTIN_METRICS"
# Monitored resource labels
MONITORED_RES_LABEL_KEY_PROJECT = "project_id"
diff --git a/google/cloud/spanner_v1/metrics/spanner_metrics_tracer_factory.py b/google/cloud/spanner_v1/metrics/spanner_metrics_tracer_factory.py
index 881a5bfca9..9566e61a28 100644
--- a/google/cloud/spanner_v1/metrics/spanner_metrics_tracer_factory.py
+++ b/google/cloud/spanner_v1/metrics/spanner_metrics_tracer_factory.py
@@ -18,20 +18,9 @@
from .metrics_tracer_factory import MetricsTracerFactory
import os
import logging
-from .constants import (
- SPANNER_SERVICE_NAME,
- GOOGLE_CLOUD_REGION_KEY,
- GOOGLE_CLOUD_REGION_GLOBAL,
-)
+from .constants import SPANNER_SERVICE_NAME
try:
- from opentelemetry.resourcedetector import gcp_resource_detector
-
- # Overwrite the requests timeout for the detector.
- # This is necessary as the client will wait the full timeout if the
- # code is not run in a GCP environment, with the location endpoints available.
- gcp_resource_detector._TIMEOUT_SEC = 0.2
-
import mmh3
logging.getLogger("opentelemetry.resourcedetector.gcp_resource_detector").setLevel(
@@ -44,6 +33,7 @@
from .metrics_tracer import MetricsTracer
from google.cloud.spanner_v1 import __version__
+from google.cloud.spanner_v1._helpers import _get_cloud_region
from uuid import uuid4
log = logging.getLogger(__name__)
@@ -86,7 +76,7 @@ def __new__(
cls._metrics_tracer_factory.set_client_hash(
cls._generate_client_hash(client_uid)
)
- cls._metrics_tracer_factory.set_location(cls._get_location())
+ cls._metrics_tracer_factory.set_location(_get_cloud_region())
cls._metrics_tracer_factory.gfe_enabled = gfe_enabled
if cls._metrics_tracer_factory.enabled != enabled:
@@ -153,28 +143,3 @@ def _generate_client_hash(client_uid: str) -> str:
# Return as 6 digit zero padded hex string
return f"{sig_figs:06x}"
-
- @staticmethod
- def _get_location() -> str:
- """Get the location of the resource.
-
- In case of any error during detection, this method will log a warning
- and default to the "global" location.
-
- Returns:
- str: The location of the resource. If OpenTelemetry is not installed, returns a global region.
- """
- if not HAS_OPENTELEMETRY_INSTALLED:
- return GOOGLE_CLOUD_REGION_GLOBAL
- try:
- detector = gcp_resource_detector.GoogleCloudResourceDetector()
- resources = detector.detect()
-
- if GOOGLE_CLOUD_REGION_KEY in resources.attributes:
- return resources.attributes[GOOGLE_CLOUD_REGION_KEY]
- except Exception as e:
- log.warning(
- "Failed to detect GCP resource location for Spanner metrics, defaulting to 'global'. Error: %s",
- e,
- )
- return GOOGLE_CLOUD_REGION_GLOBAL
diff --git a/google/cloud/spanner_v1/request_id_header.py b/google/cloud/spanner_v1/request_id_header.py
index b540b725f5..95c25b94f7 100644
--- a/google/cloud/spanner_v1/request_id_header.py
+++ b/google/cloud/spanner_v1/request_id_header.py
@@ -43,7 +43,7 @@ def with_request_id(
all_metadata = (other_metadata or []).copy()
all_metadata.append((REQ_ID_HEADER_KEY, req_id))
- if span is not None:
+ if span:
span.set_attribute(X_GOOG_SPANNER_REQUEST_ID_SPAN_ATTR, req_id)
return all_metadata
diff --git a/google/cloud/spanner_v1/services/spanner/async_client.py b/google/cloud/spanner_v1/services/spanner/async_client.py
index c48b62d532..b197172a8a 100644
--- a/google/cloud/spanner_v1/services/spanner/async_client.py
+++ b/google/cloud/spanner_v1/services/spanner/async_client.py
@@ -49,6 +49,7 @@
from google.cloud.spanner_v1.services.spanner import pagers
from google.cloud.spanner_v1.types import commit_response
+from google.cloud.spanner_v1.types import location
from google.cloud.spanner_v1.types import mutation
from google.cloud.spanner_v1.types import result_set
from google.cloud.spanner_v1.types import spanner
@@ -477,10 +478,11 @@ async def sample_batch_create_sessions():
should not be set.
session_count (:class:`int`):
Required. The number of sessions to be created in this
- batch call. The API can return fewer than the requested
- number of sessions. If a specific number of sessions are
- desired, the client can make additional calls to
- ``BatchCreateSessions`` (adjusting
+ batch call. At least one session is created. The API can
+ return fewer than the requested number of sessions. If a
+ specific number of sessions are desired, the client can
+ make additional calls to ``BatchCreateSessions``
+ (adjusting
[session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count]
as necessary).
diff --git a/google/cloud/spanner_v1/services/spanner/client.py b/google/cloud/spanner_v1/services/spanner/client.py
index 82dbf8375e..d542dd89ef 100644
--- a/google/cloud/spanner_v1/services/spanner/client.py
+++ b/google/cloud/spanner_v1/services/spanner/client.py
@@ -64,6 +64,7 @@
from google.cloud.spanner_v1.services.spanner import pagers
from google.cloud.spanner_v1.types import commit_response
+from google.cloud.spanner_v1.types import location
from google.cloud.spanner_v1.types import mutation
from google.cloud.spanner_v1.types import result_set
from google.cloud.spanner_v1.types import spanner
@@ -922,10 +923,11 @@ def sample_batch_create_sessions():
should not be set.
session_count (int):
Required. The number of sessions to be created in this
- batch call. The API can return fewer than the requested
- number of sessions. If a specific number of sessions are
- desired, the client can make additional calls to
- ``BatchCreateSessions`` (adjusting
+ batch call. At least one session is created. The API can
+ return fewer than the requested number of sessions. If a
+ specific number of sessions are desired, the client can
+ make additional calls to ``BatchCreateSessions``
+ (adjusting
[session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count]
as necessary).
diff --git a/google/cloud/spanner_v1/services/spanner/transports/base.py b/google/cloud/spanner_v1/services/spanner/transports/base.py
index d1dfe07291..3e68439cd7 100644
--- a/google/cloud/spanner_v1/services/spanner/transports/base.py
+++ b/google/cloud/spanner_v1/services/spanner/transports/base.py
@@ -76,9 +76,10 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is mutually exclusive with credentials.
+ This argument is mutually exclusive with credentials. This argument will be
+ removed in the next major version of this library.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/google/cloud/spanner_v1/services/spanner/transports/grpc.py
index 8b377d7725..0d0613152f 100644
--- a/google/cloud/spanner_v1/services/spanner/transports/grpc.py
+++ b/google/cloud/spanner_v1/services/spanner/transports/grpc.py
@@ -160,9 +160,10 @@ def __init__(
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if a ``channel`` instance is provided.
- credentials_file (Optional[str]): A file with credentials that can
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if a ``channel`` instance is provided.
+ This argument will be removed in the next major version of this library.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if a ``channel`` instance is provided.
channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]):
@@ -267,6 +268,7 @@ def __init__(
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
@@ -303,9 +305,10 @@ def create_channel(
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is mutually exclusive with credentials.
+ This argument is mutually exclusive with credentials. This argument will be
+ removed in the next major version of this library.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py
index 2c6cec52a9..4f492c7f44 100644
--- a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py
+++ b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py
@@ -156,8 +156,9 @@ def create_channel(
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
- be loaded with :func:`google.auth.load_credentials_from_file`.
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be
+ removed in the next major version of this library.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
@@ -209,9 +210,10 @@ def __init__(
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if a ``channel`` instance is provided.
- credentials_file (Optional[str]): A file with credentials that can
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if a ``channel`` instance is provided.
+ This argument will be removed in the next major version of this library.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
@@ -315,6 +317,7 @@ def __init__(
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
diff --git a/google/cloud/spanner_v1/services/spanner/transports/rest.py b/google/cloud/spanner_v1/services/spanner/transports/rest.py
index 7b49a0d76a..721e9929b3 100644
--- a/google/cloud/spanner_v1/services/spanner/transports/rest.py
+++ b/google/cloud/spanner_v1/services/spanner/transports/rest.py
@@ -933,9 +933,10 @@ def __init__(
are specified, the client will attempt to ascertain the
credentials from the environment.
- credentials_file (Optional[str]): A file with credentials that can
+ credentials_file (Optional[str]): Deprecated. A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
- This argument is ignored if ``channel`` is provided.
+ This argument is ignored if ``channel`` is provided. This argument will be
+ removed in the next major version of this library.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client
diff --git a/google/cloud/spanner_v1/session.py b/google/cloud/spanner_v1/session.py
index 7b6634c728..4c29014e15 100644
--- a/google/cloud/spanner_v1/session.py
+++ b/google/cloud/spanner_v1/session.py
@@ -251,11 +251,9 @@ def exists(self):
span,
),
)
- if span:
- span.set_attribute("session_found", True)
+ span.set_attribute("session_found", True)
except NotFound:
- if span:
- span.set_attribute("session_found", False)
+ span.set_attribute("session_found", False)
return False
return True
@@ -317,18 +315,21 @@ def ping(self):
"""
if self._session_id is None:
raise ValueError("Session ID not set by back-end")
+
database = self._database
api = database.spanner_api
- request = ExecuteSqlRequest(session=self.name, sql="SELECT 1")
- api.execute_sql(
- request=request,
- metadata=database.metadata_with_request_id(
- database._next_nth_request,
- 1,
- _metadata_with_prefix(database.name),
- ),
- )
- self._last_use_time = datetime.now()
+
+ with trace_call("CloudSpanner.Session.ping", self) as span:
+ request = ExecuteSqlRequest(session=self.name, sql="SELECT 1")
+ api.execute_sql(
+ request=request,
+ metadata=database.metadata_with_request_id(
+ database._next_nth_request,
+ 1,
+ _metadata_with_prefix(database.name),
+ span,
+ ),
+ )
def snapshot(self, **kw):
"""Create a snapshot to perform a set of reads with shared staleness.
@@ -531,9 +532,14 @@ def run_in_transaction(self, func, *args, **kw):
database = self._database
log_commit_stats = database.log_commit_stats
+ extra_attributes = {}
+ if transaction_tag:
+ extra_attributes["transaction.tag"] = transaction_tag
+
with trace_call(
"CloudSpanner.Session.run_in_transaction",
self,
+ extra_attributes=extra_attributes,
observability_options=getattr(database, "observability_options", None),
) as span, MetricsCapture():
attempts: int = 0
@@ -566,20 +572,18 @@ def run_in_transaction(self, func, *args, **kw):
except Aborted as exc:
previous_transaction_id = txn._transaction_id
- if span:
- delay_seconds = _get_retry_delay(
- exc.errors[0],
- attempts,
- default_retry_delay=default_retry_delay,
- )
- attributes = dict(delay_seconds=delay_seconds, cause=str(exc))
- attributes.update(span_attributes)
- add_span_event(
- span,
- "Transaction was aborted in user operation, retrying",
- attributes,
- )
-
+ delay_seconds = _get_retry_delay(
+ exc.errors[0],
+ attempts,
+ default_retry_delay=default_retry_delay,
+ )
+ attributes = dict(delay_seconds=delay_seconds, cause=str(exc))
+ attributes.update(span_attributes)
+ add_span_event(
+ span,
+ "Transaction was aborted in user operation, retrying",
+ attributes,
+ )
_delay_until_retry(
exc, deadline, attempts, default_retry_delay=default_retry_delay
)
@@ -611,20 +615,18 @@ def run_in_transaction(self, func, *args, **kw):
except Aborted as exc:
previous_transaction_id = txn._transaction_id
- if span:
- delay_seconds = _get_retry_delay(
- exc.errors[0],
- attempts,
- default_retry_delay=default_retry_delay,
- )
- attributes = dict(delay_seconds=delay_seconds)
- attributes.update(span_attributes)
- add_span_event(
- span,
- "Transaction was aborted during commit, retrying",
- attributes,
- )
-
+ delay_seconds = _get_retry_delay(
+ exc.errors[0],
+ attempts,
+ default_retry_delay=default_retry_delay,
+ )
+ attributes = dict(delay_seconds=delay_seconds)
+ attributes.update(span_attributes)
+ add_span_event(
+ span,
+ "Transaction was aborted during commit, retrying",
+ attributes,
+ )
_delay_until_retry(
exc, deadline, attempts, default_retry_delay=default_retry_delay
)
diff --git a/google/cloud/spanner_v1/snapshot.py b/google/cloud/spanner_v1/snapshot.py
index 5633cd4486..46b0f5af8d 100644
--- a/google/cloud/spanner_v1/snapshot.py
+++ b/google/cloud/spanner_v1/snapshot.py
@@ -409,7 +409,11 @@ def read(
method=streaming_read_method,
request=read_request,
metadata=metadata,
- trace_attributes={"table_id": table, "columns": columns},
+ trace_attributes={
+ "table_id": table,
+ "columns": columns,
+ "request_options": request_options,
+ },
column_info=column_info,
lazy_decode=lazy_decode,
)
@@ -601,7 +605,7 @@ def execute_sql(
method=execute_streaming_sql_method,
request=execute_sql_request,
metadata=metadata,
- trace_attributes={"db.statement": sql},
+ trace_attributes={"db.statement": sql, "request_options": request_options},
column_info=column_info,
lazy_decode=lazy_decode,
)
diff --git a/google/cloud/spanner_v1/testing/database_test.py b/google/cloud/spanner_v1/testing/database_test.py
index 5af89fea42..f3f71d6e85 100644
--- a/google/cloud/spanner_v1/testing/database_test.py
+++ b/google/cloud/spanner_v1/testing/database_test.py
@@ -86,6 +86,18 @@ def spanner_api(self):
transport=transport,
)
return self._spanner_api
+ if self._instance.experimental_host is not None:
+ channel = grpc.insecure_channel(self._instance.experimental_host)
+ self._x_goog_request_id_interceptor = XGoogRequestIDHeaderInterceptor()
+ self._interceptors.append(self._x_goog_request_id_interceptor)
+ channel = grpc.intercept_channel(channel, *self._interceptors)
+ transport = SpannerGrpcTransport(channel=channel)
+ self._spanner_api = SpannerClient(
+ client_info=client_info,
+ transport=transport,
+ client_options=client_options,
+ )
+ return self._spanner_api
credentials = client.credentials
if isinstance(credentials, google.auth.credentials.Scoped):
credentials = credentials.with_scopes((SPANNER_DATA_SCOPE,))
diff --git a/google/cloud/spanner_v1/transaction.py b/google/cloud/spanner_v1/transaction.py
index 5dd54eafe1..b9e14a0040 100644
--- a/google/cloud/spanner_v1/transaction.py
+++ b/google/cloud/spanner_v1/transaction.py
@@ -479,7 +479,10 @@ def execute_update(
request_options = RequestOptions(request_options)
request_options.transaction_tag = self.transaction_tag
- trace_attributes = {"db.statement": dml}
+ trace_attributes = {
+ "db.statement": dml,
+ "request_options": request_options,
+ }
# If this request begins the transaction, we need to lock
# the transaction until the transaction ID is updated.
@@ -629,7 +632,8 @@ def batch_update(
trace_attributes = {
# Get just the queries from the DML statement batch
- "db.statement": ";".join([statement.sql for statement in parsed])
+ "db.statement": ";".join([statement.sql for statement in parsed]),
+ "request_options": request_options,
}
# If this request begins the transaction, we need to lock
diff --git a/google/cloud/spanner_v1/types/__init__.py b/google/cloud/spanner_v1/types/__init__.py
index e2f87d65da..5a7ded16dd 100644
--- a/google/cloud/spanner_v1/types/__init__.py
+++ b/google/cloud/spanner_v1/types/__init__.py
@@ -23,11 +23,21 @@
KeyRange,
KeySet,
)
+from .location import (
+ CacheUpdate,
+ Group,
+ KeyRecipe,
+ Range,
+ RecipeList,
+ RoutingHint,
+ Tablet,
+)
from .mutation import (
Mutation,
)
from .query_plan import (
PlanNode,
+ QueryAdvisorResult,
QueryPlan,
)
from .result_set import (
@@ -80,8 +90,16 @@
"CommitResponse",
"KeyRange",
"KeySet",
+ "CacheUpdate",
+ "Group",
+ "KeyRecipe",
+ "Range",
+ "RecipeList",
+ "RoutingHint",
+ "Tablet",
"Mutation",
"PlanNode",
+ "QueryAdvisorResult",
"QueryPlan",
"PartialResultSet",
"ResultSet",
diff --git a/google/cloud/spanner_v1/types/location.py b/google/cloud/spanner_v1/types/location.py
new file mode 100644
index 0000000000..1749e87aef
--- /dev/null
+++ b/google/cloud/spanner_v1/types/location.py
@@ -0,0 +1,677 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from __future__ import annotations
+
+from typing import MutableMapping, MutableSequence
+
+import proto # type: ignore
+
+from google.cloud.spanner_v1.types import type as gs_type
+from google.protobuf import struct_pb2 # type: ignore
+
+
+__protobuf__ = proto.module(
+ package="google.spanner.v1",
+ manifest={
+ "Range",
+ "Tablet",
+ "Group",
+ "KeyRecipe",
+ "RecipeList",
+ "CacheUpdate",
+ "RoutingHint",
+ },
+)
+
+
+class Range(proto.Message):
+ r"""A ``Range`` represents a range of keys in a database. The keys
+ themselves are encoded in "sortable string format", also known as
+ ssformat. Consult Spanner's open source client libraries for details
+ on the encoding.
+
+ Each range represents a contiguous range of rows, possibly from
+ multiple tables/indexes. Each range is associated with a single
+ paxos group (known as a "group" throughout this API), a split (which
+ names the exact range within the group), and a generation that can
+ be used to determine whether a given ``Range`` represents a newer or
+ older location for the key range.
+
+ Attributes:
+ start_key (bytes):
+ The start key of the range, inclusive.
+ Encoded in "sortable string format" (ssformat).
+ limit_key (bytes):
+ The limit key of the range, exclusive.
+ Encoded in "sortable string format" (ssformat).
+ group_uid (int):
+ The UID of the paxos group where this range is stored. UIDs
+ are unique within the database. References
+ ``Group.group_uid``.
+ split_id (int):
+ A group can store multiple ranges of keys. Each key range is
+ named by an ID (the split ID). Within a group, split IDs are
+ unique. The ``split_id`` names the exact split in
+ ``group_uid`` where this range is stored.
+ generation (bytes):
+ ``generation`` indicates the freshness of the range
+ information contained in this proto. Generations can be
+ compared lexicographically; if generation A is greater than
+ generation B, then the ``Range`` corresponding to A is newer
+ than the ``Range`` corresponding to B, and should be used
+ preferentially.
+ """
+
+ start_key: bytes = proto.Field(
+ proto.BYTES,
+ number=1,
+ )
+ limit_key: bytes = proto.Field(
+ proto.BYTES,
+ number=2,
+ )
+ group_uid: int = proto.Field(
+ proto.UINT64,
+ number=3,
+ )
+ split_id: int = proto.Field(
+ proto.UINT64,
+ number=4,
+ )
+ generation: bytes = proto.Field(
+ proto.BYTES,
+ number=5,
+ )
+
+
+class Tablet(proto.Message):
+ r"""A ``Tablet`` represents a single replica of a ``Group``. A tablet is
+ served by a single server at a time, and can move between servers
+ due to server death or simply load balancing.
+
+ Attributes:
+ tablet_uid (int):
+ The UID of the tablet, unique within the database. Matches
+ the ``tablet_uids`` and ``leader_tablet_uid`` fields in
+ ``Group``.
+ server_address (str):
+ The address of the server that is serving
+ this tablet -- either an IP address or DNS
+ hostname and a port number.
+ location (str):
+ Where this tablet is located. In the Spanner
+ managed service, this is the name of a region,
+ such as "us-central1". In Spanner Omni, this is
+ a previously created location.
+ role (google.cloud.spanner_v1.types.Tablet.Role):
+ The role of the tablet.
+ incarnation (bytes):
+ ``incarnation`` indicates the freshness of the tablet
+ information contained in this proto. Incarnations can be
+ compared lexicographically; if incarnation A is greater than
+ incarnation B, then the ``Tablet`` corresponding to A is
+ newer than the ``Tablet`` corresponding to B, and should be
+ used preferentially.
+ distance (int):
+ Distances help the client pick the closest tablet out of the
+ list of tablets for a given request. Tablets with lower
+ distances should generally be preferred. Tablets with the
+ same distance are approximately equally close; the client
+ can choose arbitrarily.
+
+ Distances do not correspond precisely to expected latency,
+ geographical distance, or anything else. Distances should be
+ compared only between tablets of the same group; they are
+ not meaningful between different groups.
+
+ A value of zero indicates that the tablet may be in the same
+ zone as the client, and have minimum network latency. A
+ value less than or equal to five indicates that the tablet
+ is thought to be in the same region as the client, and may
+ have a few milliseconds of network latency. Values greater
+ than five are most likely in a different region, with
+ non-trivial network latency.
+
+ Clients should use the following algorithm:
+
+ - If the request is using a directed read, eliminate any
+ tablets that do not match the directed read's target zone
+ and/or replica type.
+ - (Read-write transactions only) Choose leader tablet if it
+            has a distance <=5.
+ - Group and sort tablets by distance. Choose a random tablet
+ with the lowest distance. If the request is not a directed
+ read, only consider replicas with distances <=5.
+ - Send the request to the fallback endpoint.
+
+ The tablet picked by this algorithm may be skipped, either
+ because it is marked as ``skip`` by the server or because
+ the corresponding server is unreachable, flow controlled,
+ etc. Skipped tablets should be added to the
+ ``skipped_tablet_uid`` field in ``RoutingHint``; the
+ algorithm above should then be re-run without including the
+ skipped tablet(s) to pick the next best tablet.
+ skip (bool):
+ If true, the tablet should not be chosen by the client.
+ Typically, this signals that the tablet is unhealthy in some
+ way. Tablets with ``skip`` set to true should be reported
+ back to the server in ``RoutingHint.skipped_tablet_uid``;
+ this cues the server to send updated information for this
+ tablet should it become usable again.
+ """
+
+ class Role(proto.Enum):
+ r"""Indicates the role of the tablet.
+
+ Values:
+ ROLE_UNSPECIFIED (0):
+ Not specified.
+ READ_WRITE (1):
+ The tablet can perform reads and (if elected
+ leader) writes.
+ READ_ONLY (2):
+ The tablet can only perform reads.
+ """
+ ROLE_UNSPECIFIED = 0
+ READ_WRITE = 1
+ READ_ONLY = 2
+
+ tablet_uid: int = proto.Field(
+ proto.UINT64,
+ number=1,
+ )
+ server_address: str = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ location: str = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+ role: Role = proto.Field(
+ proto.ENUM,
+ number=4,
+ enum=Role,
+ )
+ incarnation: bytes = proto.Field(
+ proto.BYTES,
+ number=5,
+ )
+ distance: int = proto.Field(
+ proto.UINT32,
+ number=6,
+ )
+ skip: bool = proto.Field(
+ proto.BOOL,
+ number=7,
+ )
+
+
+class Group(proto.Message):
+ r"""A ``Group`` represents a paxos group in a database. A group is a set
+ of tablets that are replicated across multiple servers. Groups may
+ have a leader tablet. Groups store one (or sometimes more) ranges of
+ keys.
+
+ Attributes:
+ group_uid (int):
+ The UID of the paxos group, unique within the database.
+ Matches the ``group_uid`` field in ``Range``.
+ tablets (MutableSequence[google.cloud.spanner_v1.types.Tablet]):
+ A list of tablets that are part of the group. Note that this
+ list may not be exhaustive; it will only include tablets the
+ server considers useful to the client. The returned list is
+ ordered ascending by distance.
+
+ Tablet UIDs reference ``Tablet.tablet_uid``.
+ leader_index (int):
+ The last known leader tablet of the group as an index into
+ ``tablets``. May be negative if the group has no known
+ leader.
+ generation (bytes):
+ ``generation`` indicates the freshness of the group
+ information (including leader information) contained in this
+ proto. Generations can be compared lexicographically; if
+ generation A is greater than generation B, then the
+ ``Group`` corresponding to A is newer than the ``Group``
+ corresponding to B, and should be used preferentially.
+ """
+
+ group_uid: int = proto.Field(
+ proto.UINT64,
+ number=1,
+ )
+ tablets: MutableSequence["Tablet"] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=2,
+ message="Tablet",
+ )
+ leader_index: int = proto.Field(
+ proto.INT32,
+ number=3,
+ )
+ generation: bytes = proto.Field(
+ proto.BYTES,
+ number=4,
+ )
+
+
+class KeyRecipe(proto.Message):
+ r"""A ``KeyRecipe`` provides the metadata required to translate reads,
+ mutations, and queries into a byte array in "sortable string format"
+    (ssformat) that can be used with ``Range``\ s to route requests. Note
+ that the client *must* tolerate ``KeyRecipe``\ s that appear to be
+ invalid, since the ``KeyRecipe`` format may change over time.
+ Requests with invalid ``KeyRecipe``\ s should be routed to a default
+ server.
+
+ This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time.
+ Setting any member of the oneof automatically clears all other
+ members.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ table_name (str):
+ A table name, matching the name from the
+ database schema.
+
+ This field is a member of `oneof`_ ``target``.
+ index_name (str):
+ An index name, matching the name from the
+ database schema.
+
+ This field is a member of `oneof`_ ``target``.
+ operation_uid (int):
+ The UID of a query, matching the UID from ``RoutingHint``.
+
+ This field is a member of `oneof`_ ``target``.
+ part (MutableSequence[google.cloud.spanner_v1.types.KeyRecipe.Part]):
+ Parts are in the order they should appear in
+ the encoded key.
+ """
+
+ class Part(proto.Message):
+ r"""An ssformat key is composed of a sequence of tag numbers and key
+ column values. ``Part`` represents a single tag or key column value.
+
+ This message has `oneof`_ fields (mutually exclusive fields).
+ For each oneof, at most one member field can be set at the same time.
+ Setting any member of the oneof automatically clears all other
+ members.
+
+ .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields
+
+ Attributes:
+ tag (int):
+ If non-zero, ``tag`` is the only field present in this
+ ``Part``. The part is encoded by appending ``tag`` to the
+ ssformat key.
+ order (google.cloud.spanner_v1.types.KeyRecipe.Part.Order):
+ Whether the key column is sorted ascending or descending.
+ Only present if ``tag`` is zero.
+ null_order (google.cloud.spanner_v1.types.KeyRecipe.Part.NullOrder):
+ How NULLs are represented in the encoded key part. Only
+ present if ``tag`` is zero.
+ type_ (google.cloud.spanner_v1.types.Type):
+ The type of the key part. Only present if ``tag`` is zero.
+ identifier (str):
+ ``identifier`` is the name of the column or query parameter.
+
+ This field is a member of `oneof`_ ``value_type``.
+ value (google.protobuf.struct_pb2.Value):
+ The constant value of the key part.
+ It is present when query uses a constant as a
+ part of the key.
+
+ This field is a member of `oneof`_ ``value_type``.
+ random (bool):
+ If true, the client is responsible to fill in
+ the value randomly. It's relevant only for the
+ INT64 type.
+
+ This field is a member of `oneof`_ ``value_type``.
+ struct_identifiers (MutableSequence[int]):
+ It is a repeated field to support fetching key columns from
+ nested structs, such as ``STRUCT`` query parameters.
+ """
+
+ class Order(proto.Enum):
+ r"""The remaining fields encode column values.
+
+ Values:
+ ORDER_UNSPECIFIED (0):
+ Default value, equivalent to ``ASCENDING``.
+ ASCENDING (1):
+ The key is ascending - corresponds to ``ASC`` in the schema
+ definition.
+ DESCENDING (2):
+ The key is descending - corresponds to ``DESC`` in the
+ schema definition.
+ """
+ ORDER_UNSPECIFIED = 0
+ ASCENDING = 1
+ DESCENDING = 2
+
+ class NullOrder(proto.Enum):
+ r"""The null order of the key column. This dictates where NULL values
+ sort in the sorted order. Note that columns which are ``NOT NULL``
+ can have a special encoding.
+
+ Values:
+ NULL_ORDER_UNSPECIFIED (0):
+ Default value. This value is unused.
+ NULLS_FIRST (1):
+ NULL values sort before any non-NULL values.
+ NULLS_LAST (2):
+ NULL values sort after any non-NULL values.
+ NOT_NULL (3):
+ The column does not support NULL values.
+ """
+ NULL_ORDER_UNSPECIFIED = 0
+ NULLS_FIRST = 1
+ NULLS_LAST = 2
+ NOT_NULL = 3
+
+ tag: int = proto.Field(
+ proto.UINT32,
+ number=1,
+ )
+ order: "KeyRecipe.Part.Order" = proto.Field(
+ proto.ENUM,
+ number=2,
+ enum="KeyRecipe.Part.Order",
+ )
+ null_order: "KeyRecipe.Part.NullOrder" = proto.Field(
+ proto.ENUM,
+ number=3,
+ enum="KeyRecipe.Part.NullOrder",
+ )
+ type_: gs_type.Type = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ message=gs_type.Type,
+ )
+ identifier: str = proto.Field(
+ proto.STRING,
+ number=5,
+ oneof="value_type",
+ )
+ value: struct_pb2.Value = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ oneof="value_type",
+ message=struct_pb2.Value,
+ )
+ random: bool = proto.Field(
+ proto.BOOL,
+ number=8,
+ oneof="value_type",
+ )
+ struct_identifiers: MutableSequence[int] = proto.RepeatedField(
+ proto.INT32,
+ number=7,
+ )
+
+ table_name: str = proto.Field(
+ proto.STRING,
+ number=1,
+ oneof="target",
+ )
+ index_name: str = proto.Field(
+ proto.STRING,
+ number=2,
+ oneof="target",
+ )
+ operation_uid: int = proto.Field(
+ proto.UINT64,
+ number=3,
+ oneof="target",
+ )
+ part: MutableSequence[Part] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=4,
+ message=Part,
+ )
+
+
+class RecipeList(proto.Message):
+ r"""A ``RecipeList`` contains a list of ``KeyRecipe``\ s, which share
+ the same schema generation.
+
+ Attributes:
+ schema_generation (bytes):
+ The schema generation of the recipes. To be sent to the
+ server in ``RoutingHint.schema_generation`` whenever one of
+ the recipes is used. ``schema_generation`` values are
+ comparable with each other; if generation A compares greater
+ than generation B, then A is a more recent schema than B.
+ Clients should in general aim to cache only the latest
+ schema generation, and discard more stale recipes.
+ recipe (MutableSequence[google.cloud.spanner_v1.types.KeyRecipe]):
+ A list of recipes to be cached.
+ """
+
+ schema_generation: bytes = proto.Field(
+ proto.BYTES,
+ number=1,
+ )
+ recipe: MutableSequence["KeyRecipe"] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=3,
+ message="KeyRecipe",
+ )
+
+
+class CacheUpdate(proto.Message):
+ r"""A ``CacheUpdate`` expresses a set of changes the client should
+ incorporate into its location cache. These changes may or may not be
+ newer than what the client has in its cache, and should be discarded
+ if necessary. ``CacheUpdate``\ s can be obtained in response to
+ requests that included a ``RoutingHint`` field, but may also be
+ obtained by explicit location-fetching RPCs which may be added in
+ the future.
+
+ Attributes:
+ database_id (int):
+ An internal ID for the database. Database
+ names can be reused if a database is deleted and
+ re-created. Each time the database is
+ re-created, it will get a new database ID, which
+ will never be re-used for any other database.
+ range_ (MutableSequence[google.cloud.spanner_v1.types.Range]):
+ A list of ranges to be cached.
+ group (MutableSequence[google.cloud.spanner_v1.types.Group]):
+ A list of groups to be cached.
+ key_recipes (google.cloud.spanner_v1.types.RecipeList):
+ A list of recipes to be cached.
+ """
+
+ database_id: int = proto.Field(
+ proto.UINT64,
+ number=1,
+ )
+ range_: MutableSequence["Range"] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=2,
+ message="Range",
+ )
+ group: MutableSequence["Group"] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=3,
+ message="Group",
+ )
+ key_recipes: "RecipeList" = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ message="RecipeList",
+ )
+
+
+class RoutingHint(proto.Message):
+ r"""``RoutingHint`` can be optionally added to location-aware Spanner
+ requests. It gives the server hints that can be used to route the
+ request to an appropriate server, potentially significantly
+ decreasing latency and improving throughput. To achieve improved
+ performance, most fields must be filled in with accurate values.
+
+ The presence of a valid ``RoutingHint`` tells the server that the
+ client is location-aware.
+
+ ``RoutingHint`` does not change the semantics of the request; it is
+ purely a performance hint; the request will perform the same actions
+ on the database's data as if ``RoutingHint`` were not present.
+ However, if the ``RoutingHint`` is incomplete or incorrect, the
+ response may include a ``CacheUpdate`` the client can use to correct
+ its location cache.
+
+ Attributes:
+ operation_uid (int):
+ A session-scoped unique ID for the operation, computed
+ client-side. Requests with the same ``operation_uid`` should
+ have a shared 'shape', meaning that some fields are expected
+ to be the same, such as the SQL query, the target
+ table/columns (for reads) etc. Requests with the same
+ ``operation_uid`` are meant to differ only in fields like
+ keys/key ranges/query parameters, transaction IDs, etc.
+
+ ``operation_uid`` must be non-zero for ``RoutingHint`` to be
+ valid.
+ database_id (int):
+ The database ID of the database being accessed, see
+ ``CacheUpdate.database_id``. Should match the cache entries
+ that were used to generate the rest of the fields in this
+ ``RoutingHint``.
+ schema_generation (bytes):
+ The schema generation of the recipe that was used to
+ generate ``key`` and ``limit_key``. See also
+ ``RecipeList.schema_generation``.
+ key (bytes):
+ The key / key range that this request accesses. For
+ operations that access a single key, ``key`` should be set
+ and ``limit_key`` should be empty. For operations that
+ access a key range, ``key`` and ``limit_key`` should both be
+ set, to the inclusive start and exclusive end of the range
+ respectively.
+
+ The keys are encoded in "sortable string format" (ssformat),
+ using a ``KeyRecipe`` that is appropriate for the request.
+ See ``KeyRecipe`` for more details.
+ limit_key (bytes):
+ If this request targets a key range, this is the exclusive
+ end of the range. See ``key`` for more details.
+ group_uid (int):
+ The group UID of the group that the client believes serves
+ the range defined by ``key`` and ``limit_key``. See
+ ``Range.group_uid`` for more details.
+ split_id (int):
+ The split ID of the split that the client believes contains
+ the range defined by ``key`` and ``limit_key``. See
+ ``Range.split_id`` for more details.
+ tablet_uid (int):
+ The tablet UID of the tablet from group ``group_uid`` that
+ the client believes is best to serve this request. See
+ ``Group.local_tablet_uids`` and ``Group.leader_tablet_uid``.
+ skipped_tablet_uid (MutableSequence[google.cloud.spanner_v1.types.RoutingHint.SkippedTablet]):
+ If the client had multiple options for tablet selection, and
+ some of its first choices were unhealthy (e.g., the server
+ is unreachable, or ``Tablet.skip`` is true), this field will
+ contain the tablet UIDs of those tablets, with their
+ incarnations. The server may include a ``CacheUpdate`` with
+ new locations for those tablets.
+ client_location (str):
+ If present, the client's current location. In
+ the Spanner managed service, this should be the
+ name of a Google Cloud zone or region, such as
+ "us-central1". In Spanner Omni, this should
+ correspond to a previously created location.
+
+ If absent, the client's location will be assumed
+ to be the same as the location of the server the
+ client ends up connected to.
+
+ Locations are primarily valuable for clients
+ that connect from regions other than the ones
+ that contain the Spanner database.
+ """
+
+ class SkippedTablet(proto.Message):
+ r"""A tablet that was skipped by the client. See ``Tablet.tablet_uid``
+ and ``Tablet.incarnation``.
+
+ Attributes:
+ tablet_uid (int):
+ The tablet UID of the tablet that was skipped. See
+ ``Tablet.tablet_uid``.
+ incarnation (bytes):
+ The incarnation of the tablet that was skipped. See
+ ``Tablet.incarnation``.
+ """
+
+ tablet_uid: int = proto.Field(
+ proto.UINT64,
+ number=1,
+ )
+ incarnation: bytes = proto.Field(
+ proto.BYTES,
+ number=2,
+ )
+
+ operation_uid: int = proto.Field(
+ proto.UINT64,
+ number=1,
+ )
+ database_id: int = proto.Field(
+ proto.UINT64,
+ number=2,
+ )
+ schema_generation: bytes = proto.Field(
+ proto.BYTES,
+ number=3,
+ )
+ key: bytes = proto.Field(
+ proto.BYTES,
+ number=4,
+ )
+ limit_key: bytes = proto.Field(
+ proto.BYTES,
+ number=5,
+ )
+ group_uid: int = proto.Field(
+ proto.UINT64,
+ number=6,
+ )
+ split_id: int = proto.Field(
+ proto.UINT64,
+ number=7,
+ )
+ tablet_uid: int = proto.Field(
+ proto.UINT64,
+ number=8,
+ )
+ skipped_tablet_uid: MutableSequence[SkippedTablet] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=9,
+ message=SkippedTablet,
+ )
+ client_location: str = proto.Field(
+ proto.STRING,
+ number=10,
+ )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/spanner_v1/types/mutation.py b/google/cloud/spanner_v1/types/mutation.py
index 8389910fc0..3cbc3b937b 100644
--- a/google/cloud/spanner_v1/types/mutation.py
+++ b/google/cloud/spanner_v1/types/mutation.py
@@ -21,6 +21,7 @@
from google.cloud.spanner_v1.types import keys
from google.protobuf import struct_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
@@ -89,6 +90,14 @@ class Mutation(proto.Message):
Delete rows from a table. Succeeds whether or
not the named rows were present.
+ This field is a member of `oneof`_ ``operation``.
+ send (google.cloud.spanner_v1.types.Mutation.Send):
+ Send a message to a queue.
+
+ This field is a member of `oneof`_ ``operation``.
+ ack (google.cloud.spanner_v1.types.Mutation.Ack):
+ Ack a message from a queue.
+
This field is a member of `oneof`_ ``operation``.
"""
@@ -166,6 +175,79 @@ class Delete(proto.Message):
message=keys.KeySet,
)
+ class Send(proto.Message):
+ r"""Arguments to [send][google.spanner.v1.Mutation.send] operations.
+
+ Attributes:
+ queue (str):
+ Required. The queue to which the message will
+ be sent.
+ key (google.protobuf.struct_pb2.ListValue):
+ Required. The primary key of the message to
+ be sent.
+ deliver_time (google.protobuf.timestamp_pb2.Timestamp):
+ The time at which Spanner will begin attempting to deliver
+ the message. If ``deliver_time`` is not set, Spanner will
+ deliver the message immediately. If ``deliver_time`` is in
+ the past, Spanner will replace it with a value closer to the
+ current time.
+ payload (google.protobuf.struct_pb2.Value):
+ The payload of the message.
+ """
+
+ queue: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ key: struct_pb2.ListValue = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ message=struct_pb2.ListValue,
+ )
+ deliver_time: timestamp_pb2.Timestamp = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ message=timestamp_pb2.Timestamp,
+ )
+ payload: struct_pb2.Value = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ message=struct_pb2.Value,
+ )
+
+ class Ack(proto.Message):
+ r"""Arguments to [ack][google.spanner.v1.Mutation.ack] operations.
+
+ Attributes:
+ queue (str):
+ Required. The queue where the message to be
+ acked is stored.
+ key (google.protobuf.struct_pb2.ListValue):
+ Required. The primary key of the message to
+ be acked.
+ ignore_not_found (bool):
+ By default, an attempt to ack a message that does not exist
+ will fail with a ``NOT_FOUND`` error. With
+ ``ignore_not_found`` set to true, the ack will succeed even
+ if the message does not exist. This is useful for
+ unconditionally acking a message, even if it is missing or
+ has already been acked.
+ """
+
+ queue: str = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ key: struct_pb2.ListValue = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ message=struct_pb2.ListValue,
+ )
+ ignore_not_found: bool = proto.Field(
+ proto.BOOL,
+ number=3,
+ )
+
insert: Write = proto.Field(
proto.MESSAGE,
number=1,
@@ -196,6 +278,18 @@ class Delete(proto.Message):
oneof="operation",
message=Delete,
)
+ send: Send = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ oneof="operation",
+ message=Send,
+ )
+ ack: Ack = proto.Field(
+ proto.MESSAGE,
+ number=7,
+ oneof="operation",
+ message=Ack,
+ )
__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/spanner_v1/types/query_plan.py b/google/cloud/spanner_v1/types/query_plan.py
index d361911f1d..efe32934f8 100644
--- a/google/cloud/spanner_v1/types/query_plan.py
+++ b/google/cloud/spanner_v1/types/query_plan.py
@@ -26,6 +26,7 @@
package="google.spanner.v1",
manifest={
"PlanNode",
+ "QueryAdvisorResult",
"QueryPlan",
},
)
@@ -198,6 +199,49 @@ class ShortRepresentation(proto.Message):
)
+class QueryAdvisorResult(proto.Message):
+ r"""Output of query advisor analysis.
+
+ Attributes:
+ index_advice (MutableSequence[google.cloud.spanner_v1.types.QueryAdvisorResult.IndexAdvice]):
+ Optional. Index Recommendation for a query.
+ This is an optional field and the recommendation
+ will only be available when the recommendation
+ guarantees significant improvement in query
+ performance.
+ """
+
+ class IndexAdvice(proto.Message):
+ r"""Recommendation to add new indexes to run queries more
+ efficiently.
+
+ Attributes:
+ ddl (MutableSequence[str]):
+ Optional. DDL statements to add new indexes
+ that will improve the query.
+ improvement_factor (float):
+ Optional. Estimated latency improvement
+ factor. For example if the query currently takes
+ 500 ms to run and the estimated latency with new
+ indexes is 100 ms this field will be 5.
+ """
+
+ ddl: MutableSequence[str] = proto.RepeatedField(
+ proto.STRING,
+ number=1,
+ )
+ improvement_factor: float = proto.Field(
+ proto.DOUBLE,
+ number=2,
+ )
+
+ index_advice: MutableSequence[IndexAdvice] = proto.RepeatedField(
+ proto.MESSAGE,
+ number=1,
+ message=IndexAdvice,
+ )
+
+
class QueryPlan(proto.Message):
r"""Contains an ordered list of nodes appearing in the query
plan.
@@ -208,6 +252,10 @@ class QueryPlan(proto.Message):
pre-order starting with the plan root. Each
[PlanNode][google.spanner.v1.PlanNode]'s ``id`` corresponds
to its index in ``plan_nodes``.
+ query_advice (google.cloud.spanner_v1.types.QueryAdvisorResult):
+            Optional. The advice/recommendations for a
+ query. Currently this field will be serving
+ index recommendations for a query.
"""
plan_nodes: MutableSequence["PlanNode"] = proto.RepeatedField(
@@ -215,6 +263,11 @@ class QueryPlan(proto.Message):
number=1,
message="PlanNode",
)
+ query_advice: "QueryAdvisorResult" = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ message="QueryAdvisorResult",
+ )
__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/spanner_v1/types/result_set.py b/google/cloud/spanner_v1/types/result_set.py
index 697d0fd33b..0ab386bc61 100644
--- a/google/cloud/spanner_v1/types/result_set.py
+++ b/google/cloud/spanner_v1/types/result_set.py
@@ -19,6 +19,7 @@
import proto # type: ignore
+from google.cloud.spanner_v1.types import location
from google.cloud.spanner_v1.types import query_plan as gs_query_plan
from google.cloud.spanner_v1.types import transaction as gs_transaction
from google.cloud.spanner_v1.types import type as gs_type
@@ -223,6 +224,14 @@ class PartialResultSet(proto.Message):
``PartialResultSet`` in the stream. The server might
optionally set this field. Clients shouldn't rely on this
field being set in all cases.
+ cache_update (google.cloud.spanner_v1.types.CacheUpdate):
+ Optional. A cache update expresses a set of changes the
+ client should incorporate into its location cache. The
+ client should discard the changes if they are older than the
+ data it already has. This data can be obtained in response
+ to requests that included a ``RoutingHint`` field, but may
+ also be obtained by explicit location-fetching RPCs which
+ may be added in the future.
"""
metadata: "ResultSetMetadata" = proto.Field(
@@ -257,6 +266,11 @@ class PartialResultSet(proto.Message):
proto.BOOL,
number=9,
)
+ cache_update: location.CacheUpdate = proto.Field(
+ proto.MESSAGE,
+ number=10,
+ message=location.CacheUpdate,
+ )
class ResultSetMetadata(proto.Message):
diff --git a/google/cloud/spanner_v1/types/spanner.py b/google/cloud/spanner_v1/types/spanner.py
index 9e7a477b46..6e363088de 100644
--- a/google/cloud/spanner_v1/types/spanner.py
+++ b/google/cloud/spanner_v1/types/spanner.py
@@ -20,6 +20,7 @@
import proto # type: ignore
from google.cloud.spanner_v1.types import keys
+from google.cloud.spanner_v1.types import location as gs_location
from google.cloud.spanner_v1.types import mutation
from google.cloud.spanner_v1.types import result_set
from google.cloud.spanner_v1.types import transaction as gs_transaction
@@ -96,10 +97,10 @@ class BatchCreateSessionsRequest(proto.Message):
Parameters to apply to each created session.
session_count (int):
Required. The number of sessions to be created in this batch
- call. The API can return fewer than the requested number of
- sessions. If a specific number of sessions are desired, the
- client can make additional calls to ``BatchCreateSessions``
- (adjusting
+ call. At least one session is created. The API can return
+ fewer than the requested number of sessions. If a specific
+ number of sessions are desired, the client can make
+ additional calls to ``BatchCreateSessions`` (adjusting
[session_count][google.spanner.v1.BatchCreateSessionsRequest.session_count]
as necessary).
"""
@@ -167,9 +168,9 @@ class Session(proto.Message):
The database role which created this session.
multiplexed (bool):
Optional. If ``true``, specifies a multiplexed session. Use
- a multiplexed session for multiple, concurrent read-only
- operations. Don't use them for read-write transactions,
- partitioned reads, or partitioned queries. Use
+ a multiplexed session for multiple, concurrent operations
+ including any combination of read-only and read-write
+ transactions. Use
[``sessions.create``][google.spanner.v1.Spanner.CreateSession]
to create multiplexed sessions. Don't use
[BatchCreateSessions][google.spanner.v1.Spanner.BatchCreateSessions]
@@ -660,6 +661,15 @@ class ExecuteSqlRequest(proto.Message):
example, validation of unique constraints). Given this,
successful execution of a DML statement shouldn't be assumed
until a subsequent ``Commit`` call completes successfully.
+ routing_hint (google.cloud.spanner_v1.types.RoutingHint):
+ Optional. If present, it makes the Spanner
+ requests location-aware.
+ It gives the server hints that can be used to
+ route the request to an appropriate server,
+ potentially significantly decreasing latency and
+ improving throughput. To achieve improved
+ performance, most fields must be filled in with
+ accurate values.
"""
class QueryMode(proto.Enum):
@@ -826,6 +836,11 @@ class QueryOptions(proto.Message):
proto.BOOL,
number=17,
)
+ routing_hint: gs_location.RoutingHint = proto.Field(
+ proto.MESSAGE,
+ number=18,
+ message=gs_location.RoutingHint,
+ )
class ExecuteBatchDmlRequest(proto.Message):
@@ -1385,6 +1400,15 @@ class ReadRequest(proto.Message):
lock_hint (google.cloud.spanner_v1.types.ReadRequest.LockHint):
Optional. Lock Hint for the request, it can
only be used with read-write transactions.
+ routing_hint (google.cloud.spanner_v1.types.RoutingHint):
+ Optional. If present, it makes the Spanner
+ requests location-aware.
+ It gives the server hints that can be used to
+ route the request to an appropriate server,
+ potentially significantly decreasing latency and
+ improving throughput. To achieve improved
+ performance, most fields must be filled in with
+ accurate values.
"""
class OrderBy(proto.Enum):
@@ -1530,6 +1554,11 @@ class LockHint(proto.Enum):
number=17,
enum=LockHint,
)
+ routing_hint: gs_location.RoutingHint = proto.Field(
+ proto.MESSAGE,
+ number=18,
+ message=gs_location.RoutingHint,
+ )
class BeginTransactionRequest(proto.Message):
diff --git a/google/cloud/spanner_v1/types/transaction.py b/google/cloud/spanner_v1/types/transaction.py
index 447c310548..0cc11a73a6 100644
--- a/google/cloud/spanner_v1/types/transaction.py
+++ b/google/cloud/spanner_v1/types/transaction.py
@@ -96,8 +96,9 @@ class TransactionOptions(proto.Message):
"""
class IsolationLevel(proto.Enum):
- r"""``IsolationLevel`` is used when setting ``isolation_level`` for a
- transaction.
+ r"""``IsolationLevel`` is used when setting the `isolation
+ level `__
+ for a transaction.
Values:
ISOLATION_LEVEL_UNSPECIFIED (0):
@@ -124,8 +125,8 @@ class IsolationLevel(proto.Enum):
``SERIALIZABLE`` transactions, only write-write conflicts
are detected in snapshot transactions.
- This isolation level does not support Read-only and
- Partitioned DML transactions.
+ This isolation level does not support read-only and
+ partitioned DML transactions.
When ``REPEATABLE_READ`` is specified on a read-write
transaction, the locking semantics default to
diff --git a/noxfile.py b/noxfile.py
index b101f46b2e..82715de072 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -30,12 +30,12 @@
FLAKE8_VERSION = "flake8==6.1.0"
BLACK_VERSION = "black[jupyter]==23.7.0"
ISORT_VERSION = "isort==5.11.0"
-LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
+LINT_PATHS = ["google", "tests", "noxfile.py", "setup.py"]
-DEFAULT_PYTHON_VERSION = "3.12"
+DEFAULT_PYTHON_VERSION = "3.14"
DEFAULT_MOCK_SERVER_TESTS_PYTHON_VERSION = "3.12"
-SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.12"]
+SYSTEM_TEST_PYTHON_VERSIONS: List[str] = ["3.14"]
UNIT_TEST_PYTHON_VERSIONS: List[str] = [
"3.9",
@@ -43,6 +43,7 @@
"3.11",
"3.12",
"3.13",
+ "3.14",
]
UNIT_TEST_STANDARD_DEPENDENCIES = [
"mock",
@@ -81,6 +82,7 @@
"unit-3.11",
"unit-3.12",
"unit-3.13",
+ "unit-3.14",
"system",
"cover",
"lint",
@@ -113,7 +115,7 @@ def lint(session):
# Use a python runtime which is available in the owlbot post processor here
# https://github.com/googleapis/synthtool/blob/master/docker/owlbot/python/Dockerfile
-@nox.session(python=["3.10", DEFAULT_PYTHON_VERSION])
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def blacken(session):
"""Run black. Format code to uniform standard."""
session.install(BLACK_VERSION)
@@ -195,7 +197,12 @@ def install_unittest_dependencies(session, *constraints):
def unit(session, protobuf_implementation):
# Install all test dependencies, then install this package in-place.
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
+ if protobuf_implementation == "cpp" and session.python in (
+ "3.11",
+ "3.12",
+ "3.13",
+ "3.14",
+ ):
session.skip("cpp implementation is not supported in python 3.11+")
constraints_path = str(
@@ -213,6 +220,7 @@ def unit(session, protobuf_implementation):
session.run(
"py.test",
"--quiet",
+ "-s",
f"--junitxml=unit_{session.python}_sponge_log.xml",
"--cov=google",
"--cov=tests/unit",
@@ -326,7 +334,12 @@ def system(session, protobuf_implementation, database_dialect):
"Only run system tests on real Spanner with one protobuf implementation to speed up the build"
)
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
+ if protobuf_implementation == "cpp" and session.python in (
+ "3.11",
+ "3.12",
+ "3.13",
+ "3.14",
+ ):
session.skip("cpp implementation is not supported in python 3.11+")
# Install pyopenssl for mTLS testing.
@@ -470,7 +483,7 @@ def docfx(session):
)
-@nox.session(python="3.13")
+@nox.session(python="3.14")
@nox.parametrize(
"protobuf_implementation,database_dialect",
[
@@ -485,7 +498,12 @@ def docfx(session):
def prerelease_deps(session, protobuf_implementation, database_dialect):
"""Run all tests with prerelease versions of dependencies installed."""
- if protobuf_implementation == "cpp" and session.python in ("3.11", "3.12", "3.13"):
+ if protobuf_implementation == "cpp" and session.python in (
+ "3.11",
+ "3.12",
+ "3.13",
+ "3.14",
+ ):
session.skip("cpp implementation is not supported in python 3.11+")
# Install all dependencies
diff --git a/release-please-config.json b/release-please-config.json
deleted file mode 100644
index faae5c405c..0000000000
--- a/release-please-config.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{
- "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json",
- "packages": {
- ".": {
- "release-type": "python",
- "extra-files": [
- "google/cloud/spanner_admin_instance_v1/gapic_version.py",
- "google/cloud/spanner_v1/gapic_version.py",
- "google/cloud/spanner_admin_database_v1/gapic_version.py",
- {
- "type": "json",
- "path": "samples/generated_samples/snippet_metadata_google.spanner.v1.json",
- "jsonpath": "$.clientLibrary.version"
- },
- {
- "type": "json",
- "path": "samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json",
- "jsonpath": "$.clientLibrary.version"
- },
- {
- "type": "json",
- "path": "samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json",
- "jsonpath": "$.clientLibrary.version"
- }
- ]
- }
- },
- "release-type": "python",
- "plugins": [
- {
- "type": "sentence-case"
- }
- ],
- "initial-version": "0.1.0"
-}
diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json
index e6eeb1f977..0bfe97d988 100644
--- a/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json
+++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.database.v1.json
@@ -8,7 +8,7 @@
],
"language": "PYTHON",
"name": "google-cloud-spanner-admin-database",
- "version": "3.59.0"
+ "version": "3.60.0"
},
"snippets": [
{
diff --git a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json
index 92ae0279ef..9b51de3471 100644
--- a/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json
+++ b/samples/generated_samples/snippet_metadata_google.spanner.admin.instance.v1.json
@@ -8,7 +8,7 @@
],
"language": "PYTHON",
"name": "google-cloud-spanner-admin-instance",
- "version": "3.59.0"
+ "version": "3.60.0"
},
"snippets": [
{
diff --git a/samples/generated_samples/snippet_metadata_google.spanner.v1.json b/samples/generated_samples/snippet_metadata_google.spanner.v1.json
index 4d84b1ab9a..1ec5a82e5a 100644
--- a/samples/generated_samples/snippet_metadata_google.spanner.v1.json
+++ b/samples/generated_samples/snippet_metadata_google.spanner.v1.json
@@ -8,7 +8,7 @@
],
"language": "PYTHON",
"name": "google-cloud-spanner",
- "version": "3.59.0"
+ "version": "3.60.0"
},
"snippets": [
{
diff --git a/samples/samples/noxfile.py b/samples/samples/noxfile.py
index 97dc6241e7..719e131099 100644
--- a/samples/samples/noxfile.py
+++ b/samples/samples/noxfile.py
@@ -89,7 +89,7 @@ def get_pytest_env_vars() -> Dict[str, str]:
# DO NOT EDIT - automatically generated.
# All versions used to test samples.
-ALL_VERSIONS = ["3.9", "3.10", "3.11", "3.12", "3.13"]
+ALL_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"]
# Any default versions that should be ignored.
IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"]
diff --git a/scripts/fixup_spanner_v1_keywords.py b/scripts/fixup_spanner_v1_keywords.py
index c7f41be11e..e0787f13b4 100644
--- a/scripts/fixup_spanner_v1_keywords.py
+++ b/scripts/fixup_spanner_v1_keywords.py
@@ -46,15 +46,15 @@ class spannerCallTransformer(cst.CSTTransformer):
'create_session': ('database', 'session', ),
'delete_session': ('name', ),
'execute_batch_dml': ('session', 'transaction', 'statements', 'seqno', 'request_options', 'last_statements', ),
- 'execute_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'directed_read_options', 'data_boost_enabled', 'last_statement', ),
- 'execute_streaming_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'directed_read_options', 'data_boost_enabled', 'last_statement', ),
+ 'execute_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'directed_read_options', 'data_boost_enabled', 'last_statement', 'routing_hint', ),
+ 'execute_streaming_sql': ('session', 'sql', 'transaction', 'params', 'param_types', 'resume_token', 'query_mode', 'partition_token', 'seqno', 'query_options', 'request_options', 'directed_read_options', 'data_boost_enabled', 'last_statement', 'routing_hint', ),
'get_session': ('name', ),
'list_sessions': ('database', 'page_size', 'page_token', 'filter', ),
'partition_query': ('session', 'sql', 'transaction', 'params', 'param_types', 'partition_options', ),
'partition_read': ('session', 'table', 'key_set', 'transaction', 'index', 'columns', 'partition_options', ),
- 'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', 'order_by', 'lock_hint', ),
+ 'read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', 'order_by', 'lock_hint', 'routing_hint', ),
'rollback': ('session', 'transaction_id', ),
- 'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', 'order_by', 'lock_hint', ),
+ 'streaming_read': ('session', 'table', 'columns', 'key_set', 'transaction', 'index', 'limit', 'resume_token', 'partition_token', 'request_options', 'directed_read_options', 'data_boost_enabled', 'order_by', 'lock_hint', 'routing_hint', ),
}
def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
diff --git a/setup.py b/setup.py
index 858982f783..fdd911bfd1 100644
--- a/setup.py
+++ b/setup.py
@@ -44,18 +44,15 @@
"proto-plus >= 1.22.2, <2.0.0; python_version>='3.11'",
"protobuf>=3.20.2,<7.0.0,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
"grpc-interceptor >= 0.15.4",
+ # Make OpenTelemetry a core dependency
+ "opentelemetry-api >= 1.22.0",
+ "opentelemetry-sdk >= 1.22.0",
+ "opentelemetry-semantic-conventions >= 0.43b0",
+ "opentelemetry-resourcedetector-gcp >= 1.8.0a0",
+ "google-cloud-monitoring >= 2.16.0",
+ "mmh3 >= 4.1.0 ",
]
-extras = {
- "tracing": [
- "opentelemetry-api >= 1.22.0",
- "opentelemetry-sdk >= 1.22.0",
- "opentelemetry-semantic-conventions >= 0.43b0",
- "opentelemetry-resourcedetector-gcp >= 1.8.0a0",
- "google-cloud-monitoring >= 2.16.0",
- "mmh3 >= 4.1.0 ",
- ],
- "libcst": "libcst >= 0.2.5",
-}
+extras = {"libcst": "libcst >= 0.2.5"}
url = "https://github.com/googleapis/python-spanner"
@@ -90,6 +87,7 @@
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
+ "Programming Language :: Python :: 3.14",
"Operating System :: OS Independent",
"Topic :: Internet",
],
diff --git a/testing/constraints-3.10.txt b/testing/constraints-3.10.txt
index ad3f0fa58e..ef1c92ffff 100644
--- a/testing/constraints-3.10.txt
+++ b/testing/constraints-3.10.txt
@@ -2,6 +2,8 @@
# This constraints file is required for unit tests.
# List all library dependencies and extras in this file.
google-api-core
+google-auth
+grpcio
proto-plus
protobuf
grpc-google-iam-v1
diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt
index ad3f0fa58e..ef1c92ffff 100644
--- a/testing/constraints-3.11.txt
+++ b/testing/constraints-3.11.txt
@@ -2,6 +2,8 @@
# This constraints file is required for unit tests.
# List all library dependencies and extras in this file.
google-api-core
+google-auth
+grpcio
proto-plus
protobuf
grpc-google-iam-v1
diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt
index ad3f0fa58e..ef1c92ffff 100644
--- a/testing/constraints-3.12.txt
+++ b/testing/constraints-3.12.txt
@@ -2,6 +2,8 @@
# This constraints file is required for unit tests.
# List all library dependencies and extras in this file.
google-api-core
+google-auth
+grpcio
proto-plus
protobuf
grpc-google-iam-v1
diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt
index 2010e549cc..2ae5a677e8 100644
--- a/testing/constraints-3.13.txt
+++ b/testing/constraints-3.13.txt
@@ -7,6 +7,7 @@
# Then this file should have google-cloud-foo>=1
google-api-core>=2
google-auth>=2
+grpcio>=1
proto-plus>=1
protobuf>=6
grpc-google-iam-v1>=0
diff --git a/testing/constraints-3.14.txt b/testing/constraints-3.14.txt
new file mode 100644
index 0000000000..92054fc895
--- /dev/null
+++ b/testing/constraints-3.14.txt
@@ -0,0 +1,13 @@
+# We use the constraints file for the latest Python version
+# (currently this file) to check that the latest
+# major versions of dependencies are supported in setup.py.
+# List all library dependencies and extras in this file.
+# Require the latest major version be installed for each dependency.
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0",
+# Then this file should have google-cloud-foo>=1
+google-api-core>=2
+google-auth>=2
+grpcio>=1
+proto-plus>=1
+protobuf>=6
+grpc-google-iam-v1>=0
\ No newline at end of file
diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt
index ad3f0fa58e..ef1c92ffff 100644
--- a/testing/constraints-3.8.txt
+++ b/testing/constraints-3.8.txt
@@ -2,6 +2,8 @@
# This constraints file is required for unit tests.
# List all library dependencies and extras in this file.
google-api-core
+google-auth
+grpcio
proto-plus
protobuf
grpc-google-iam-v1
diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt
index ad3f0fa58e..ef1c92ffff 100644
--- a/testing/constraints-3.9.txt
+++ b/testing/constraints-3.9.txt
@@ -2,6 +2,8 @@
# This constraints file is required for unit tests.
# List all library dependencies and extras in this file.
google-api-core
+google-auth
+grpcio
proto-plus
protobuf
grpc-google-iam-v1
diff --git a/tests/system/_helpers.py b/tests/system/_helpers.py
index 1fc897b39c..10f970427e 100644
--- a/tests/system/_helpers.py
+++ b/tests/system/_helpers.py
@@ -56,6 +56,12 @@
EMULATOR_PROJECT_DEFAULT = "emulator-test-project"
EMULATOR_PROJECT = os.getenv(EMULATOR_PROJECT_ENVVAR, EMULATOR_PROJECT_DEFAULT)
+USE_EXPERIMENTAL_HOST_ENVVAR = "SPANNER_EXPERIMENTAL_HOST"
+EXPERIMENTAL_HOST = os.getenv(USE_EXPERIMENTAL_HOST_ENVVAR)
+USE_EXPERIMENTAL_HOST = EXPERIMENTAL_HOST is not None
+
+EXPERIMENTAL_HOST_PROJECT = "default"
+EXPERIMENTAL_HOST_INSTANCE = "default"
DDL_STATEMENTS = (
_fixtures.PG_DDL_STATEMENTS
diff --git a/tests/system/conftest.py b/tests/system/conftest.py
index bc94d065b2..6b0ad6cebe 100644
--- a/tests/system/conftest.py
+++ b/tests/system/conftest.py
@@ -49,6 +49,12 @@ def not_emulator():
pytest.skip(f"{_helpers.USE_EMULATOR_ENVVAR} set in environment.")
+@pytest.fixture(scope="module")
+def not_experimental_host():
+ if _helpers.USE_EXPERIMENTAL_HOST:
+ pytest.skip(f"{_helpers.USE_EXPERIMENTAL_HOST_ENVVAR} set in environment.")
+
+
@pytest.fixture(scope="session")
def not_postgres(database_dialect):
if database_dialect == DatabaseDialect.POSTGRESQL:
@@ -104,6 +110,15 @@ def spanner_client():
project=_helpers.EMULATOR_PROJECT,
credentials=credentials,
)
+ elif _helpers.USE_EXPERIMENTAL_HOST:
+ from google.auth.credentials import AnonymousCredentials
+
+ credentials = AnonymousCredentials()
+ return spanner_v1.Client(
+ project=_helpers.EXPERIMENTAL_HOST_PROJECT,
+ credentials=credentials,
+ experimental_host=_helpers.EXPERIMENTAL_HOST,
+ )
else:
client_options = {"api_endpoint": _helpers.API_ENDPOINT}
return spanner_v1.Client(
@@ -130,7 +145,8 @@ def backup_operation_timeout():
def shared_instance_id():
if _helpers.CREATE_INSTANCE:
return f"{_helpers.unique_id('google-cloud')}"
-
+ if _helpers.USE_EXPERIMENTAL_HOST:
+ return _helpers.EXPERIMENTAL_HOST_INSTANCE
return _helpers.INSTANCE_ID
@@ -138,7 +154,7 @@ def shared_instance_id():
def instance_configs(spanner_client):
configs = list(_helpers.retry_503(spanner_client.list_instance_configs)())
- if not _helpers.USE_EMULATOR:
+ if not _helpers.USE_EMULATOR and not _helpers.USE_EXPERIMENTAL_HOST:
# Defend against back-end returning configs for regions we aren't
# actually allowed to use.
configs = [config for config in configs if "-us-" in config.name]
diff --git a/tests/system/test_backup_api.py b/tests/system/test_backup_api.py
index 6ffc74283e..26a2620765 100644
--- a/tests/system/test_backup_api.py
+++ b/tests/system/test_backup_api.py
@@ -26,10 +26,16 @@
Remove {_helpers.SKIP_BACKUP_TESTS_ENVVAR} from environment to run these tests.\
"""
skip_emulator_reason = "Backup operations not supported by emulator."
+skip_experimental_host_reason = (
+ "Backup operations not supported on experimental host yet."
+)
pytestmark = [
pytest.mark.skipif(_helpers.SKIP_BACKUP_TESTS, reason=skip_env_reason),
pytest.mark.skipif(_helpers.USE_EMULATOR, reason=skip_emulator_reason),
+ pytest.mark.skipif(
+ _helpers.USE_EXPERIMENTAL_HOST, reason=skip_experimental_host_reason
+ ),
]
diff --git a/tests/system/test_database_api.py b/tests/system/test_database_api.py
index e3c18ece10..d47826baf4 100644
--- a/tests/system/test_database_api.py
+++ b/tests/system/test_database_api.py
@@ -47,7 +47,9 @@
@pytest.fixture(scope="module")
-def multiregion_instance(spanner_client, instance_operation_timeout, not_postgres):
+def multiregion_instance(
+ spanner_client, instance_operation_timeout, not_postgres, not_experimental_host
+):
multi_region_instance_id = _helpers.unique_id("multi-region")
multi_region_config = "nam3"
config_name = "{}/instanceConfigs/{}".format(
@@ -97,6 +99,7 @@ def test_database_binding_of_fixed_size_pool(
databases_to_delete,
not_postgres,
proto_descriptor_file,
+ not_experimental_host,
):
temp_db_id = _helpers.unique_id("fixed_size_db", separator="_")
temp_db = shared_instance.database(temp_db_id)
@@ -130,6 +133,7 @@ def test_database_binding_of_pinging_pool(
databases_to_delete,
not_postgres,
proto_descriptor_file,
+ not_experimental_host,
):
temp_db_id = _helpers.unique_id("binding_db", separator="_")
temp_db = shared_instance.database(temp_db_id)
@@ -217,6 +221,7 @@ def test_create_database_pitr_success(
def test_create_database_with_default_leader_success(
not_emulator, # Default leader setting not supported by the emulator
not_postgres,
+ not_experimental_host,
multiregion_instance,
databases_to_delete,
):
@@ -253,6 +258,7 @@ def test_create_database_with_default_leader_success(
def test_iam_policy(
not_emulator,
+ not_experimental_host,
shared_instance,
databases_to_delete,
):
@@ -414,6 +420,7 @@ def test_update_ddl_w_pitr_success(
def test_update_ddl_w_default_leader_success(
not_emulator,
not_postgres,
+ not_experimental_host,
multiregion_instance,
databases_to_delete,
proto_descriptor_file,
@@ -448,6 +455,7 @@ def test_update_ddl_w_default_leader_success(
def test_create_role_grant_access_success(
not_emulator,
+ not_experimental_host,
shared_instance,
databases_to_delete,
database_dialect,
@@ -514,6 +522,7 @@ def test_create_role_grant_access_success(
def test_list_database_role_success(
not_emulator,
+ not_experimental_host,
shared_instance,
databases_to_delete,
database_dialect,
@@ -757,7 +766,11 @@ def test_information_schema_referential_constraints_fkadc(
def test_update_database_success(
- not_emulator, shared_database, shared_instance, database_operation_timeout
+ not_emulator,
+ not_experimental_host,
+ shared_database,
+ shared_instance,
+ database_operation_timeout,
):
old_protection = shared_database.enable_drop_protection
new_protection = True
diff --git a/tests/system/test_dbapi.py b/tests/system/test_dbapi.py
index 4cc718e275..309f533170 100644
--- a/tests/system/test_dbapi.py
+++ b/tests/system/test_dbapi.py
@@ -1436,7 +1436,13 @@ def test_ping(self):
@pytest.mark.noautofixt
def test_user_agent(self, shared_instance, dbapi_database):
"""Check that DB API uses an appropriate user agent."""
- conn = connect(shared_instance.name, dbapi_database.name)
+ conn = connect(
+ shared_instance.name,
+ dbapi_database.name,
+ experimental_host=_helpers.EXPERIMENTAL_HOST
+ if _helpers.USE_EXPERIMENTAL_HOST
+ else None,
+ )
assert (
conn.instance._client._client_info.user_agent
== "gl-dbapi/" + package_version.__version__
diff --git a/tests/system/test_instance_api.py b/tests/system/test_instance_api.py
index fe962d2ccb..274a104cae 100644
--- a/tests/system/test_instance_api.py
+++ b/tests/system/test_instance_api.py
@@ -119,6 +119,7 @@ def test_update_instance(
shared_instance,
shared_instance_id,
instance_operation_timeout,
+ not_experimental_host,
):
old_display_name = shared_instance.display_name
new_display_name = "Foo Bar Baz"
diff --git a/tests/system/test_metrics.py b/tests/system/test_metrics.py
new file mode 100644
index 0000000000..acc8d45cee
--- /dev/null
+++ b/tests/system/test_metrics.py
@@ -0,0 +1,92 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import mock
+import pytest
+
+from opentelemetry.sdk.metrics import MeterProvider
+from opentelemetry.sdk.metrics.export import InMemoryMetricReader
+
+from google.cloud.spanner_v1 import Client
+
+# System tests are skipped if the environment variables are not set.
+PROJECT = os.environ.get("GOOGLE_CLOUD_PROJECT")
+INSTANCE_ID = os.environ.get("SPANNER_TEST_INSTANCE")
+DATABASE_ID = "test_metrics_db_system"
+
+
+pytestmark = pytest.mark.skipif(
+ not all([PROJECT, INSTANCE_ID]), reason="System test environment variables not set."
+)
+
+
+@pytest.fixture(scope="module")
+def metrics_database():
+ """Create a database for the test."""
+ client = Client(project=PROJECT)
+ instance = client.instance(INSTANCE_ID)
+ database = instance.database(DATABASE_ID)
+ if database.exists(): # Clean up from previous failed run
+ database.drop()
+ op = database.create()
+ op.result(timeout=300) # Wait for creation to complete
+ yield database
+ if database.exists():
+ database.drop()
+
+
+def test_builtin_metrics_with_default_otel(metrics_database):
+ """
+ Verifies that built-in metrics are collected by default when a
+ transaction is executed.
+ """
+ reader = InMemoryMetricReader()
+ meter_provider = MeterProvider(metric_readers=[reader])
+
+ # Patch the client's metric setup to use our in-memory reader.
+ with mock.patch(
+ "google.cloud.spanner_v1.client.MeterProvider",
+ return_value=meter_provider,
+ ):
+ with mock.patch.dict(os.environ, {"SPANNER_DISABLE_BUILTIN_METRICS": "false"}):
+ with metrics_database.snapshot() as snapshot:
+ list(snapshot.execute_sql("SELECT 1"))
+
+ metric_data = reader.get_metrics_data()
+
+ assert len(metric_data.resource_metrics) >= 1
+ assert len(metric_data.resource_metrics[0].scope_metrics) >= 1
+
+ collected_metrics = {
+ metric.name
+ for metric in metric_data.resource_metrics[0].scope_metrics[0].metrics
+ }
+ expected_metrics = {
+ "spanner/operation_latencies",
+ "spanner/attempt_latencies",
+ "spanner/operation_count",
+ "spanner/attempt_count",
+ "spanner/gfe_latencies",
+ }
+ assert expected_metrics.issubset(collected_metrics)
+
+ for metric in metric_data.resource_metrics[0].scope_metrics[0].metrics:
+ if metric.name == "spanner/operation_count":
+ point = next(iter(metric.data.data_points))
+ assert point.value == 1
+ assert point.attributes["method"] == "ExecuteSql"
+ return
+
+ pytest.fail("Metric 'spanner/operation_count' not found.")
diff --git a/tests/system/test_session_api.py b/tests/system/test_session_api.py
index 04d8ad799a..2b0caba4e1 100644
--- a/tests/system/test_session_api.py
+++ b/tests/system/test_session_api.py
@@ -30,6 +30,7 @@
from google.cloud.spanner_admin_database_v1 import DatabaseDialect
from google.cloud._helpers import UTC
+from google.cloud.spanner_v1._helpers import _get_cloud_region
from google.cloud.spanner_v1._helpers import AtomicCounter
from google.cloud.spanner_v1.data_types import JsonObject
from google.cloud.spanner_v1.database_sessions_manager import TransactionType
@@ -295,7 +296,9 @@ def sessions_database(
_helpers.retry_has_all_dll(sessions_database.reload)()
# Some tests expect there to be a session present in the pool.
- pool.put(pool.get())
+ # Experimental host connections only support multiplexed sessions
+ if not _helpers.USE_EXPERIMENTAL_HOST:
+ pool.put(pool.get())
yield sessions_database
@@ -354,6 +357,7 @@ def _make_attributes(db_instance, **kwargs):
"db.url": "spanner.googleapis.com",
"net.host.name": "spanner.googleapis.com",
"db.instance": db_instance,
+ "cloud.region": _get_cloud_region(),
"gcp.client.service": "spanner",
"gcp.client.version": ot_helpers.LIB_VERSION,
"gcp.client.repo": "googleapis/python-spanner",
@@ -2268,7 +2272,7 @@ def test_read_with_range_keys_and_index_open_open(sessions_database):
assert rows == expected
-def test_partition_read_w_index(sessions_database, not_emulator):
+def test_partition_read_w_index(sessions_database, not_emulator, not_experimental_host):
sd = _sample_data
row_count = 10
columns = sd.COLUMNS[1], sd.COLUMNS[2]
@@ -3052,7 +3056,7 @@ def test_execute_sql_returning_transfinite_floats(sessions_database, not_postgre
assert math.isnan(float_array[2])
-def test_partition_query(sessions_database, not_emulator):
+def test_partition_query(sessions_database, not_emulator, not_experimental_host):
row_count = 40
sql = f"SELECT * FROM {_sample_data.TABLE}"
committed = _set_up_table(sessions_database, row_count)
@@ -3071,7 +3075,7 @@ def test_partition_query(sessions_database, not_emulator):
batch_txn.close()
-def test_run_partition_query(sessions_database, not_emulator):
+def test_run_partition_query(sessions_database, not_emulator, not_experimental_host):
row_count = 40
sql = f"SELECT * FROM {_sample_data.TABLE}"
committed = _set_up_table(sessions_database, row_count)
diff --git a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py
index f62b95c85d..e210da1d37 100644
--- a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py
+++ b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py
@@ -1136,6 +1136,7 @@ def test_database_admin_client_create_channel_credentials_file(
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
@@ -23315,6 +23316,7 @@ def test_database_admin_transport_create_channel(transport_class, grpc_helpers):
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
@@ -23347,6 +23349,7 @@ def test_database_admin_grpc_transport_client_cert_source_for_mtls(transport_cla
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
@@ -23593,6 +23596,7 @@ def test_database_admin_transport_channel_mtls_with_client_cert_source(transport
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
assert transport.grpc_channel == mock_grpc_channel
@@ -23640,6 +23644,7 @@ def test_database_admin_transport_channel_mtls_with_adc(transport_class):
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
assert transport.grpc_channel == mock_grpc_channel
diff --git a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py
index 52424e65d3..532014af96 100644
--- a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py
+++ b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py
@@ -1125,6 +1125,7 @@ def test_instance_admin_client_create_channel_credentials_file(
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
@@ -18621,6 +18622,7 @@ def test_instance_admin_transport_create_channel(transport_class, grpc_helpers):
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
@@ -18653,6 +18655,7 @@ def test_instance_admin_grpc_transport_client_cert_source_for_mtls(transport_cla
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
@@ -18881,6 +18884,7 @@ def test_instance_admin_transport_channel_mtls_with_client_cert_source(transport
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
assert transport.grpc_channel == mock_grpc_channel
@@ -18928,6 +18932,7 @@ def test_instance_admin_transport_channel_mtls_with_adc(transport_class):
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
assert transport.grpc_channel == mock_grpc_channel
diff --git a/tests/unit/gapic/spanner_v1/test_spanner.py b/tests/unit/gapic/spanner_v1/test_spanner.py
index 83d9d72f7f..d71d85a443 100644
--- a/tests/unit/gapic/spanner_v1/test_spanner.py
+++ b/tests/unit/gapic/spanner_v1/test_spanner.py
@@ -59,6 +59,7 @@
from google.cloud.spanner_v1.services.spanner import transports
from google.cloud.spanner_v1.types import commit_response
from google.cloud.spanner_v1.types import keys
+from google.cloud.spanner_v1.types import location
from google.cloud.spanner_v1.types import mutation
from google.cloud.spanner_v1.types import result_set
from google.cloud.spanner_v1.types import spanner
@@ -1066,6 +1067,7 @@ def test_spanner_client_create_channel_credentials_file(
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
@@ -12180,6 +12182,7 @@ def test_spanner_transport_create_channel(transport_class, grpc_helpers):
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
@@ -12209,6 +12212,7 @@ def test_spanner_grpc_transport_client_cert_source_for_mtls(transport_class):
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
@@ -12419,6 +12423,7 @@ def test_spanner_transport_channel_mtls_with_client_cert_source(transport_class)
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
assert transport.grpc_channel == mock_grpc_channel
@@ -12463,6 +12468,7 @@ def test_spanner_transport_channel_mtls_with_adc(transport_class):
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
+ ("grpc.keepalive_time_ms", 120000),
],
)
assert transport.grpc_channel == mock_grpc_channel
diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py
index 6f77d002cd..40db14607c 100644
--- a/tests/unit/test__helpers.py
+++ b/tests/unit/test__helpers.py
@@ -16,7 +16,11 @@
import unittest
import mock
-from google.cloud.spanner_v1 import TransactionOptions
+from opentelemetry.sdk.resources import Resource
+from opentelemetry.semconv.resource import ResourceAttributes
+
+
+from google.cloud.spanner_v1 import TransactionOptions, _helpers
class Test_merge_query_options(unittest.TestCase):
@@ -89,6 +93,48 @@ def test_base_object_merge_dict(self):
self.assertEqual(result, expected)
+class Test_get_cloud_region(unittest.TestCase):
+ def setUp(self):
+ _helpers._cloud_region = None
+
+ def _callFUT(self, *args, **kw):
+ from google.cloud.spanner_v1._helpers import _get_cloud_region
+
+ return _get_cloud_region(*args, **kw)
+
+ @mock.patch("google.cloud.spanner_v1._helpers.GoogleCloudResourceDetector.detect")
+ def test_get_location_with_region(self, mock_detect):
+ """Test that _get_cloud_region returns the region when detected."""
+ mock_resource = Resource.create(
+ {ResourceAttributes.CLOUD_REGION: "us-central1"}
+ )
+ mock_detect.return_value = mock_resource
+
+ location = self._callFUT()
+ self.assertEqual(location, "us-central1")
+
+ @mock.patch("google.cloud.spanner_v1._helpers.GoogleCloudResourceDetector.detect")
+ def test_get_location_without_region(self, mock_detect):
+ """Test that _get_cloud_region returns 'global' when no region is detected."""
+ mock_resource = Resource.create({}) # No region attribute
+ mock_detect.return_value = mock_resource
+
+ location = self._callFUT()
+ self.assertEqual(location, "global")
+
+ @mock.patch("google.cloud.spanner_v1._helpers.GoogleCloudResourceDetector.detect")
+ def test_get_location_with_exception(self, mock_detect):
+ """Test that _get_cloud_region returns 'global' and logs a warning on exception."""
+ mock_detect.side_effect = Exception("detector failed")
+
+ with self.assertLogs(
+ "google.cloud.spanner_v1._helpers", level="WARNING"
+ ) as log:
+ location = self._callFUT()
+ self.assertEqual(location, "global")
+ self.assertIn("Failed to detect GCP resource location", log.output[0])
+
+
class Test_make_value_pb(unittest.TestCase):
def _callFUT(self, *args, **kw):
from google.cloud.spanner_v1._helpers import _make_value_pb
diff --git a/tests/unit/test__opentelemetry_tracing.py b/tests/unit/test__opentelemetry_tracing.py
index b3d49355c0..da75e940b6 100644
--- a/tests/unit/test__opentelemetry_tracing.py
+++ b/tests/unit/test__opentelemetry_tracing.py
@@ -1,7 +1,4 @@
-import importlib
import mock
-import unittest
-import sys
try:
from opentelemetry import trace as trace_api
@@ -10,12 +7,12 @@
pass
from google.api_core.exceptions import GoogleAPICallError
+from google.cloud.spanner_v1._helpers import GOOGLE_CLOUD_REGION_GLOBAL
from google.cloud.spanner_v1 import _opentelemetry_tracing
from tests._helpers import (
OpenTelemetryBase,
LIB_VERSION,
- HAS_OPENTELEMETRY_INSTALLED,
enrich_with_otel_scope,
)
@@ -34,200 +31,192 @@ def _make_session():
return mock.Mock(autospec=Session, instance=True)
-# Skip all of these tests if we don't have OpenTelemetry
-if HAS_OPENTELEMETRY_INSTALLED:
-
- class TestNoTracing(unittest.TestCase):
- def setUp(self):
- self._temp_opentelemetry = sys.modules["opentelemetry"]
-
- sys.modules["opentelemetry"] = None
- importlib.reload(_opentelemetry_tracing)
-
- def tearDown(self):
- sys.modules["opentelemetry"] = self._temp_opentelemetry
- importlib.reload(_opentelemetry_tracing)
-
- def test_no_trace_call(self):
- with _opentelemetry_tracing.trace_call("Test", _make_session()) as no_span:
- self.assertIsNone(no_span)
-
- class TestTracing(OpenTelemetryBase):
- def test_trace_call(self):
- extra_attributes = {
- "attribute1": "value1",
- # Since our database is mocked, we have to override the db.instance parameter so it is a string
- "db.instance": "database_name",
+class TestTracing(OpenTelemetryBase):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_trace_call(self, mock_region):
+ extra_attributes = {
+ "attribute1": "value1",
+ # Since our database is mocked, we have to override the db.instance parameter so it is a string
+ "db.instance": "database_name",
+ }
+
+ expected_attributes = enrich_with_otel_scope(
+ {
+ "db.type": "spanner",
+ "db.url": "spanner.googleapis.com",
+ "net.host.name": "spanner.googleapis.com",
+ "cloud.region": GOOGLE_CLOUD_REGION_GLOBAL,
+ "gcp.client.service": "spanner",
+ "gcp.client.version": LIB_VERSION,
+ "gcp.client.repo": "googleapis/python-spanner",
}
+ )
+ expected_attributes.update(extra_attributes)
+
+ with _opentelemetry_tracing.trace_call(
+ "CloudSpanner.Test", _make_session(), extra_attributes
+ ) as span:
+ span.set_attribute("after_setup_attribute", 1)
+
+ expected_attributes["after_setup_attribute"] = 1
+
+ span_list = self.ot_exporter.get_finished_spans()
+ self.assertEqual(len(span_list), 1)
+ span = span_list[0]
+ self.assertEqual(span.kind, trace_api.SpanKind.CLIENT)
+ self.assertEqual(span.attributes, expected_attributes)
+ self.assertEqual(span.name, "CloudSpanner.Test")
+ self.assertEqual(span.status.status_code, StatusCode.OK)
+
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_trace_error(self, mock_region):
+ extra_attributes = {"db.instance": "database_name"}
+
+ expected_attributes = enrich_with_otel_scope(
+ {
+ "db.type": "spanner",
+ "db.url": "spanner.googleapis.com",
+ "net.host.name": "spanner.googleapis.com",
+ "cloud.region": GOOGLE_CLOUD_REGION_GLOBAL,
+ "gcp.client.service": "spanner",
+ "gcp.client.version": LIB_VERSION,
+ "gcp.client.repo": "googleapis/python-spanner",
+ }
+ )
+ expected_attributes.update(extra_attributes)
- expected_attributes = enrich_with_otel_scope(
- {
- "db.type": "spanner",
- "db.url": "spanner.googleapis.com",
- "net.host.name": "spanner.googleapis.com",
- "gcp.client.service": "spanner",
- "gcp.client.version": LIB_VERSION,
- "gcp.client.repo": "googleapis/python-spanner",
- }
- )
- expected_attributes.update(extra_attributes)
-
+ with self.assertRaises(GoogleAPICallError):
with _opentelemetry_tracing.trace_call(
"CloudSpanner.Test", _make_session(), extra_attributes
) as span:
- span.set_attribute("after_setup_attribute", 1)
-
- expected_attributes["after_setup_attribute"] = 1
-
- span_list = self.ot_exporter.get_finished_spans()
- self.assertEqual(len(span_list), 1)
- span = span_list[0]
- self.assertEqual(span.kind, trace_api.SpanKind.CLIENT)
- self.assertEqual(span.attributes, expected_attributes)
- self.assertEqual(span.name, "CloudSpanner.Test")
- self.assertEqual(span.status.status_code, StatusCode.OK)
-
- def test_trace_error(self):
- extra_attributes = {"db.instance": "database_name"}
-
- expected_attributes = enrich_with_otel_scope(
- {
- "db.type": "spanner",
- "db.url": "spanner.googleapis.com",
- "net.host.name": "spanner.googleapis.com",
- "gcp.client.service": "spanner",
- "gcp.client.version": LIB_VERSION,
- "gcp.client.repo": "googleapis/python-spanner",
- }
- )
- expected_attributes.update(extra_attributes)
-
- with self.assertRaises(GoogleAPICallError):
- with _opentelemetry_tracing.trace_call(
- "CloudSpanner.Test", _make_session(), extra_attributes
- ) as span:
- from google.api_core.exceptions import InvalidArgument
-
- raise _make_rpc_error(InvalidArgument)
-
- span_list = self.ot_exporter.get_finished_spans()
- self.assertEqual(len(span_list), 1)
- span = span_list[0]
- self.assertEqual(span.kind, trace_api.SpanKind.CLIENT)
- self.assertEqual(dict(span.attributes), expected_attributes)
- self.assertEqual(span.name, "CloudSpanner.Test")
- self.assertEqual(span.status.status_code, StatusCode.ERROR)
-
- def test_trace_grpc_error(self):
- extra_attributes = {"db.instance": "database_name"}
-
- expected_attributes = enrich_with_otel_scope(
- {
- "db.type": "spanner",
- "db.url": "spanner.googleapis.com:443",
- "net.host.name": "spanner.googleapis.com:443",
- }
- )
- expected_attributes.update(extra_attributes)
-
- with self.assertRaises(GoogleAPICallError):
- with _opentelemetry_tracing.trace_call(
- "CloudSpanner.Test", _make_session(), extra_attributes
- ) as span:
- from google.api_core.exceptions import DataLoss
-
- raise DataLoss("error")
-
- span_list = self.ot_exporter.get_finished_spans()
- self.assertEqual(len(span_list), 1)
- span = span_list[0]
- self.assertEqual(span.status.status_code, StatusCode.ERROR)
-
- def test_trace_codeless_error(self):
- extra_attributes = {"db.instance": "database_name"}
-
- expected_attributes = enrich_with_otel_scope(
- {
- "db.type": "spanner",
- "db.url": "spanner.googleapis.com:443",
- "net.host.name": "spanner.googleapis.com:443",
- }
- )
- expected_attributes.update(extra_attributes)
-
- with self.assertRaises(GoogleAPICallError):
- with _opentelemetry_tracing.trace_call(
- "CloudSpanner.Test", _make_session(), extra_attributes
- ) as span:
- raise GoogleAPICallError("error")
-
- span_list = self.ot_exporter.get_finished_spans()
- self.assertEqual(len(span_list), 1)
- span = span_list[0]
- self.assertEqual(span.status.status_code, StatusCode.ERROR)
-
- def test_trace_call_terminal_span_status_ALWAYS_ON_sampler(self):
- # Verify that we don't unconditionally set the terminal span status to
- # SpanStatus.OK per https://github.com/googleapis/python-spanner/issues/1246
- from opentelemetry.sdk.trace.export import SimpleSpanProcessor
- from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
- InMemorySpanExporter,
- )
- from opentelemetry.trace.status import Status, StatusCode
- from opentelemetry.sdk.trace import TracerProvider
- from opentelemetry.sdk.trace.sampling import ALWAYS_ON
-
- tracer_provider = TracerProvider(sampler=ALWAYS_ON)
- trace_exporter = InMemorySpanExporter()
- tracer_provider.add_span_processor(SimpleSpanProcessor(trace_exporter))
- observability_options = dict(tracer_provider=tracer_provider)
+ from google.api_core.exceptions import InvalidArgument
+
+ raise _make_rpc_error(InvalidArgument)
+
+ span_list = self.ot_exporter.get_finished_spans()
+ self.assertEqual(len(span_list), 1)
+ span = span_list[0]
+ self.assertEqual(span.kind, trace_api.SpanKind.CLIENT)
+ self.assertEqual(dict(span.attributes), expected_attributes)
+ self.assertEqual(span.name, "CloudSpanner.Test")
+ self.assertEqual(span.status.status_code, StatusCode.ERROR)
+
+ def test_trace_grpc_error(self):
+ extra_attributes = {"db.instance": "database_name"}
+
+ expected_attributes = enrich_with_otel_scope(
+ {
+ "db.type": "spanner",
+ "db.url": "spanner.googleapis.com:443",
+ "net.host.name": "spanner.googleapis.com:443",
+ }
+ )
+ expected_attributes.update(extra_attributes)
- session = _make_session()
+ with self.assertRaises(GoogleAPICallError):
with _opentelemetry_tracing.trace_call(
- "VerifyTerminalSpanStatus",
- session,
- observability_options=observability_options,
+ "CloudSpanner.Test", _make_session(), extra_attributes
) as span:
- span.set_status(Status(StatusCode.ERROR, "Our error exhibit"))
-
- span_list = trace_exporter.get_finished_spans()
- got_statuses = []
-
- for span in span_list:
- got_statuses.append(
- (span.name, span.status.status_code, span.status.description)
- )
-
- want_statuses = [
- ("VerifyTerminalSpanStatus", StatusCode.ERROR, "Our error exhibit"),
- ]
- assert got_statuses == want_statuses
-
- def test_trace_call_terminal_span_status_ALWAYS_OFF_sampler(self):
- # Verify that we get the correct status even when using the ALWAYS_OFF
- # sampler which produces the NonRecordingSpan per
- # https://github.com/googleapis/python-spanner/issues/1286
- from opentelemetry.sdk.trace.export import SimpleSpanProcessor
- from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
- InMemorySpanExporter,
- )
- from opentelemetry.sdk.trace import TracerProvider
- from opentelemetry.sdk.trace.sampling import ALWAYS_OFF
+ from google.api_core.exceptions import DataLoss
+
+ raise DataLoss("error")
- tracer_provider = TracerProvider(sampler=ALWAYS_OFF)
- trace_exporter = InMemorySpanExporter()
- tracer_provider.add_span_processor(SimpleSpanProcessor(trace_exporter))
- observability_options = dict(tracer_provider=tracer_provider)
+ span_list = self.ot_exporter.get_finished_spans()
+ self.assertEqual(len(span_list), 1)
+ span = span_list[0]
+ self.assertEqual(span.status.status_code, StatusCode.ERROR)
- session = _make_session()
- used_span = None
+ def test_trace_codeless_error(self):
+ extra_attributes = {"db.instance": "database_name"}
+
+ expected_attributes = enrich_with_otel_scope(
+ {
+ "db.type": "spanner",
+ "db.url": "spanner.googleapis.com:443",
+ "net.host.name": "spanner.googleapis.com:443",
+ }
+ )
+ expected_attributes.update(extra_attributes)
+
+ with self.assertRaises(GoogleAPICallError):
with _opentelemetry_tracing.trace_call(
- "VerifyWithNonRecordingSpan",
- session,
- observability_options=observability_options,
+ "CloudSpanner.Test", _make_session(), extra_attributes
) as span:
- used_span = span
+ raise GoogleAPICallError("error")
+
+ span_list = self.ot_exporter.get_finished_spans()
+ self.assertEqual(len(span_list), 1)
+ span = span_list[0]
+ self.assertEqual(span.status.status_code, StatusCode.ERROR)
+
+ def test_trace_call_terminal_span_status_ALWAYS_ON_sampler(self):
+ # Verify that we don't unconditionally set the terminal span status to
+ # SpanStatus.OK per https://github.com/googleapis/python-spanner/issues/1246
+ from opentelemetry.sdk.trace.export import SimpleSpanProcessor
+ from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
+ InMemorySpanExporter,
+ )
+ from opentelemetry.trace.status import Status, StatusCode
+ from opentelemetry.sdk.trace import TracerProvider
+ from opentelemetry.sdk.trace.sampling import ALWAYS_ON
+
+ tracer_provider = TracerProvider(sampler=ALWAYS_ON)
+ trace_exporter = InMemorySpanExporter()
+ tracer_provider.add_span_processor(SimpleSpanProcessor(trace_exporter))
+ observability_options = dict(tracer_provider=tracer_provider)
+
+ session = _make_session()
+ with _opentelemetry_tracing.trace_call(
+ "VerifyTerminalSpanStatus",
+ session,
+ observability_options=observability_options,
+ ) as span:
+ span.set_status(Status(StatusCode.ERROR, "Our error exhibit"))
+
+ span_list = trace_exporter.get_finished_spans()
+ got_statuses = []
+
+ for span in span_list:
+ got_statuses.append(
+ (span.name, span.status.status_code, span.status.description)
+ )
- assert type(used_span).__name__ == "NonRecordingSpan"
- span_list = list(trace_exporter.get_finished_spans())
- assert span_list == []
+ want_statuses = [
+ ("VerifyTerminalSpanStatus", StatusCode.ERROR, "Our error exhibit"),
+ ]
+ assert got_statuses == want_statuses
+
+ def test_trace_call_terminal_span_status_ALWAYS_OFF_sampler(self):
+ # Verify that we get the correct status even when using the ALWAYS_OFF
+ # sampler which produces the NonRecordingSpan per
+ # https://github.com/googleapis/python-spanner/issues/1286
+ from opentelemetry.sdk.trace.export import SimpleSpanProcessor
+ from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
+ InMemorySpanExporter,
+ )
+ from opentelemetry.sdk.trace import TracerProvider
+ from opentelemetry.sdk.trace.sampling import ALWAYS_OFF
+
+ tracer_provider = TracerProvider(sampler=ALWAYS_OFF)
+ trace_exporter = InMemorySpanExporter()
+ tracer_provider.add_span_processor(SimpleSpanProcessor(trace_exporter))
+ observability_options = dict(tracer_provider=tracer_provider)
+
+ session = _make_session()
+ used_span = None
+ with _opentelemetry_tracing.trace_call(
+ "VerifyWithNonRecordingSpan",
+ session,
+ observability_options=observability_options,
+ ) as span:
+ used_span = span
+
+ assert type(used_span).__name__ == "NonRecordingSpan"
+ span_list = list(trace_exporter.get_finished_spans())
+ assert span_list == []
diff --git a/tests/unit/test_batch.py b/tests/unit/test_batch.py
index 1582fcf4a9..e8297030eb 100644
--- a/tests/unit/test_batch.py
+++ b/tests/unit/test_batch.py
@@ -30,6 +30,7 @@
BatchWriteResponse,
DefaultTransactionOptions,
)
+import mock
from google.cloud._helpers import UTC, _datetime_to_pb_timestamp
import datetime
from google.api_core.exceptions import Aborted, Unknown
@@ -57,6 +58,7 @@
"gcp.client.service": "spanner",
"gcp.client.version": LIB_VERSION,
"gcp.client.repo": "googleapis/python-spanner",
+ "cloud.region": "global",
}
enrich_with_otel_scope(BASE_ATTRIBUTES)
@@ -198,7 +200,11 @@ def test_commit_already_committed(self):
self.assertNoSpans()
- def test_commit_grpc_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_grpc_error(self, mock_region):
keys = [[0], [1], [2]]
keyset = KeySet(keys=keys)
database = _Database()
@@ -219,7 +225,11 @@ def test_commit_grpc_error(self):
),
)
- def test_commit_ok(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_ok(self, mock_region):
now = datetime.datetime.utcnow().replace(tzinfo=UTC)
now_pb = _datetime_to_pb_timestamp(now)
response = CommitResponse(commit_timestamp=now_pb)
@@ -376,35 +386,59 @@ def _test_commit_with_options(
self.assertEqual(max_commit_delay_in, max_commit_delay)
- def test_commit_w_request_tag_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_w_request_tag_success(self, mock_region):
request_options = RequestOptions(
request_tag="tag-1",
)
self._test_commit_with_options(request_options=request_options)
- def test_commit_w_transaction_tag_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_w_transaction_tag_success(self, mock_region):
request_options = RequestOptions(
transaction_tag="tag-1-1",
)
self._test_commit_with_options(request_options=request_options)
- def test_commit_w_request_and_transaction_tag_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_w_request_and_transaction_tag_success(self, mock_region):
request_options = RequestOptions(
request_tag="tag-1",
transaction_tag="tag-1-1",
)
self._test_commit_with_options(request_options=request_options)
- def test_commit_w_request_and_transaction_tag_dictionary_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_w_request_and_transaction_tag_dictionary_success(self, mock_region):
request_options = {"request_tag": "tag-1", "transaction_tag": "tag-1-1"}
self._test_commit_with_options(request_options=request_options)
- def test_commit_w_incorrect_tag_dictionary_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_w_incorrect_tag_dictionary_error(self, mock_region):
request_options = {"incorrect_tag": "tag-1-1"}
with self.assertRaises(ValueError):
self._test_commit_with_options(request_options=request_options)
- def test_commit_w_max_commit_delay(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_w_max_commit_delay(self, mock_region):
request_options = RequestOptions(
request_tag="tag-1",
)
@@ -413,7 +447,11 @@ def test_commit_w_max_commit_delay(self):
max_commit_delay_in=datetime.timedelta(milliseconds=100),
)
- def test_commit_w_exclude_txn_from_change_streams(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_w_exclude_txn_from_change_streams(self, mock_region):
request_options = RequestOptions(
request_tag="tag-1",
)
@@ -421,7 +459,11 @@ def test_commit_w_exclude_txn_from_change_streams(self):
request_options=request_options, exclude_txn_from_change_streams=True
)
- def test_commit_w_isolation_level(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_w_isolation_level(self, mock_region):
request_options = RequestOptions(
request_tag="tag-1",
)
@@ -430,7 +472,11 @@ def test_commit_w_isolation_level(self):
isolation_level=TransactionOptions.IsolationLevel.REPEATABLE_READ,
)
- def test_commit_w_read_lock_mode(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_w_read_lock_mode(self, mock_region):
request_options = RequestOptions(
request_tag="tag-1",
)
@@ -439,7 +485,11 @@ def test_commit_w_read_lock_mode(self):
read_lock_mode=TransactionOptions.ReadWrite.ReadLockMode.OPTIMISTIC,
)
- def test_commit_w_isolation_level_and_read_lock_mode(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_w_isolation_level_and_read_lock_mode(self, mock_region):
request_options = RequestOptions(
request_tag="tag-1",
)
@@ -449,7 +499,11 @@ def test_commit_w_isolation_level_and_read_lock_mode(self):
read_lock_mode=TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC,
)
- def test_context_mgr_already_committed(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_context_mgr_already_committed(self, mock_region):
now = datetime.datetime.utcnow().replace(tzinfo=UTC)
database = _Database()
api = database.spanner_api = _FauxSpannerAPI()
@@ -463,7 +517,11 @@ def test_context_mgr_already_committed(self):
self.assertEqual(api._committed, None)
- def test_context_mgr_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_context_mgr_success(self, mock_region):
now = datetime.datetime.utcnow().replace(tzinfo=UTC)
now_pb = _datetime_to_pb_timestamp(now)
response = CommitResponse(commit_timestamp=now_pb)
@@ -510,7 +568,11 @@ def test_context_mgr_success(self):
),
)
- def test_context_mgr_failure(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_context_mgr_failure(self, mock_region):
now = datetime.datetime.utcnow().replace(tzinfo=UTC)
now_pb = _datetime_to_pb_timestamp(now)
response = CommitResponse(commit_timestamp=now_pb)
@@ -541,7 +603,11 @@ def test_ctor(self):
groups = self._make_one(session)
self.assertIs(groups._session, session)
- def test_batch_write_already_committed(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_write_already_committed(self, mock_region):
keys = [[0], [1], [2]]
keyset = KeySet(keys=keys)
database = _Database()
@@ -564,7 +630,11 @@ def test_batch_write_already_committed(self):
with self.assertRaises(ValueError):
groups.batch_write()
- def test_batch_write_grpc_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_write_grpc_error(self, mock_region):
keys = [[0], [1], [2]]
keyset = KeySet(keys=keys)
database = _Database()
@@ -662,25 +732,49 @@ def _test_batch_write_with_request_options(
),
)
- def test_batch_write_no_request_options(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_write_no_request_options(self, mock_region):
self._test_batch_write_with_request_options()
- def test_batch_write_end_to_end_tracing_enabled(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_write_end_to_end_tracing_enabled(self, mock_region):
self._test_batch_write_with_request_options(enable_end_to_end_tracing=True)
- def test_batch_write_w_transaction_tag_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_write_w_transaction_tag_success(self, mock_region):
self._test_batch_write_with_request_options(
RequestOptions(transaction_tag="tag-1-1")
)
- def test_batch_write_w_transaction_tag_dictionary_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_write_w_transaction_tag_dictionary_success(self, mock_region):
self._test_batch_write_with_request_options({"transaction_tag": "tag-1-1"})
- def test_batch_write_w_incorrect_tag_dictionary_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_write_w_incorrect_tag_dictionary_error(self, mock_region):
with self.assertRaises(ValueError):
self._test_batch_write_with_request_options({"incorrect_tag": "tag-1-1"})
- def test_batch_write_w_exclude_txn_from_change_streams(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_write_w_exclude_txn_from_change_streams(self, mock_region):
self._test_batch_write_with_request_options(
exclude_txn_from_change_streams=True
)
diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py
index f0d246673a..ab00d45268 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -22,6 +22,7 @@
from tests._builders import build_scoped_credentials
+@mock.patch.dict(os.environ, {"SPANNER_DISABLE_BUILTIN_METRICS": "true"})
class TestClient(unittest.TestCase):
PROJECT = "PROJECT"
PATH = "projects/%s" % (PROJECT,)
@@ -161,8 +162,7 @@ def test_constructor_custom_client_info(self):
creds = build_scoped_credentials()
self._constructor_test_helper(expected_scopes, creds, client_info=client_info)
- # Disable metrics to avoid google.auth.default calls from Metric Exporter
- @mock.patch.dict(os.environ, {"SPANNER_ENABLE_BUILTIN_METRICS": ""})
+ # Metrics are disabled by default for tests in this class
def test_constructor_implicit_credentials(self):
from google.cloud.spanner_v1 import client as MUT
@@ -255,8 +255,8 @@ def test_constructor_w_directed_read_options(self):
expected_scopes, creds, directed_read_options=self.DIRECTED_READ_OPTIONS
)
- @mock.patch.dict(os.environ, {"SPANNER_ENABLE_BUILTIN_METRICS": "true"})
@mock.patch("google.cloud.spanner_v1.client.SpannerMetricsTracerFactory")
+ @mock.patch.dict(os.environ, {"SPANNER_DISABLE_BUILTIN_METRICS": "false"})
def test_constructor_w_metrics_initialization_error(
self, mock_spanner_metrics_factory
):
@@ -278,6 +278,37 @@ def test_constructor_w_metrics_initialization_error(
)
mock_spanner_metrics_factory.assert_called_once()
+ @mock.patch("google.cloud.spanner_v1.client.SpannerMetricsTracerFactory")
+ @mock.patch.dict(os.environ, {"SPANNER_DISABLE_BUILTIN_METRICS": "true"})
+ def test_constructor_w_disable_builtin_metrics_using_env(
+ self, mock_spanner_metrics_factory
+ ):
+ """
+ Test that Client constructor disable metrics using Spanner Option.
+ """
+ from google.cloud.spanner_v1.client import Client
+
+ creds = build_scoped_credentials()
+ client = Client(project=self.PROJECT, credentials=creds)
+ self.assertIsNotNone(client)
+ mock_spanner_metrics_factory.assert_called_once_with(enabled=False)
+
+ @mock.patch("google.cloud.spanner_v1.client.SpannerMetricsTracerFactory")
+ def test_constructor_w_disable_builtin_metrics_using_option(
+ self, mock_spanner_metrics_factory
+ ):
+ """
+ Test that Client constructor disable metrics using Spanner Option.
+ """
+ from google.cloud.spanner_v1.client import Client
+
+ creds = build_scoped_credentials()
+ client = Client(
+ project=self.PROJECT, credentials=creds, disable_builtin_metrics=True
+ )
+ self.assertIsNotNone(client)
+ mock_spanner_metrics_factory.assert_called_once_with(enabled=False)
+
def test_constructor_route_to_leader_disbled(self):
from google.cloud.spanner_v1 import client as MUT
diff --git a/tests/unit/test_database.py b/tests/unit/test_database.py
index fa6792b9da..92001fb52c 100644
--- a/tests/unit/test_database.py
+++ b/tests/unit/test_database.py
@@ -3560,11 +3560,14 @@ def _next_nth_request(self):
class _Instance(object):
- def __init__(self, name, client=_Client(), emulator_host=None):
+ def __init__(
+ self, name, client=_Client(), emulator_host=None, experimental_host=None
+ ):
self.name = name
self.instance_id = name.rsplit("/", 1)[1]
self._client = client
self.emulator_host = emulator_host
+ self.experimental_host = experimental_host
class _Backup(object):
diff --git a/tests/unit/test_pool.py b/tests/unit/test_pool.py
index 409f4b043b..ec03e4350b 100644
--- a/tests/unit/test_pool.py
+++ b/tests/unit/test_pool.py
@@ -155,6 +155,7 @@ class TestFixedSizePool(OpenTelemetryBase):
"gcp.client.service": "spanner",
"gcp.client.version": LIB_VERSION,
"gcp.client.repo": "googleapis/python-spanner",
+ "cloud.region": "global",
}
enrich_with_otel_scope(BASE_ATTRIBUTES)
@@ -175,7 +176,11 @@ def test_ctor_defaults(self):
self.assertEqual(pool.labels, {})
self.assertIsNone(pool.database_role)
- def test_ctor_explicit(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_ctor_explicit(self, mock_region):
labels = {"foo": "bar"}
database_role = "dummy-role"
pool = self._make_one(
@@ -188,7 +193,11 @@ def test_ctor_explicit(self):
self.assertEqual(pool.labels, labels)
self.assertEqual(pool.database_role, database_role)
- def test_bind(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_bind(self, mock_region):
database_role = "dummy-role"
pool = self._make_one()
database = _Database("name")
@@ -209,7 +218,11 @@ def test_bind(self):
for session in SESSIONS:
session.create.assert_not_called()
- def test_get_active(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_get_active(self, mock_region):
pool = self._make_one(size=4)
database = _Database("name")
SESSIONS = sorted([_Session(database) for i in range(0, 4)])
@@ -223,7 +236,11 @@ def test_get_active(self):
self.assertFalse(session._exists_checked)
self.assertFalse(pool._sessions.full())
- def test_get_non_expired(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_get_non_expired(self, mock_region):
pool = self._make_one(size=4)
database = _Database("name")
last_use_time = datetime.utcnow() - timedelta(minutes=56)
@@ -240,7 +257,11 @@ def test_get_non_expired(self):
self.assertTrue(session._exists_checked)
self.assertFalse(pool._sessions.full())
- def test_spans_bind_get(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_spans_bind_get(self, mock_region):
if not HAS_OPENTELEMETRY_INSTALLED:
return
@@ -285,7 +306,11 @@ def test_spans_bind_get(self):
]
self.assertSpanEvents("pool.Get", wantEventNames, span_list[-1])
- def test_spans_bind_get_empty_pool(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_spans_bind_get_empty_pool(self, mock_region):
if not HAS_OPENTELEMETRY_INSTALLED:
return
@@ -329,7 +354,11 @@ def test_spans_bind_get_empty_pool(self):
]
assert got_all_events == want_all_events
- def test_spans_pool_bind(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_spans_pool_bind(self, mock_region):
if not HAS_OPENTELEMETRY_INSTALLED:
return
@@ -403,7 +432,11 @@ def test_spans_pool_bind(self):
]
assert got_all_events == want_all_events
- def test_get_expired(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_get_expired(self, mock_region):
pool = self._make_one(size=4)
database = _Database("name")
last_use_time = datetime.utcnow() - timedelta(minutes=65)
@@ -419,7 +452,11 @@ def test_get_expired(self):
self.assertTrue(SESSIONS[0]._exists_checked)
self.assertFalse(pool._sessions.full())
- def test_get_empty_default_timeout(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_get_empty_default_timeout(self, mock_region):
import queue
pool = self._make_one(size=1)
@@ -430,7 +467,11 @@ def test_get_empty_default_timeout(self):
self.assertEqual(session_queue._got, {"block": True, "timeout": 10})
- def test_get_empty_explicit_timeout(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_get_empty_explicit_timeout(self, mock_region):
import queue
pool = self._make_one(size=1, default_timeout=0.1)
@@ -441,7 +482,11 @@ def test_get_empty_explicit_timeout(self):
self.assertEqual(session_queue._got, {"block": True, "timeout": 1})
- def test_put_full(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_put_full(self, mock_region):
import queue
pool = self._make_one(size=4)
@@ -456,7 +501,11 @@ def test_put_full(self):
self.assertTrue(pool._sessions.full())
- def test_put_non_full(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_put_non_full(self, mock_region):
pool = self._make_one(size=4)
database = _Database("name")
SESSIONS = [_Session(database)] * 4
@@ -468,7 +517,11 @@ def test_put_non_full(self):
self.assertTrue(pool._sessions.full())
- def test_clear(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_clear(self, mock_region):
pool = self._make_one()
database = _Database("name")
SESSIONS = [_Session(database)] * 10
@@ -496,6 +549,7 @@ class TestBurstyPool(OpenTelemetryBase):
"gcp.client.service": "spanner",
"gcp.client.version": LIB_VERSION,
"gcp.client.repo": "googleapis/python-spanner",
+ "cloud.region": "global",
}
enrich_with_otel_scope(BASE_ATTRIBUTES)
@@ -525,7 +579,11 @@ def test_ctor_explicit(self):
self.assertEqual(pool.labels, labels)
self.assertEqual(pool.database_role, database_role)
- def test_ctor_explicit_w_database_role_in_db(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_ctor_explicit_w_database_role_in_db(self, mock_region):
database_role = "dummy-role"
pool = self._make_one()
database = pool._database = _Database("name")
@@ -533,7 +591,11 @@ def test_ctor_explicit_w_database_role_in_db(self):
pool.bind(database)
self.assertEqual(pool.database_role, database_role)
- def test_get_empty(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_get_empty(self, mock_region):
pool = self._make_one()
database = _Database("name")
pool._new_session = mock.Mock(return_value=_Session(database))
@@ -546,7 +608,11 @@ def test_get_empty(self):
session.create.assert_called()
self.assertTrue(pool._sessions.empty())
- def test_spans_get_empty_pool(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_spans_get_empty_pool(self, mock_region):
if not HAS_OPENTELEMETRY_INSTALLED:
return
@@ -584,7 +650,11 @@ def test_spans_get_empty_pool(self):
]
self.assertSpanEvents("pool.Get", wantEventNames, span=create_span)
- def test_get_non_empty_session_exists(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_get_non_empty_session_exists(self, mock_region):
pool = self._make_one()
database = _Database("name")
previous = _Session(database)
@@ -598,7 +668,11 @@ def test_get_non_empty_session_exists(self):
self.assertTrue(session._exists_checked)
self.assertTrue(pool._sessions.empty())
- def test_spans_get_non_empty_session_exists(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_spans_get_non_empty_session_exists(self, mock_region):
# Tests the spans produces when you invoke pool.bind
# and then insert a session into the pool.
pool = self._make_one()
@@ -622,7 +696,11 @@ def test_spans_get_non_empty_session_exists(self):
["Acquiring session", "Waiting for a session to become available"],
)
- def test_get_non_empty_session_expired(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_get_non_empty_session_expired(self, mock_region):
pool = self._make_one()
database = _Database("name")
previous = _Session(database, exists=False)
@@ -639,7 +717,11 @@ def test_get_non_empty_session_expired(self):
self.assertFalse(session._exists_checked)
self.assertTrue(pool._sessions.empty())
- def test_put_empty(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_put_empty(self, mock_region):
pool = self._make_one()
database = _Database("name")
pool.bind(database)
@@ -649,7 +731,11 @@ def test_put_empty(self):
self.assertFalse(pool._sessions.empty())
- def test_spans_put_empty(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_spans_put_empty(self, mock_region):
# Tests the spans produced when you put sessions into an empty pool.
pool = self._make_one()
database = _Database("name")
@@ -665,7 +751,11 @@ def test_spans_put_empty(self):
attributes=TestBurstyPool.BASE_ATTRIBUTES,
)
- def test_put_full(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_put_full(self, mock_region):
pool = self._make_one(target_size=1)
database = _Database("name")
pool.bind(database)
@@ -679,7 +769,11 @@ def test_put_full(self):
self.assertTrue(younger._deleted)
self.assertIs(pool.get(), older)
- def test_spans_put_full(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_spans_put_full(self, mock_region):
# This scenario tests the spans produced from putting an older
# session into a pool that is already full.
pool = self._make_one(target_size=1)
@@ -701,7 +795,11 @@ def test_spans_put_full(self):
attributes=TestBurstyPool.BASE_ATTRIBUTES,
)
- def test_put_full_expired(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_put_full_expired(self, mock_region):
pool = self._make_one(target_size=1)
database = _Database("name")
pool.bind(database)
@@ -715,7 +813,11 @@ def test_put_full_expired(self):
self.assertTrue(younger._deleted)
self.assertIs(pool.get(), older)
- def test_clear(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_clear(self, mock_region):
pool = self._make_one()
database = _Database("name")
pool.bind(database)
@@ -737,6 +839,7 @@ class TestPingingPool(OpenTelemetryBase):
"gcp.client.service": "spanner",
"gcp.client.version": LIB_VERSION,
"gcp.client.repo": "googleapis/python-spanner",
+ "cloud.region": "global",
}
enrich_with_otel_scope(BASE_ATTRIBUTES)
@@ -776,7 +879,11 @@ def test_ctor_explicit(self):
self.assertEqual(pool.labels, labels)
self.assertEqual(pool.database_role, database_role)
- def test_ctor_explicit_w_database_role_in_db(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_ctor_explicit_w_database_role_in_db(self, mock_region):
database_role = "dummy-role"
pool = self._make_one()
database = pool._database = _Database("name")
@@ -786,7 +893,11 @@ def test_ctor_explicit_w_database_role_in_db(self):
pool.bind(database)
self.assertEqual(pool.database_role, database_role)
- def test_bind(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_bind(self, mock_region):
pool = self._make_one()
database = _Database("name")
SESSIONS = [_Session(database)] * 10
@@ -804,7 +915,11 @@ def test_bind(self):
for session in SESSIONS:
session.create.assert_not_called()
- def test_get_hit_no_ping(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_get_hit_no_ping(self, mock_region):
pool = self._make_one(size=4)
database = _Database("name")
SESSIONS = [_Session(database)] * 4
@@ -819,7 +934,11 @@ def test_get_hit_no_ping(self):
self.assertFalse(pool._sessions.full())
self.assertNoSpans()
- def test_get_hit_w_ping(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_get_hit_w_ping(self, mock_region):
import datetime
from google.cloud._testing import _Monkey
from google.cloud.spanner_v1 import pool as MUT
@@ -843,7 +962,11 @@ def test_get_hit_w_ping(self):
self.assertFalse(pool._sessions.full())
self.assertNoSpans()
- def test_get_hit_w_ping_expired(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_get_hit_w_ping_expired(self, mock_region):
import datetime
from google.cloud._testing import _Monkey
from google.cloud.spanner_v1 import pool as MUT
@@ -868,7 +991,11 @@ def test_get_hit_w_ping_expired(self):
self.assertFalse(pool._sessions.full())
self.assertNoSpans()
- def test_get_empty_default_timeout(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_get_empty_default_timeout(self, mock_region):
import queue
pool = self._make_one(size=1)
@@ -880,7 +1007,11 @@ def test_get_empty_default_timeout(self):
self.assertEqual(session_queue._got, {"block": True, "timeout": 10})
self.assertNoSpans()
- def test_get_empty_explicit_timeout(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_get_empty_explicit_timeout(self, mock_region):
import queue
pool = self._make_one(size=1, default_timeout=0.1)
@@ -892,7 +1023,11 @@ def test_get_empty_explicit_timeout(self):
self.assertEqual(session_queue._got, {"block": True, "timeout": 1})
self.assertNoSpans()
- def test_put_full(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_put_full(self, mock_region):
import queue
pool = self._make_one(size=4)
@@ -906,7 +1041,11 @@ def test_put_full(self):
self.assertTrue(pool._sessions.full())
- def test_spans_put_full(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_spans_put_full(self, mock_region):
if not HAS_OPENTELEMETRY_INSTALLED:
return
@@ -946,7 +1085,11 @@ def test_spans_put_full(self):
"CloudSpanner.PingingPool.BatchCreateSessions", wantEventNames
)
- def test_put_non_full(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_put_non_full(self, mock_region):
import datetime
from google.cloud._testing import _Monkey
from google.cloud.spanner_v1 import pool as MUT
@@ -967,7 +1110,11 @@ def test_put_non_full(self):
self.assertIs(queued, session)
self.assertNoSpans()
- def test_clear(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_clear(self, mock_region):
pool = self._make_one()
database = _Database("name")
SESSIONS = [_Session(database)] * 10
@@ -987,12 +1134,20 @@ def test_clear(self):
self.assertTrue(session._deleted)
self.assertNoSpans()
- def test_ping_empty(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_ping_empty(self, mock_region):
pool = self._make_one(size=1)
pool.ping() # Does not raise 'Empty'
self.assertNoSpans()
- def test_ping_oldest_fresh(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_ping_oldest_fresh(self, mock_region):
pool = self._make_one(size=1)
database = _Database("name")
SESSIONS = [_Session(database)] * 1
@@ -1005,7 +1160,11 @@ def test_ping_oldest_fresh(self):
self.assertFalse(SESSIONS[0]._pinged)
self.assertNoSpans()
- def test_ping_oldest_stale_but_exists(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_ping_oldest_stale_but_exists(self, mock_region):
import datetime
from google.cloud._testing import _Monkey
from google.cloud.spanner_v1 import pool as MUT
@@ -1022,7 +1181,11 @@ def test_ping_oldest_stale_but_exists(self):
self.assertTrue(SESSIONS[0]._pinged)
- def test_ping_oldest_stale_and_not_exists(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_ping_oldest_stale_and_not_exists(self, mock_region):
import datetime
from google.cloud._testing import _Monkey
from google.cloud.spanner_v1 import pool as MUT
@@ -1043,7 +1206,11 @@ def test_ping_oldest_stale_and_not_exists(self):
SESSIONS[1].create.assert_called()
self.assertNoSpans()
- def test_spans_get_and_leave_empty_pool(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_spans_get_and_leave_empty_pool(self, mock_region):
if not HAS_OPENTELEMETRY_INSTALLED:
return
diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py
index 3b08cc5c65..bfbd6edd5e 100644
--- a/tests/unit/test_session.py
+++ b/tests/unit/test_session.py
@@ -130,6 +130,7 @@ class TestSession(OpenTelemetryBase):
"gcp.client.service": "spanner",
"gcp.client.version": LIB_VERSION,
"gcp.client.repo": "googleapis/python-spanner",
+ "cloud.region": "global",
}
enrich_with_otel_scope(BASE_ATTRIBUTES)
@@ -222,7 +223,11 @@ def test_create_w_session_id(self):
self.assertNoSpans()
- def test_create_w_database_role(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_create_w_database_role(self, mock_region):
session_pb = self._make_session_pb(
self.SESSION_NAME, database_role=self.DATABASE_ROLE
)
@@ -263,7 +268,11 @@ def test_create_w_database_role(self):
),
)
- def test_create_session_span_annotations(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_create_session_span_annotations(self, mock_region):
session_pb = self._make_session_pb(
self.SESSION_NAME, database_role=self.DATABASE_ROLE
)
@@ -301,7 +310,11 @@ def test_create_session_span_annotations(self):
wantEventNames = ["Creating Session"]
self.assertSpanEvents("TestSessionSpan", wantEventNames, span)
- def test_create_wo_database_role(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_create_wo_database_role(self, mock_region):
session_pb = self._make_session_pb(self.SESSION_NAME)
gax_api = self._make_spanner_api()
gax_api.create_session.return_value = session_pb
@@ -337,7 +350,11 @@ def test_create_wo_database_role(self):
),
)
- def test_create_ok(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_create_ok(self, mock_region):
session_pb = self._make_session_pb(self.SESSION_NAME)
gax_api = self._make_spanner_api()
gax_api.create_session.return_value = session_pb
@@ -373,7 +390,11 @@ def test_create_ok(self):
),
)
- def test_create_w_labels(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_create_w_labels(self, mock_region):
labels = {"foo": "bar"}
session_pb = self._make_session_pb(self.SESSION_NAME, labels=labels)
gax_api = self._make_spanner_api()
@@ -411,7 +432,11 @@ def test_create_w_labels(self):
),
)
- def test_create_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_create_error(self, mock_region):
gax_api = self._make_spanner_api()
gax_api.create_session.side_effect = Unknown("error")
database = self._make_database()
@@ -437,7 +462,11 @@ def test_exists_wo_session_id(self):
self.assertNoSpans()
- def test_exists_hit(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_exists_hit(self, mock_region):
session_pb = self._make_session_pb(self.SESSION_NAME)
gax_api = self._make_spanner_api()
gax_api.get_session.return_value = session_pb
@@ -471,35 +500,10 @@ def test_exists_hit(self):
)
@mock.patch(
- "google.cloud.spanner_v1._opentelemetry_tracing.HAS_OPENTELEMETRY_INSTALLED",
- False,
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
)
- def test_exists_hit_wo_span(self):
- session_pb = self._make_session_pb(self.SESSION_NAME)
- gax_api = self._make_spanner_api()
- gax_api.get_session.return_value = session_pb
- database = self._make_database()
- database.spanner_api = gax_api
- session = self._make_one(database)
- session._session_id = self.SESSION_ID
-
- self.assertTrue(session.exists())
-
- gax_api.get_session.assert_called_once_with(
- name=self.SESSION_NAME,
- metadata=[
- ("google-cloud-resource-prefix", database.name),
- ("x-goog-spanner-route-to-leader", "true"),
- (
- "x-goog-spanner-request-id",
- f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1",
- ),
- ],
- )
-
- self.assertNoSpans()
-
- def test_exists_miss(self):
+ def test_exists_miss(self, mock_region):
gax_api = self._make_spanner_api()
gax_api.get_session.side_effect = NotFound("testing")
database = self._make_database()
@@ -532,34 +536,10 @@ def test_exists_miss(self):
)
@mock.patch(
- "google.cloud.spanner_v1._opentelemetry_tracing.HAS_OPENTELEMETRY_INSTALLED",
- False,
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
)
- def test_exists_miss_wo_span(self):
- gax_api = self._make_spanner_api()
- gax_api.get_session.side_effect = NotFound("testing")
- database = self._make_database()
- database.spanner_api = gax_api
- session = self._make_one(database)
- session._session_id = self.SESSION_ID
-
- self.assertFalse(session.exists())
-
- gax_api.get_session.assert_called_once_with(
- name=self.SESSION_NAME,
- metadata=[
- ("google-cloud-resource-prefix", database.name),
- ("x-goog-spanner-route-to-leader", "true"),
- (
- "x-goog-spanner-request-id",
- f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1",
- ),
- ],
- )
-
- self.assertNoSpans()
-
- def test_exists_error(self):
+ def test_exists_error(self, mock_region):
gax_api = self._make_spanner_api()
gax_api.get_session.side_effect = Unknown("testing")
database = self._make_database()
@@ -597,7 +577,11 @@ def test_ping_wo_session_id(self):
with self.assertRaises(ValueError):
session.ping()
- def test_ping_hit(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_ping_hit(self, mock_region):
gax_api = self._make_spanner_api()
gax_api.execute_sql.return_value = "1"
database = self._make_database()
@@ -612,18 +596,28 @@ def test_ping_hit(self):
sql="SELECT 1",
)
+ req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1"
gax_api.execute_sql.assert_called_once_with(
request=request,
metadata=[
("google-cloud-resource-prefix", database.name),
(
"x-goog-spanner-request-id",
- f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1",
+ req_id,
),
],
)
- def test_ping_miss(self):
+ self.assertSpanAttributes(
+ "CloudSpanner.Session.ping",
+ attributes=dict(self.BASE_ATTRIBUTES, x_goog_spanner_request_id=req_id),
+ )
+
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_ping_miss(self, mock_region):
gax_api = self._make_spanner_api()
gax_api.execute_sql.side_effect = NotFound("testing")
database = self._make_database()
@@ -639,18 +633,29 @@ def test_ping_miss(self):
sql="SELECT 1",
)
+ req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1"
gax_api.execute_sql.assert_called_once_with(
request=request,
metadata=[
("google-cloud-resource-prefix", database.name),
(
"x-goog-spanner-request-id",
- f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1",
+ req_id,
),
],
)
- def test_ping_error(self):
+ self.assertSpanAttributes(
+ "CloudSpanner.Session.ping",
+ status=StatusCode.ERROR,
+ attributes=dict(self.BASE_ATTRIBUTES, x_goog_spanner_request_id=req_id),
+ )
+
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_ping_error(self, mock_region):
gax_api = self._make_spanner_api()
gax_api.execute_sql.side_effect = Unknown("testing")
database = self._make_database()
@@ -666,17 +671,24 @@ def test_ping_error(self):
sql="SELECT 1",
)
+ req_id = f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1"
gax_api.execute_sql.assert_called_once_with(
request=request,
metadata=[
("google-cloud-resource-prefix", database.name),
(
"x-goog-spanner-request-id",
- f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1",
+ req_id,
),
],
)
+ self.assertSpanAttributes(
+ "CloudSpanner.Session.ping",
+ status=StatusCode.ERROR,
+ attributes=dict(self.BASE_ATTRIBUTES, x_goog_spanner_request_id=req_id),
+ )
+
def test_delete_wo_session_id(self):
database = self._make_database()
session = self._make_one(database)
@@ -686,7 +698,11 @@ def test_delete_wo_session_id(self):
self.assertNoSpans()
- def test_delete_hit(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_delete_hit(self, mock_region):
gax_api = self._make_spanner_api()
gax_api.delete_session.return_value = None
database = self._make_database()
@@ -715,7 +731,11 @@ def test_delete_hit(self):
attributes=dict(attrs, x_goog_spanner_request_id=req_id),
)
- def test_delete_miss(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_delete_miss(self, mock_region):
gax_api = self._make_spanner_api()
gax_api.delete_session.side_effect = NotFound("testing")
database = self._make_database()
@@ -751,7 +771,11 @@ def test_delete_miss(self):
attributes=attrs,
)
- def test_delete_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_delete_error(self, mock_region):
gax_api = self._make_spanner_api()
gax_api.delete_session.side_effect = Unknown("testing")
database = self._make_database()
@@ -1702,10 +1726,11 @@ def unit_of_work(txn, *args, **kw):
def _time(_results=[1, 2, 4, 8]):
return _results.pop(0)
- with mock.patch("time.time", _time):
- with mock.patch("time.sleep") as sleep_mock:
- with self.assertRaises(Aborted):
- session.run_in_transaction(unit_of_work, timeout_secs=8)
+ with mock.patch("time.time", _time), mock.patch(
+ "google.cloud.spanner_v1._helpers.random.random", return_value=0
+ ), mock.patch("time.sleep") as sleep_mock:
+ with self.assertRaises(Aborted):
+ session.run_in_transaction(unit_of_work, timeout_secs=8)
# unpacking call args into list
call_args = [call_[0][0] for call_ in sleep_mock.call_args_list]
diff --git a/tests/unit/test_snapshot.py b/tests/unit/test_snapshot.py
index 5e60d71bd6..974cc8e75e 100644
--- a/tests/unit/test_snapshot.py
+++ b/tests/unit/test_snapshot.py
@@ -76,6 +76,7 @@
"db.url": "spanner.googleapis.com",
"db.instance": "testing",
"net.host.name": "spanner.googleapis.com",
+ "cloud.region": "global",
"gcp.client.service": "spanner",
"gcp.client.version": LIB_VERSION,
"gcp.client.repo": "googleapis/python-spanner",
@@ -115,6 +116,8 @@
class _Derived(_SnapshotBase):
"""A minimally-implemented _SnapshotBase-derived class for testing"""
+ transaction_tag = None
+
# Use a simplified implementation of _build_transaction_options_pb
# that always returns the same transaction options.
TRANSACTION_OPTIONS = TransactionOptions()
@@ -556,7 +559,11 @@ def test_iteration_w_raw_raising_non_retryable_internal_error_after_token(self):
)
self.assertNoSpans()
- def test_iteration_w_span_creation(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_iteration_w_span_creation(self, mock_region):
name = "TestSpan"
extra_atts = {"test_att": 1}
raw = _MockIterator()
@@ -578,7 +585,11 @@ def test_iteration_w_span_creation(self):
),
)
- def test_iteration_w_multiple_span_creation(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_iteration_w_multiple_span_creation(self, mock_region):
from google.api_core.exceptions import ServiceUnavailable
if HAS_OPENTELEMETRY_INSTALLED:
@@ -680,7 +691,11 @@ def test_begin_error_already_begun(self):
self.assertNoSpans()
- def test_begin_error_other(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_begin_error_other(self, mock_region):
derived = _build_snapshot_derived(multi_use=True)
database = derived._session._database
@@ -699,7 +714,11 @@ def test_begin_error_other(self):
attributes=_build_span_attributes(database),
)
- def test_begin_read_write(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_begin_read_write(self, mock_region):
derived = _build_snapshot_derived(multi_use=True, read_only=False)
begin_transaction = derived._session._database.spanner_api.begin_transaction
@@ -707,7 +726,11 @@ def test_begin_read_write(self):
self._execute_begin(derived)
- def test_begin_read_only(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_begin_read_only(self, mock_region):
derived = _build_snapshot_derived(multi_use=True, read_only=True)
begin_transaction = derived._session._database.spanner_api.begin_transaction
@@ -715,7 +738,11 @@ def test_begin_read_only(self):
self._execute_begin(derived)
- def test_begin_precommit_token(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_begin_precommit_token(self, mock_region):
derived = _build_snapshot_derived(multi_use=True)
begin_transaction = derived._session._database.spanner_api.begin_transaction
@@ -725,7 +752,11 @@ def test_begin_precommit_token(self):
self._execute_begin(derived)
- def test_begin_retry_for_internal_server_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_begin_retry_for_internal_server_error(self, mock_region):
derived = _build_snapshot_derived(multi_use=True)
begin_transaction = derived._session._database.spanner_api.begin_transaction
@@ -745,7 +776,11 @@ def test_begin_retry_for_internal_server_error(self):
actual_statuses = self.finished_spans_events_statuses()
self.assertEqual(expected_statuses, actual_statuses)
- def test_begin_retry_for_aborted(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_begin_retry_for_aborted(self, mock_region):
derived = _build_snapshot_derived(multi_use=True)
begin_transaction = derived._session._database.spanner_api.begin_transaction
@@ -813,7 +848,11 @@ def _execute_begin(self, derived: _Derived, attempts: int = 1):
attributes=_build_span_attributes(database, attempt=attempts),
)
- def test_read_other_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_other_error(self, mock_region):
from google.cloud.spanner_v1.keyset import KeySet
keyset = KeySet(all_=True)
@@ -966,8 +1005,9 @@ def _execute_read(
expected_limit = LIMIT
# Transaction tag is ignored for read request.
- expected_request_options = request_options
- expected_request_options.transaction_tag = None
+ expected_request_options = RequestOptions(request_options)
+ if derived.transaction_tag:
+ expected_request_options.transaction_tag = derived.transaction_tag
expected_directed_read_options = (
directed_read_options
@@ -1000,15 +1040,16 @@ def _execute_read(
retry=retry,
timeout=timeout,
)
-
+ expected_attributes = dict(
+ BASE_ATTRIBUTES,
+ table_id=TABLE_NAME,
+ columns=tuple(COLUMNS),
+ x_goog_spanner_request_id=req_id,
+ )
+ if request_options and request_options.request_tag:
+ expected_attributes["request.tag"] = request_options.request_tag
self.assertSpanAttributes(
- "CloudSpanner._Derived.read",
- attributes=dict(
- BASE_ATTRIBUTES,
- table_id=TABLE_NAME,
- columns=tuple(COLUMNS),
- x_goog_spanner_request_id=req_id,
- ),
+ "CloudSpanner._Derived.read", attributes=expected_attributes
)
if first:
@@ -1017,89 +1058,162 @@ def _execute_read(
if use_multiplexed:
self.assertEqual(derived._precommit_token, PRECOMMIT_TOKEN_2)
- def test_read_wo_multi_use(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_wo_multi_use(self, mock_region):
self._execute_read(multi_use=False)
- def test_read_w_request_tag_success(self):
- request_options = RequestOptions(
- request_tag="tag-1",
- )
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_request_tag_success(self, mock_region):
+ request_options = {"request_tag": "tag-1"}
self._execute_read(multi_use=False, request_options=request_options)
- def test_read_w_transaction_tag_success(self):
- request_options = RequestOptions(
- transaction_tag="tag-1-1",
- )
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_transaction_tag_success(self, mock_region):
+ request_options = {"transaction_tag": "tag-1-1"}
self._execute_read(multi_use=False, request_options=request_options)
- def test_read_w_request_and_transaction_tag_success(self):
- request_options = RequestOptions(
- request_tag="tag-1",
- transaction_tag="tag-1-1",
- )
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_request_and_transaction_tag_success(self, mock_region):
+ request_options = {"request_tag": "tag-1", "transaction_tag": "tag-1-1"}
self._execute_read(multi_use=False, request_options=request_options)
- def test_read_w_request_and_transaction_tag_dictionary_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_request_and_transaction_tag_dictionary_success(self, mock_region):
request_options = {"request_tag": "tag-1", "transaction_tag": "tag-1-1"}
self._execute_read(multi_use=False, request_options=request_options)
- def test_read_w_incorrect_tag_dictionary_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_incorrect_tag_dictionary_error(self, mock_region):
request_options = {"incorrect_tag": "tag-1-1"}
with self.assertRaises(ValueError):
self._execute_read(multi_use=False, request_options=request_options)
- def test_read_wo_multi_use_w_read_request_count_gt_0(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_wo_multi_use_w_read_request_count_gt_0(self, mock_region):
with self.assertRaises(ValueError):
self._execute_read(multi_use=False, count=1)
- def test_read_w_multi_use_w_first(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_multi_use_w_first(self, mock_region):
self._execute_read(multi_use=True, first=True)
- def test_read_w_multi_use_wo_first(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_multi_use_wo_first(self, mock_region):
self._execute_read(multi_use=True, first=False)
- def test_read_w_multi_use_wo_first_w_count_gt_0(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_multi_use_wo_first_w_count_gt_0(self, mock_region):
self._execute_read(multi_use=True, first=False, count=1)
- def test_read_w_multi_use_w_first_w_partition(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_multi_use_w_first_w_partition(self, mock_region):
PARTITION = b"FADEABED"
self._execute_read(multi_use=True, first=True, partition=PARTITION)
- def test_read_w_multi_use_w_first_w_count_gt_0(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_multi_use_w_first_w_count_gt_0(self, mock_region):
with self.assertRaises(ValueError):
self._execute_read(multi_use=True, first=True, count=1)
- def test_read_w_timeout_param(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_timeout_param(self, mock_region):
self._execute_read(multi_use=True, first=False, timeout=2.0)
- def test_read_w_retry_param(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_retry_param(self, mock_region):
self._execute_read(multi_use=True, first=False, retry=Retry(deadline=60))
- def test_read_w_timeout_and_retry_params(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_timeout_and_retry_params(self, mock_region):
self._execute_read(
multi_use=True, first=False, retry=Retry(deadline=60), timeout=2.0
)
- def test_read_w_directed_read_options(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_directed_read_options(self, mock_region):
self._execute_read(multi_use=False, directed_read_options=DIRECTED_READ_OPTIONS)
- def test_read_w_directed_read_options_at_client_level(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_directed_read_options_at_client_level(self, mock_region):
self._execute_read(
multi_use=False,
directed_read_options_at_client_level=DIRECTED_READ_OPTIONS_FOR_CLIENT,
)
- def test_read_w_directed_read_options_override(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_directed_read_options_override(self, mock_region):
self._execute_read(
multi_use=False,
directed_read_options=DIRECTED_READ_OPTIONS,
directed_read_options_at_client_level=DIRECTED_READ_OPTIONS_FOR_CLIENT,
)
- def test_read_w_precommit_tokens(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_read_w_precommit_tokens(self, mock_region):
self._execute_read(multi_use=True, use_multiplexed=True)
- def test_execute_sql_other_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_other_error(self, mock_region):
database = _Database()
database.spanner_api = build_spanner_api()
database.spanner_api.execute_streaming_sql.side_effect = RuntimeError()
@@ -1243,10 +1357,11 @@ def _execute_sql_helper(
expected_query_options, query_options
)
- if derived._read_only:
- # Transaction tag is ignored for read only requests.
- expected_request_options = request_options
- expected_request_options.transaction_tag = None
+ expected_request_options = RequestOptions(request_options)
+ if derived.transaction_tag:
+ expected_request_options.transaction_tag = derived.transaction_tag
+ if not derived._read_only and request_options.request_tag:
+ expected_request_options.request_tag = request_options.request_tag
expected_directed_read_options = (
directed_read_options
@@ -1283,16 +1398,20 @@ def _execute_sql_helper(
self.assertEqual(derived._execute_sql_request_count, sql_count + 1)
+ expected_attributes = dict(
+ BASE_ATTRIBUTES,
+ **{
+ "db.statement": SQL_QUERY_WITH_PARAM,
+ "x_goog_spanner_request_id": req_id,
+ },
+ )
+ if request_options and request_options.request_tag:
+ expected_attributes["request.tag"] = request_options.request_tag
+
self.assertSpanAttributes(
"CloudSpanner._Derived.execute_sql",
status=StatusCode.OK,
- attributes=dict(
- BASE_ATTRIBUTES,
- **{
- "db.statement": SQL_QUERY_WITH_PARAM,
- "x_goog_spanner_request_id": req_id,
- },
- ),
+ attributes=expected_attributes,
)
if first:
@@ -1301,33 +1420,61 @@ def _execute_sql_helper(
if use_multiplexed:
self.assertEqual(derived._precommit_token, PRECOMMIT_TOKEN_2)
- def test_execute_sql_wo_multi_use(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_wo_multi_use(self, mock_region):
self._execute_sql_helper(multi_use=False)
def test_execute_sql_wo_multi_use_w_read_request_count_gt_0(self):
with self.assertRaises(ValueError):
self._execute_sql_helper(multi_use=False, count=1)
- def test_execute_sql_w_multi_use_wo_first(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_w_multi_use_wo_first(self, mock_region):
self._execute_sql_helper(multi_use=True, first=False, sql_count=1)
- def test_execute_sql_w_multi_use_wo_first_w_count_gt_0(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_w_multi_use_wo_first_w_count_gt_0(self, mock_region):
self._execute_sql_helper(multi_use=True, first=False, count=1)
- def test_execute_sql_w_multi_use_w_first(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_w_multi_use_w_first(self, mock_region):
self._execute_sql_helper(multi_use=True, first=True)
def test_execute_sql_w_multi_use_w_first_w_count_gt_0(self):
with self.assertRaises(ValueError):
self._execute_sql_helper(multi_use=True, first=True, count=1)
- def test_execute_sql_w_retry(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_w_retry(self, mock_region):
self._execute_sql_helper(multi_use=False, retry=None)
- def test_execute_sql_w_timeout(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_w_timeout(self, mock_region):
self._execute_sql_helper(multi_use=False, timeout=None)
- def test_execute_sql_w_query_options(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_w_query_options(self, mock_region):
from google.cloud.spanner_v1 import ExecuteSqlRequest
self._execute_sql_helper(
@@ -1335,7 +1482,11 @@ def test_execute_sql_w_query_options(self):
query_options=ExecuteSqlRequest.QueryOptions(optimizer_version="3"),
)
- def test_execute_sql_w_request_options(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_w_request_options(self, mock_region):
self._execute_sql_helper(
multi_use=False,
request_options=RequestOptions(
@@ -1343,26 +1494,37 @@ def test_execute_sql_w_request_options(self):
),
)
- def test_execute_sql_w_request_tag_success(self):
- request_options = RequestOptions(
- request_tag="tag-1",
- )
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_w_request_tag_success(self, mock_region):
+ request_options = {"request_tag": "tag-1"}
self._execute_sql_helper(multi_use=False, request_options=request_options)
- def test_execute_sql_w_transaction_tag_success(self):
- request_options = RequestOptions(
- transaction_tag="tag-1-1",
- )
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_w_transaction_tag_success(self, mock_region):
+ request_options = {"transaction_tag": "tag-1-1"}
self._execute_sql_helper(multi_use=False, request_options=request_options)
- def test_execute_sql_w_request_and_transaction_tag_success(self):
- request_options = RequestOptions(
- request_tag="tag-1",
- transaction_tag="tag-1-1",
- )
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_w_request_and_transaction_tag_success(self, mock_region):
+ request_options = {"request_tag": "tag-1", "transaction_tag": "tag-1-1"}
self._execute_sql_helper(multi_use=False, request_options=request_options)
- def test_execute_sql_w_request_and_transaction_tag_dictionary_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_w_request_and_transaction_tag_dictionary_success(
+ self, mock_region
+ ):
request_options = {"request_tag": "tag-1", "transaction_tag": "tag-1-1"}
self._execute_sql_helper(multi_use=False, request_options=request_options)
@@ -1371,25 +1533,41 @@ def test_execute_sql_w_incorrect_tag_dictionary_error(self):
with self.assertRaises(ValueError):
self._execute_sql_helper(multi_use=False, request_options=request_options)
- def test_execute_sql_w_directed_read_options(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_w_directed_read_options(self, mock_region):
self._execute_sql_helper(
multi_use=False, directed_read_options=DIRECTED_READ_OPTIONS
)
- def test_execute_sql_w_directed_read_options_at_client_level(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_w_directed_read_options_at_client_level(self, mock_region):
self._execute_sql_helper(
multi_use=False,
directed_read_options_at_client_level=DIRECTED_READ_OPTIONS_FOR_CLIENT,
)
- def test_execute_sql_w_directed_read_options_override(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_w_directed_read_options_override(self, mock_region):
self._execute_sql_helper(
multi_use=False,
directed_read_options=DIRECTED_READ_OPTIONS,
directed_read_options_at_client_level=DIRECTED_READ_OPTIONS_FOR_CLIENT,
)
- def test_execute_sql_w_precommit_tokens(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_sql_w_precommit_tokens(self, mock_region):
self._execute_sql_helper(multi_use=True, use_multiplexed=True)
def _partition_read_helper(
@@ -1497,7 +1675,11 @@ def test_partition_read_wo_existing_transaction_raises(self):
with self.assertRaises(ValueError):
self._partition_read_helper(multi_use=True, w_txn=False)
- def test_partition_read_other_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_partition_read_other_error(self, mock_region):
from google.cloud.spanner_v1.keyset import KeySet
keyset = KeySet(all_=True)
@@ -1556,24 +1738,48 @@ def test_partition_read_w_retry(self):
self.assertEqual(api.partition_read.call_count, 2)
- def test_partition_read_ok_w_index_no_options(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_partition_read_ok_w_index_no_options(self, mock_region):
self._partition_read_helper(multi_use=True, w_txn=True, index="index")
- def test_partition_read_ok_w_size(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_partition_read_ok_w_size(self, mock_region):
self._partition_read_helper(multi_use=True, w_txn=True, size=2000)
- def test_partition_read_ok_w_max_partitions(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_partition_read_ok_w_max_partitions(self, mock_region):
self._partition_read_helper(multi_use=True, w_txn=True, max_partitions=4)
- def test_partition_read_ok_w_timeout_param(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_partition_read_ok_w_timeout_param(self, mock_region):
self._partition_read_helper(multi_use=True, w_txn=True, timeout=2.0)
- def test_partition_read_ok_w_retry_param(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_partition_read_ok_w_retry_param(self, mock_region):
self._partition_read_helper(
multi_use=True, w_txn=True, retry=Retry(deadline=60)
)
- def test_partition_read_ok_w_timeout_and_retry_params(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_partition_read_ok_w_timeout_and_retry_params(self, mock_region):
self._partition_read_helper(
multi_use=True, w_txn=True, retry=Retry(deadline=60), timeout=2.0
)
@@ -1676,7 +1882,11 @@ def _partition_query_helper(
),
)
- def test_partition_query_other_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_partition_query_other_error(self, mock_region):
database = _Database()
database.spanner_api = build_spanner_api()
database.spanner_api.partition_query.side_effect = RuntimeError()
@@ -1705,24 +1915,48 @@ def test_partition_query_wo_transaction_raises(self):
with self.assertRaises(ValueError):
self._partition_query_helper(multi_use=True, w_txn=False)
- def test_partition_query_ok_w_index_no_options(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_partition_query_ok_w_index_no_options(self, mock_region):
self._partition_query_helper(multi_use=True, w_txn=True)
- def test_partition_query_ok_w_size(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_partition_query_ok_w_size(self, mock_region):
self._partition_query_helper(multi_use=True, w_txn=True, size=2000)
- def test_partition_query_ok_w_max_partitions(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_partition_query_ok_w_max_partitions(self, mock_region):
self._partition_query_helper(multi_use=True, w_txn=True, max_partitions=4)
- def test_partition_query_ok_w_timeout_param(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_partition_query_ok_w_timeout_param(self, mock_region):
self._partition_query_helper(multi_use=True, w_txn=True, timeout=2.0)
- def test_partition_query_ok_w_retry_param(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_partition_query_ok_w_retry_param(self, mock_region):
self._partition_query_helper(
multi_use=True, w_txn=True, retry=Retry(deadline=30)
)
- def test_partition_query_ok_w_timeout_and_retry_params(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_partition_query_ok_w_timeout_and_retry_params(self, mock_region):
self._partition_query_helper(
multi_use=True, w_txn=True, retry=Retry(deadline=60), timeout=2.0
)
@@ -1984,21 +2218,24 @@ def _build_snapshot_derived(session=None, multi_use=False, read_only=True) -> _D
return derived
-def _build_span_attributes(database: Database, attempt: int = 1) -> Mapping[str, str]:
+def _build_span_attributes(
+ database: Database, attempt: int = 1, **extra_attributes
+) -> Mapping[str, str]:
"""Builds the attributes for spans using the given database and extra attributes."""
- return enrich_with_otel_scope(
- {
- "db.type": "spanner",
- "db.url": "spanner.googleapis.com",
- "db.instance": database.name,
- "net.host.name": "spanner.googleapis.com",
- "gcp.client.service": "spanner",
- "gcp.client.version": LIB_VERSION,
- "gcp.client.repo": "googleapis/python-spanner",
- "x_goog_spanner_request_id": _build_request_id(database, attempt),
- }
- )
+ attributes = {
+ "db.type": "spanner",
+ "db.url": "spanner.googleapis.com",
+ "db.instance": database.name,
+ "net.host.name": "spanner.googleapis.com",
+ "cloud.region": "global",
+ "gcp.client.service": "spanner",
+ "gcp.client.version": LIB_VERSION,
+ "gcp.client.repo": "googleapis/python-spanner",
+ "x_goog_spanner_request_id": _build_request_id(database, attempt),
+ }
+ attributes.update(extra_attributes)
+ return enrich_with_otel_scope(attributes)
def _build_request_id(database: Database, attempt: int) -> str:
diff --git a/tests/unit/test_spanner.py b/tests/unit/test_spanner.py
index e35b817858..d1de23d2d0 100644
--- a/tests/unit/test_spanner.py
+++ b/tests/unit/test_spanner.py
@@ -475,7 +475,6 @@ def _batch_update_helper(
self.assertEqual(status, expected_status)
self.assertEqual(row_counts, expected_row_counts)
- self.assertEqual(transaction._execute_sql_request_count, count + 1)
def _batch_update_expected_request(self, begin=True, count=0):
if begin is True:
@@ -1071,37 +1070,27 @@ def test_transaction_for_concurrent_statement_should_begin_one_transaction_with_
)
self.assertEqual(api.execute_batch_dml.call_count, 2)
- self.assertEqual(
- api.execute_batch_dml.call_args_list,
- [
- mock.call(
- request=self._batch_update_expected_request(),
- metadata=[
- ("google-cloud-resource-prefix", database.name),
- ("x-goog-spanner-route-to-leader", "true"),
- (
- "x-goog-spanner-request-id",
- f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1",
- ),
- ],
- retry=RETRY,
- timeout=TIMEOUT,
- ),
- mock.call(
- request=self._batch_update_expected_request(begin=False),
- metadata=[
- ("google-cloud-resource-prefix", database.name),
- ("x-goog-spanner-route-to-leader", "true"),
- (
- "x-goog-spanner-request-id",
- f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1",
- ),
- ],
- retry=RETRY,
- timeout=TIMEOUT,
- ),
- ],
+
+ call_args_list = api.execute_batch_dml.call_args_list
+
+ request_ids = []
+ for call in call_args_list:
+ metadata = call.kwargs["metadata"]
+ self.assertEqual(len(metadata), 3)
+ self.assertEqual(
+ metadata[0], ("google-cloud-resource-prefix", database.name)
+ )
+ self.assertEqual(metadata[1], ("x-goog-spanner-route-to-leader", "true"))
+ self.assertEqual(metadata[2][0], "x-goog-spanner-request-id")
+ request_ids.append(metadata[2][1])
+ self.assertEqual(call.kwargs["retry"], RETRY)
+ self.assertEqual(call.kwargs["timeout"], TIMEOUT)
+
+ expected_id_suffixes = ["1.1", "2.1"]
+ actual_id_suffixes = sorted(
+ [".".join(rid.split(".")[-2:]) for rid in request_ids]
)
+ self.assertEqual(actual_id_suffixes, expected_id_suffixes)
def test_transaction_for_concurrent_statement_should_begin_one_transaction_with_read(
self,
@@ -1131,11 +1120,6 @@ def test_transaction_for_concurrent_statement_should_begin_one_transaction_with_
self._execute_update_helper(transaction=transaction, api=api)
- begin_read_write_count = sum(
- [1 for call in api.mock_calls if "read_write" in call.kwargs.__str__()]
- )
-
- self.assertEqual(begin_read_write_count, 1)
api.execute_sql.assert_any_call(
request=self._execute_update_expected_request(database, begin=False),
retry=RETRY,
@@ -1150,41 +1134,37 @@ def test_transaction_for_concurrent_statement_should_begin_one_transaction_with_
],
)
- self.assertEqual(
- api.streaming_read.call_args_list,
- [
- mock.call(
- request=self._read_helper_expected_request(),
- metadata=[
- ("google-cloud-resource-prefix", database.name),
- ("x-goog-spanner-route-to-leader", "true"),
- (
- "x-goog-spanner-request-id",
- f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.1.1",
- ),
- ],
- retry=RETRY,
- timeout=TIMEOUT,
- ),
- mock.call(
- request=self._read_helper_expected_request(begin=False),
- metadata=[
- ("google-cloud-resource-prefix", database.name),
- ("x-goog-spanner-route-to-leader", "true"),
- (
- "x-goog-spanner-request-id",
- f"1.{REQ_RAND_PROCESS_ID}.{database._nth_client_id}.{database._channel_id}.2.1",
- ),
- ],
- retry=RETRY,
- timeout=TIMEOUT,
- ),
- ],
- )
-
self.assertEqual(api.execute_sql.call_count, 1)
self.assertEqual(api.streaming_read.call_count, 2)
+ call_args_list = api.streaming_read.call_args_list
+
+ expected_requests = [
+ self._read_helper_expected_request(),
+ self._read_helper_expected_request(begin=False),
+ ]
+ actual_requests = [call.kwargs["request"] for call in call_args_list]
+ self.assertCountEqual(actual_requests, expected_requests)
+
+ request_ids = []
+ for call in call_args_list:
+ metadata = call.kwargs["metadata"]
+ self.assertEqual(len(metadata), 3)
+ self.assertEqual(
+ metadata[0], ("google-cloud-resource-prefix", database.name)
+ )
+ self.assertEqual(metadata[1], ("x-goog-spanner-route-to-leader", "true"))
+ self.assertEqual(metadata[2][0], "x-goog-spanner-request-id")
+ request_ids.append(metadata[2][1])
+ self.assertEqual(call.kwargs["retry"], RETRY)
+ self.assertEqual(call.kwargs["timeout"], TIMEOUT)
+
+ expected_id_suffixes = ["1.1", "2.1"]
+ actual_id_suffixes = sorted(
+ [".".join(rid.split(".")[-2:]) for rid in request_ids]
+ )
+ self.assertEqual(actual_id_suffixes, expected_id_suffixes)
+
def test_transaction_for_concurrent_statement_should_begin_one_transaction_with_query(
self,
):
diff --git a/tests/unit/test_spanner_metrics_tracer_factory.py b/tests/unit/test_spanner_metrics_tracer_factory.py
index 48fe1b4837..8ae7bfc694 100644
--- a/tests/unit/test_spanner_metrics_tracer_factory.py
+++ b/tests/unit/test_spanner_metrics_tracer_factory.py
@@ -14,14 +14,9 @@
# limitations under the License.
import pytest
-import unittest
-from unittest import mock
-
-from google.cloud.spanner_v1.metrics.constants import GOOGLE_CLOUD_REGION_KEY
from google.cloud.spanner_v1.metrics.spanner_metrics_tracer_factory import (
SpannerMetricsTracerFactory,
)
-from opentelemetry.sdk.resources import Resource
pytest.importorskip("opentelemetry")
@@ -50,48 +45,3 @@ def test_get_instance_config(self):
def test_get_client_name(self):
client_name = SpannerMetricsTracerFactory._get_client_name()
assert isinstance(client_name, str)
- assert "spanner-python" in client_name
-
- def test_get_location(self):
- location = SpannerMetricsTracerFactory._get_location()
- assert isinstance(location, str)
- assert location # Simply asserting for non empty as this can change depending on the instance this test runs in.
-
-
-class TestSpannerMetricsTracerFactoryGetLocation(unittest.TestCase):
- @mock.patch(
- "opentelemetry.resourcedetector.gcp_resource_detector.GoogleCloudResourceDetector.detect"
- )
- def test_get_location_with_region(self, mock_detect):
- """Test that _get_location returns the region when detected."""
- mock_resource = Resource.create({GOOGLE_CLOUD_REGION_KEY: "us-central1"})
- mock_detect.return_value = mock_resource
-
- location = SpannerMetricsTracerFactory._get_location()
- assert location == "us-central1"
-
- @mock.patch(
- "opentelemetry.resourcedetector.gcp_resource_detector.GoogleCloudResourceDetector.detect"
- )
- def test_get_location_without_region(self, mock_detect):
- """Test that _get_location returns 'global' when no region is detected."""
- mock_resource = Resource.create({}) # No region attribute
- mock_detect.return_value = mock_resource
-
- location = SpannerMetricsTracerFactory._get_location()
- assert location == "global"
-
- @mock.patch(
- "opentelemetry.resourcedetector.gcp_resource_detector.GoogleCloudResourceDetector.detect"
- )
- def test_get_location_with_exception(self, mock_detect):
- """Test that _get_location returns 'global' and logs a warning on exception."""
- mock_detect.side_effect = Exception("detector failed")
-
- with self.assertLogs(
- "google.cloud.spanner_v1.metrics.spanner_metrics_tracer_factory",
- level="WARNING",
- ) as log:
- location = SpannerMetricsTracerFactory._get_location()
- assert location == "global"
- self.assertIn("Failed to detect GCP resource location", log.output[0])
diff --git a/tests/unit/test_transaction.py b/tests/unit/test_transaction.py
index 7a33372dae..39656cb8d1 100644
--- a/tests/unit/test_transaction.py
+++ b/tests/unit/test_transaction.py
@@ -26,6 +26,7 @@
TransactionOptions,
ResultSetMetadata,
)
+from google.cloud.spanner_v1._helpers import GOOGLE_CLOUD_REGION_GLOBAL
from google.cloud.spanner_v1 import DefaultTransactionOptions
from google.cloud.spanner_v1 import Type
from google.cloud.spanner_v1 import TypeCode
@@ -191,7 +192,11 @@ def test_rollback_already_rolled_back(self):
self.assertNoSpans()
- def test_rollback_w_other_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_rollback_w_other_error(self, mock_region):
database = _Database()
database.spanner_api = self._make_spanner_api()
database.spanner_api.rollback.side_effect = RuntimeError("other error")
@@ -214,7 +219,11 @@ def test_rollback_w_other_error(self):
),
)
- def test_rollback_ok(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_rollback_ok(self, mock_region):
from google.protobuf.empty_pb2 import Empty
empty_pb = Empty()
@@ -346,7 +355,11 @@ def test_commit_already_rolled_back(self):
]
self.assertEqual(got_span_events_statuses, want_span_events_statuses)
- def test_commit_w_other_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_w_other_error(self, mock_region):
database = _Database()
database.spanner_api = self._make_spanner_api()
database.spanner_api.commit.side_effect = RuntimeError()
@@ -558,31 +571,55 @@ def _commit_helper(
actual_statuses = self.finished_spans_events_statuses()
self.assertEqual(actual_statuses, expected_statuses)
- def test_commit_mutations_only_not_multiplexed(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_mutations_only_not_multiplexed(self, mock_region):
self._commit_helper(mutations=[DELETE_MUTATION], is_multiplexed=False)
- def test_commit_mutations_only_multiplexed_w_non_insert_mutation(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_mutations_only_multiplexed_w_non_insert_mutation(self, mock_region):
self._commit_helper(
mutations=[DELETE_MUTATION],
is_multiplexed=True,
expected_begin_mutation=DELETE_MUTATION,
)
- def test_commit_mutations_only_multiplexed_w_insert_mutation(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_mutations_only_multiplexed_w_insert_mutation(self, mock_region):
self._commit_helper(
mutations=[INSERT_MUTATION],
is_multiplexed=True,
expected_begin_mutation=INSERT_MUTATION,
)
- def test_commit_mutations_only_multiplexed_w_non_insert_and_insert_mutations(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_mutations_only_multiplexed_w_non_insert_and_insert_mutations(
+ self, mock_region
+ ):
self._commit_helper(
mutations=[INSERT_MUTATION, DELETE_MUTATION],
is_multiplexed=True,
expected_begin_mutation=DELETE_MUTATION,
)
- def test_commit_mutations_only_multiplexed_w_multiple_insert_mutations(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_mutations_only_multiplexed_w_multiple_insert_mutations(
+ self, mock_region
+ ):
insert_1 = Mutation(insert=_make_write_pb(TABLE_NAME, COLUMNS, [VALUE_1]))
insert_2 = Mutation(
insert=_make_write_pb(TABLE_NAME, COLUMNS, [VALUE_1, VALUE_2])
@@ -594,7 +631,13 @@ def test_commit_mutations_only_multiplexed_w_multiple_insert_mutations(self):
expected_begin_mutation=insert_2,
)
- def test_commit_mutations_only_multiplexed_w_multiple_non_insert_mutations(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_mutations_only_multiplexed_w_multiple_non_insert_mutations(
+ self, mock_region
+ ):
mutations = [UPDATE_MUTATION, DELETE_MUTATION]
self._commit_helper(
mutations=mutations,
@@ -602,7 +645,11 @@ def test_commit_mutations_only_multiplexed_w_multiple_non_insert_mutations(self)
expected_begin_mutation=mutations[0],
)
- def test_commit_w_return_commit_stats(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_w_return_commit_stats(self, mock_region):
self._commit_helper(return_commit_stats=True)
def test_commit_w_max_commit_delay(self):
@@ -629,7 +676,11 @@ def test_commit_w_incorrect_tag_dictionary_error(self):
with self.assertRaises(ValueError):
self._commit_helper(request_options=request_options)
- def test_commit_w_retry_for_precommit_token(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_commit_w_retry_for_precommit_token(self, mock_region):
self._commit_helper(retry_for_precommit_token=True)
def test_commit_w_retry_for_precommit_token_then_error(self):
@@ -659,7 +710,11 @@ def test__make_params_pb_w_params_w_param_types(self):
)
self.assertEqual(params_pb, expected_params)
- def test_execute_update_other_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_update_other_error(self, mock_region):
database = _Database()
database.spanner_api = self._make_spanner_api()
database.spanner_api.execute_sql.side_effect = RuntimeError()
@@ -752,8 +807,9 @@ def _execute_update_helper(
expected_query_options = _merge_query_options(
expected_query_options, query_options
)
- expected_request_options = request_options
- expected_request_options.transaction_tag = TRANSACTION_TAG
+ expected_request_options = RequestOptions(request_options)
+ if request_options.request_tag:
+ expected_request_options.request_tag = request_options.request_tag
expected_request = ExecuteSqlRequest(
session=self.SESSION_NAME,
@@ -763,7 +819,7 @@ def _execute_update_helper(
param_types=PARAM_TYPES,
query_mode=MODE,
query_options=expected_query_options,
- request_options=request_options,
+ request_options=expected_request_options,
seqno=count,
)
api.execute_sql.assert_called_once_with(
@@ -780,11 +836,13 @@ def _execute_update_helper(
],
)
+ expected_attributes = self._build_span_attributes(
+ database, **{"db.statement": DML_QUERY_WITH_PARAM}
+ )
+ if request_options.request_tag:
+ expected_attributes["request.tag"] = request_options.request_tag
self.assertSpanAttributes(
- "CloudSpanner.Transaction.execute_update",
- attributes=self._build_span_attributes(
- database, **{"db.statement": DML_QUERY_WITH_PARAM}
- ),
+ "CloudSpanner.Transaction.execute_update", attributes=expected_attributes
)
self.assertEqual(transaction._transaction_id, TRANSACTION_ID)
@@ -793,29 +851,51 @@ def _execute_update_helper(
if use_multiplexed:
self.assertEqual(transaction._precommit_token, PRECOMMIT_TOKEN_PB_0)
- def test_execute_update_new_transaction(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_update_new_transaction(self, mock_region):
self._execute_update_helper()
- def test_execute_update_w_request_tag_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_update_w_request_tag_success(self, mock_region):
request_options = RequestOptions(
request_tag="tag-1",
)
self._execute_update_helper(request_options=request_options)
- def test_execute_update_w_transaction_tag_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_update_w_transaction_tag_success(self, mock_region):
request_options = RequestOptions(
transaction_tag="tag-1-1",
)
self._execute_update_helper(request_options=request_options)
- def test_execute_update_w_request_and_transaction_tag_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_update_w_request_and_transaction_tag_success(self, mock_region):
request_options = RequestOptions(
request_tag="tag-1",
transaction_tag="tag-1-1",
)
self._execute_update_helper(request_options=request_options)
- def test_execute_update_w_request_and_transaction_tag_dictionary_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_update_w_request_and_transaction_tag_dictionary_success(
+ self, mock_region
+ ):
request_options = {"request_tag": "tag-1", "transaction_tag": "tag-1-1"}
self._execute_update_helper(request_options=request_options)
@@ -824,16 +904,32 @@ def test_execute_update_w_incorrect_tag_dictionary_error(self):
with self.assertRaises(ValueError):
self._execute_update_helper(request_options=request_options)
- def test_execute_update_w_count(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_update_w_count(self, mock_region):
self._execute_update_helper(count=1)
- def test_execute_update_w_timeout_param(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_update_w_timeout_param(self, mock_region):
self._execute_update_helper(timeout=2.0)
- def test_execute_update_w_retry_param(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_update_w_retry_param(self, mock_region):
self._execute_update_helper(retry=Retry(deadline=60))
- def test_execute_update_w_timeout_and_retry_params(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_update_w_timeout_and_retry_params(self, mock_region):
self._execute_update_helper(retry=Retry(deadline=60), timeout=2.0)
def test_execute_update_error(self):
@@ -849,27 +945,47 @@ def test_execute_update_error(self):
self.assertEqual(transaction._execute_sql_request_count, 1)
- def test_execute_update_w_query_options(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_update_w_query_options(self, mock_region):
from google.cloud.spanner_v1 import ExecuteSqlRequest
self._execute_update_helper(
query_options=ExecuteSqlRequest.QueryOptions(optimizer_version="3")
)
- def test_execute_update_wo_begin(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_update_wo_begin(self, mock_region):
self._execute_update_helper(begin=False)
- def test_execute_update_w_precommit_token(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_update_w_precommit_token(self, mock_region):
self._execute_update_helper(use_multiplexed=True)
- def test_execute_update_w_request_options(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_execute_update_w_request_options(self, mock_region):
self._execute_update_helper(
request_options=RequestOptions(
priority=RequestOptions.Priority.PRIORITY_MEDIUM
)
)
- def test_batch_update_other_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_update_other_error(self, mock_region):
database = _Database()
database.spanner_api = self._make_spanner_api()
database.spanner_api.execute_batch_dml.side_effect = RuntimeError()
@@ -1025,45 +1141,79 @@ def _batch_update_helper(
if use_multiplexed:
self.assertEqual(transaction._precommit_token, PRECOMMIT_TOKEN_PB_2)
- def test_batch_update_wo_begin(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_update_wo_begin(self, mock_region):
self._batch_update_helper(begin=False)
- def test_batch_update_wo_errors(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_update_wo_errors(self, mock_region):
self._batch_update_helper(
request_options=RequestOptions(
priority=RequestOptions.Priority.PRIORITY_MEDIUM
),
)
- def test_batch_update_w_request_tag_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_update_w_request_tag_success(self, mock_region):
request_options = RequestOptions(
request_tag="tag-1",
)
self._batch_update_helper(request_options=request_options)
- def test_batch_update_w_transaction_tag_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_update_w_transaction_tag_success(self, mock_region):
request_options = RequestOptions(
transaction_tag="tag-1-1",
)
self._batch_update_helper(request_options=request_options)
- def test_batch_update_w_request_and_transaction_tag_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_update_w_request_and_transaction_tag_success(self, mock_region):
request_options = RequestOptions(
request_tag="tag-1",
transaction_tag="tag-1-1",
)
self._batch_update_helper(request_options=request_options)
- def test_batch_update_w_request_and_transaction_tag_dictionary_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_update_w_request_and_transaction_tag_dictionary_success(
+ self, mock_region
+ ):
request_options = {"request_tag": "tag-1", "transaction_tag": "tag-1-1"}
self._batch_update_helper(request_options=request_options)
- def test_batch_update_w_incorrect_tag_dictionary_error(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_update_w_incorrect_tag_dictionary_error(self, mock_region):
request_options = {"incorrect_tag": "tag-1-1"}
with self.assertRaises(ValueError):
self._batch_update_helper(request_options=request_options)
- def test_batch_update_w_errors(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_update_w_errors(self, mock_region):
self._batch_update_helper(error_after=2, count=1)
def test_batch_update_error(self):
@@ -1097,19 +1247,39 @@ def test_batch_update_error(self):
self.assertEqual(transaction._execute_sql_request_count, 1)
- def test_batch_update_w_timeout_param(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_update_w_timeout_param(self, mock_region):
self._batch_update_helper(timeout=2.0)
- def test_batch_update_w_retry_param(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_update_w_retry_param(self, mock_region):
self._batch_update_helper(retry=gapic_v1.method.DEFAULT)
- def test_batch_update_w_timeout_and_retry_params(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_update_w_timeout_and_retry_params(self, mock_region):
self._batch_update_helper(retry=gapic_v1.method.DEFAULT, timeout=2.0)
- def test_batch_update_w_precommit_token(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_batch_update_w_precommit_token(self, mock_region):
self._batch_update_helper(use_multiplexed=True)
- def test_context_mgr_success(self):
+ @mock.patch(
+ "google.cloud.spanner_v1._opentelemetry_tracing._get_cloud_region",
+ return_value="global",
+ )
+ def test_context_mgr_success(self, mock_region):
transaction = build_transaction()
session = transaction._session
database = session._database
@@ -1163,7 +1333,7 @@ def test_context_mgr_failure(self):
def _build_span_attributes(
database: Database, **extra_attributes
) -> Mapping[str, str]:
- """Builds the attributes for spans using the given database and extra attributes."""
+    """Builds the attributes for spans using the given database and extra attributes, including the cloud region."""
attributes = enrich_with_otel_scope(
{
@@ -1174,6 +1344,7 @@ def _build_span_attributes(
"gcp.client.service": "spanner",
"gcp.client.version": LIB_VERSION,
"gcp.client.repo": "googleapis/python-spanner",
+ "cloud.region": GOOGLE_CLOUD_REGION_GLOBAL,
}
)