diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 9602d54059..cb06536dab 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
docker:
image: gcr.io/repo-automation-bots/owlbot-python:latest
- digest: sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3
+ digest: sha256:5ff7446edeaede81c3ed58b23a4e76a5403fba1350ce28478045657303b6479d
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
index 412b0b56a9..4e1b1fb8b5 100644
--- a/.kokoro/docker/docs/Dockerfile
+++ b/.kokoro/docker/docs/Dockerfile
@@ -40,6 +40,7 @@ RUN apt-get update \
libssl-dev \
libsqlite3-dev \
portaudio19-dev \
+ python3-distutils \
redis-server \
software-properties-common \
ssh \
@@ -59,40 +60,8 @@ RUN apt-get update \
&& rm -rf /var/lib/apt/lists/* \
&& rm -f /var/cache/apt/archives/*.deb
-
-COPY fetch_gpg_keys.sh /tmp
-# Install the desired versions of Python.
-RUN set -ex \
- && export GNUPGHOME="$(mktemp -d)" \
- && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \
- && /tmp/fetch_gpg_keys.sh \
- && for PYTHON_VERSION in 3.7.8 3.8.5; do \
- wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \
- && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \
- && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \
- && rm -r python-${PYTHON_VERSION}.tar.xz.asc \
- && mkdir -p /usr/src/python-${PYTHON_VERSION} \
- && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \
- && rm python-${PYTHON_VERSION}.tar.xz \
- && cd /usr/src/python-${PYTHON_VERSION} \
- && ./configure \
- --enable-shared \
- # This works only on Python 2.7 and throws a warning on every other
- # version, but seems otherwise harmless.
- --enable-unicode=ucs4 \
- --with-system-ffi \
- --without-ensurepip \
- && make -j$(nproc) \
- && make install \
- && ldconfig \
- ; done \
- && rm -rf "${GNUPGHOME}" \
- && rm -rf /usr/src/python* \
- && rm -rf ~/.cache/
-
RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
- && python3.7 /tmp/get-pip.py \
&& python3.8 /tmp/get-pip.py \
&& rm /tmp/get-pip.py
-CMD ["python3.7"]
+CMD ["python3.8"]
diff --git a/.kokoro/docker/docs/fetch_gpg_keys.sh b/.kokoro/docker/docs/fetch_gpg_keys.sh
deleted file mode 100755
index d653dd868e..0000000000
--- a/.kokoro/docker/docs/fetch_gpg_keys.sh
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/bash
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# A script to fetch gpg keys with retry.
-# Avoid jinja parsing the file.
-#
-
-function retry {
- if [[ "${#}" -le 1 ]]; then
- echo "Usage: ${0} retry_count commands.."
- exit 1
- fi
- local retries=${1}
- local command="${@:2}"
- until [[ "${retries}" -le 0 ]]; do
- $command && return 0
- if [[ $? -ne 0 ]]; then
- echo "command failed, retrying"
- ((retries--))
- fi
- done
- return 1
-}
-
-# 3.6.9, 3.7.5 (Ned Deily)
-retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
- 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D
-
-# 3.8.0 (Łukasz Langa)
-retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
- E3FF2839C048B25C084DEBE9B26995E310250568
-
-#
diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg
new file mode 100644
index 0000000000..a62ce6bdd2
--- /dev/null
+++ b/.kokoro/samples/python3.9/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.9"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py39"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-spanner/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-spanner/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/continuous.cfg b/.kokoro/samples/python3.9/continuous.cfg
new file mode 100644
index 0000000000..a1c8d9759c
--- /dev/null
+++ b/.kokoro/samples/python3.9/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg
new file mode 100644
index 0000000000..f9cfcd33e0
--- /dev/null
+++ b/.kokoro/samples/python3.9/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg
new file mode 100644
index 0000000000..50fec96497
--- /dev/null
+++ b/.kokoro/samples/python3.9/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/presubmit.cfg b/.kokoro/samples/python3.9/presubmit.cfg
new file mode 100644
index 0000000000..a1c8d9759c
--- /dev/null
+++ b/.kokoro/samples/python3.9/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
index cf5de74c17..311a8d54b9 100755
--- a/.kokoro/test-samples-impl.sh
+++ b/.kokoro/test-samples-impl.sh
@@ -20,9 +20,9 @@ set -eo pipefail
# Enables `**` to include files nested inside sub-folders
shopt -s globstar
-# Exit early if samples directory doesn't exist
-if [ ! -d "./samples" ]; then
- echo "No tests run. `./samples` not found"
+# Exit early if samples don't exist
+if ! find samples -name 'requirements.txt' | grep -q .; then
+ echo "No tests run. './samples/**/requirements.txt' not found"
exit 0
fi
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6e9caf08c6..4d7cda8919 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,30 @@
[1]: https://pypi.org/project/google-cloud-spanner/#history
+## [3.7.0](https://www.github.com/googleapis/python-spanner/compare/v3.6.0...v3.7.0) (2021-07-29)
+
+
+### Features
+
+* add always_use_jwt_access ([#381](https://www.github.com/googleapis/python-spanner/issues/381)) ([0f1a5de](https://www.github.com/googleapis/python-spanner/commit/0f1a5ded572685a96d29a60c959cb00a48f7a87f))
+* add configurable leader placement support ([#399](https://www.github.com/googleapis/python-spanner/issues/399)) ([7f1b120](https://www.github.com/googleapis/python-spanner/commit/7f1b1209e62062014545cf959d41f04184552eec))
+* add sample for low cost instances ([#392](https://www.github.com/googleapis/python-spanner/issues/392)) ([3f4f93f](https://www.github.com/googleapis/python-spanner/commit/3f4f93f75f5585a82047bf8d83a24622ad776ecb))
+
+
+### Bug Fixes
+
+* avoid bad version of `opentelemetry-instrumentation` ([#429](https://www.github.com/googleapis/python-spanner/issues/429)) ([1620c12](https://www.github.com/googleapis/python-spanner/commit/1620c12a56e0d007cf010690bab303db06d0c914))
+* **deps:** pin 'google-{api,cloud}-core' to allow 2.x versions ([#415](https://www.github.com/googleapis/python-spanner/issues/415)) ([b0455d0](https://www.github.com/googleapis/python-spanner/commit/b0455d0ab657cd053a7527e99bdbfadc4de23b30))
+* disable always_use_jwt_access ([c37bf21](https://www.github.com/googleapis/python-spanner/commit/c37bf21afdf417757eff67fe8500aa65f49fd5ad))
+* disable always_use_jwt_access ([#395](https://www.github.com/googleapis/python-spanner/issues/395)) ([c37bf21](https://www.github.com/googleapis/python-spanner/commit/c37bf21afdf417757eff67fe8500aa65f49fd5ad))
+* enable self signed jwt for grpc ([#427](https://www.github.com/googleapis/python-spanner/issues/427)) ([2487800](https://www.github.com/googleapis/python-spanner/commit/2487800e31842a44dcc37937c325e130c8c926b0))
+* support merging for NUMERIC values ([#434](https://www.github.com/googleapis/python-spanner/issues/434)) ([06b4215](https://www.github.com/googleapis/python-spanner/commit/06b4215f76ae806eba1d0d07115c8c90b8c7482d)), closes [#433](https://www.github.com/googleapis/python-spanner/issues/433)
+
+
+### Documentation
+
+* fix docstring for session.py ([#387](https://www.github.com/googleapis/python-spanner/issues/387)) ([3132587](https://www.github.com/googleapis/python-spanner/commit/3132587453f7bd0be72ebc393626b5c8b1bab982))
+
## [3.6.0](https://www.github.com/googleapis/python-spanner/compare/v3.5.0...v3.6.0) (2021-06-23)
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 3df455e996..6ddd60e7c1 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -68,14 +68,12 @@ Using ``nox``
We use `nox `__ to instrument our tests.
- To test your changes, run unit tests with ``nox``::
+ $ nox -s unit
- $ nox -s unit-3.8
- $ ...
+- To run a single unit test::
-- Args to pytest can be passed through the nox command separated by a `--`. For
- example, to run a single test::
+ $ nox -s unit-3.9 -- -k <name of test>
- $ nox -s unit-3.8 -- -k <name of test>
.. note::
@@ -142,7 +140,7 @@ Running System Tests
- To run system tests, you can execute::
# Run all system tests
- $ nox -s system-3.8
+ $ nox -s system
# Run a single system test
$ nox -s system-3.8 -- -k <name of test>
@@ -215,8 +213,8 @@ Supported versions can be found in our ``noxfile.py`` `config`_.
.. _config: https://github.com/googleapis/python-spanner/blob/master/noxfile.py
-We also explicitly decided to support Python 3 beginning with version
-3.6. Reasons for this include:
+We also explicitly decided to support Python 3 beginning with version 3.6.
+Reasons for this include:
- Encouraging use of newest versions of Python 3
- Taking the lead of `prominent`_ open-source `projects`_
diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py
index 47a7026339..1100d160c5 100644
--- a/google/cloud/spanner_admin_database_v1/services/database_admin/client.py
+++ b/google/cloud/spanner_admin_database_v1/services/database_admin/client.py
@@ -435,6 +435,10 @@ def __init__(
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
+ always_use_jwt_access=(
+ Transport == type(self).get_transport_class("grpc")
+ or Transport == type(self).get_transport_class("grpc_asyncio")
+ ),
)
def list_databases(
diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py
index 66574db79c..ec8cafa77f 100644
--- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py
+++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/base.py
@@ -25,6 +25,7 @@
from google.api_core import retry as retries # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
from google.cloud.spanner_admin_database_v1.types import backup
from google.cloud.spanner_admin_database_v1.types import backup as gsad_backup
@@ -52,8 +53,6 @@
except pkg_resources.DistributionNotFound: # pragma: NO COVER
_GOOGLE_AUTH_VERSION = None
-_API_CORE_VERSION = google.api_core.__version__
-
class DatabaseAdminTransport(abc.ABC):
"""Abstract transport class for DatabaseAdmin."""
@@ -74,6 +73,7 @@ def __init__(
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
@@ -97,6 +97,8 @@ def __init__(
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
@@ -106,7 +108,7 @@ def __init__(
scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
# Save the scopes.
- self._scopes = scopes or self.AUTH_SCOPES
+ self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
@@ -125,13 +127,20 @@ def __init__(
**scopes_kwargs, quota_project_id=quota_project_id
)
+ # If the credentials are service account credentials, then always try to use a self-signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
# Save the credentials.
self._credentials = credentials
- # TODO(busunkim): These two class methods are in the base transport
+ # TODO(busunkim): This method is in the base transport
# to avoid duplicating code across the transport classes. These functions
- # should be deleted once the minimum required versions of google-api-core
- # and google-auth are increased.
+ # should be deleted once the minimum required version of google-auth is increased.
# TODO: Remove this function once google-auth >= 1.25.0 is required
@classmethod
@@ -152,27 +161,6 @@ def _get_scopes_kwargs(
return scopes_kwargs
- # TODO: Remove this function once google-api-core >= 1.26.0 is required
- @classmethod
- def _get_self_signed_jwt_kwargs(
- cls, host: str, scopes: Optional[Sequence[str]]
- ) -> Dict[str, Union[Optional[Sequence[str]], str]]:
- """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version"""
-
- self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {}
-
- if _API_CORE_VERSION and (
- packaging.version.parse(_API_CORE_VERSION)
- >= packaging.version.parse("1.26.0")
- ):
- self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES
- self_signed_jwt_kwargs["scopes"] = scopes
- self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST
- else:
- self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES
-
- return self_signed_jwt_kwargs
-
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
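Aside (not part of the patch): the transport change above replaces the removed google-api-core version sniffing with a direct credential upgrade. A minimal sketch of that pattern, assuming a service account key file (the filename is illustrative) and a google-auth release that exposes with_always_use_jwt_access:

    from google.oauth2 import service_account

    # Load ordinary service account credentials (the key file name is illustrative).
    creds = service_account.Credentials.from_service_account_file("key.json")

    # Mirror the guard used by the generated transports: only opt in to
    # self-signed JWTs when the installed google-auth supports it.
    if hasattr(service_account.Credentials, "with_always_use_jwt_access"):
        creds = creds.with_always_use_jwt_access(True)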
diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py
index 043d5fd1c2..00c46cf906 100644
--- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py
+++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc.py
@@ -69,6 +69,7 @@ def __init__(
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
@@ -109,6 +110,8 @@ def __init__(
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -162,6 +165,7 @@ def __init__(
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
@@ -217,14 +221,14 @@ def create_channel(
and ``credentials_file`` are passed.
"""
- self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)
-
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
- **self_signed_jwt_kwargs,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
**kwargs,
)
diff --git a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py
index 9ca356617f..49832746ea 100644
--- a/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py
+++ b/google/cloud/spanner_admin_database_v1/services/database_admin/transports/grpc_asyncio.py
@@ -90,14 +90,14 @@ def create_channel(
aio.Channel: A gRPC AsyncIO channel object.
"""
- self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)
-
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
- **self_signed_jwt_kwargs,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
**kwargs,
)
@@ -115,6 +115,7 @@ def __init__(
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
@@ -156,6 +157,8 @@ def __init__(
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -208,6 +211,7 @@ def __init__(
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
diff --git a/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py b/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py
index 5824f575a5..e7aee2ac1e 100644
--- a/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py
+++ b/google/cloud/spanner_admin_database_v1/types/spanner_database_admin.py
@@ -121,6 +121,14 @@ class Database(proto.Message):
sure to account for the time from the moment
when the value is queried to the moment when you
initiate the recovery.
+ default_leader (str):
+ Output only. The read-write region which contains the
+ database's leader replicas.
+
+ This is the same as the value of default_leader database
+ option set using DatabaseAdmin.CreateDatabase or
+ DatabaseAdmin.UpdateDatabaseDdl. If not explicitly set, this
+ is empty.
"""
class State(proto.Enum):
@@ -144,6 +152,7 @@ class State(proto.Enum):
earliest_version_time = proto.Field(
proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,
)
+ default_leader = proto.Field(proto.STRING, number=9,)
class ListDatabasesRequest(proto.Message):
diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py
index 248478dd80..2f6187e0a2 100644
--- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py
+++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/client.py
@@ -381,6 +381,10 @@ def __init__(
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
+ always_use_jwt_access=(
+ Transport == type(self).get_transport_class("grpc")
+ or Transport == type(self).get_transport_class("grpc_asyncio")
+ ),
)
def list_instance_configs(
diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py
index 5b6f2b655d..78ff62b585 100644
--- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py
+++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/base.py
@@ -25,6 +25,7 @@
from google.api_core import retry as retries # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
from google.cloud.spanner_admin_instance_v1.types import spanner_instance_admin
from google.iam.v1 import iam_policy_pb2 # type: ignore
@@ -50,8 +51,6 @@
except pkg_resources.DistributionNotFound: # pragma: NO COVER
_GOOGLE_AUTH_VERSION = None
-_API_CORE_VERSION = google.api_core.__version__
-
class InstanceAdminTransport(abc.ABC):
"""Abstract transport class for InstanceAdmin."""
@@ -72,6 +71,7 @@ def __init__(
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
@@ -95,6 +95,8 @@ def __init__(
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
@@ -104,7 +106,7 @@ def __init__(
scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
# Save the scopes.
- self._scopes = scopes or self.AUTH_SCOPES
+ self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
@@ -123,13 +125,20 @@ def __init__(
**scopes_kwargs, quota_project_id=quota_project_id
)
+ # If the credentials are service account credentials, then always try to use a self-signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
# Save the credentials.
self._credentials = credentials
- # TODO(busunkim): These two class methods are in the base transport
+ # TODO(busunkim): This method is in the base transport
# to avoid duplicating code across the transport classes. These functions
- # should be deleted once the minimum required versions of google-api-core
- # and google-auth are increased.
+ # should be deleted once the minimum required version of google-auth is increased.
# TODO: Remove this function once google-auth >= 1.25.0 is required
@classmethod
@@ -150,27 +159,6 @@ def _get_scopes_kwargs(
return scopes_kwargs
- # TODO: Remove this function once google-api-core >= 1.26.0 is required
- @classmethod
- def _get_self_signed_jwt_kwargs(
- cls, host: str, scopes: Optional[Sequence[str]]
- ) -> Dict[str, Union[Optional[Sequence[str]], str]]:
- """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version"""
-
- self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {}
-
- if _API_CORE_VERSION and (
- packaging.version.parse(_API_CORE_VERSION)
- >= packaging.version.parse("1.26.0")
- ):
- self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES
- self_signed_jwt_kwargs["scopes"] = scopes
- self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST
- else:
- self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES
-
- return self_signed_jwt_kwargs
-
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py
index 234d71e802..6f2c4caa6e 100644
--- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py
+++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc.py
@@ -82,6 +82,7 @@ def __init__(
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
@@ -122,6 +123,8 @@ def __init__(
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -175,6 +178,7 @@ def __init__(
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
@@ -230,14 +234,14 @@ def create_channel(
and ``credentials_file`` are passed.
"""
- self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)
-
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
- **self_signed_jwt_kwargs,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
**kwargs,
)
diff --git a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py
index a7e9acdc61..3e573e71c0 100644
--- a/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py
+++ b/google/cloud/spanner_admin_instance_v1/services/instance_admin/transports/grpc_asyncio.py
@@ -103,14 +103,14 @@ def create_channel(
aio.Channel: A gRPC AsyncIO channel object.
"""
- self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)
-
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
- **self_signed_jwt_kwargs,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
**kwargs,
)
@@ -128,6 +128,7 @@ def __init__(
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
@@ -169,6 +170,8 @@ def __init__(
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -221,6 +224,7 @@ def __init__(
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
diff --git a/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py b/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py
index d8cef6ea2b..e55a5961b0 100644
--- a/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py
+++ b/google/cloud/spanner_admin_instance_v1/types/spanner_instance_admin.py
@@ -88,11 +88,15 @@ class InstanceConfig(proto.Message):
The geographic placement of nodes in this
instance configuration and their replication
properties.
+ leader_options (Sequence[str]):
+ Allowed values of the "default_leader" schema option for
+ databases in instances that use this instance configuration.
"""
name = proto.Field(proto.STRING, number=1,)
display_name = proto.Field(proto.STRING, number=2,)
replicas = proto.RepeatedField(proto.MESSAGE, number=3, message="ReplicaInfo",)
+ leader_options = proto.RepeatedField(proto.STRING, number=4,)
class Instance(proto.Message):
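Aside (illustrative, not part of the patch): the new InstanceConfig.leader_options field can be inspected through the existing instance-config listing API; no new client surface is assumed beyond this field:

    from google.cloud import spanner

    client = spanner.Client()

    # leader_options is empty for configurations without configurable
    # leader placement.
    for config in client.list_instance_configs():
        print(config.name, list(config.leader_options))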
diff --git a/google/cloud/spanner_v1/database.py b/google/cloud/spanner_v1/database.py
index fae983f334..3d62737e03 100644
--- a/google/cloud/spanner_v1/database.py
+++ b/google/cloud/spanner_v1/database.py
@@ -144,6 +144,7 @@ def __init__(
self._version_retention_period = None
self._earliest_version_time = None
self._encryption_info = None
+ self._default_leader = None
self.log_commit_stats = False
self._logger = logger
self._encryption_config = encryption_config
@@ -279,6 +280,15 @@ def encryption_info(self):
"""
return self._encryption_info
+ @property
+ def default_leader(self):
+ """The read-write region which contains the database's leader replicas.
+
+ :rtype: str
+ :returns: a string representing the read-write region
+ """
+ return self._default_leader
+
@property
def ddl_statements(self):
"""DDL Statements used to define database schema.
@@ -414,6 +424,7 @@ def reload(self):
self._earliest_version_time = response.earliest_version_time
self._encryption_config = response.encryption_config
self._encryption_info = response.encryption_info
+ self._default_leader = response.default_leader
def update_ddl(self, ddl_statements, operation_id=""):
"""Update DDL for this database.
diff --git a/google/cloud/spanner_v1/services/spanner/client.py b/google/cloud/spanner_v1/services/spanner/client.py
index 526dc5af73..0acc775d60 100644
--- a/google/cloud/spanner_v1/services/spanner/client.py
+++ b/google/cloud/spanner_v1/services/spanner/client.py
@@ -368,6 +368,10 @@ def __init__(
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
+ always_use_jwt_access=(
+ Transport == type(self).get_transport_class("grpc")
+ or Transport == type(self).get_transport_class("grpc_asyncio")
+ ),
)
def create_session(
diff --git a/google/cloud/spanner_v1/services/spanner/transports/base.py b/google/cloud/spanner_v1/services/spanner/transports/base.py
index b0fb6c3d63..d230d79bc1 100644
--- a/google/cloud/spanner_v1/services/spanner/transports/base.py
+++ b/google/cloud/spanner_v1/services/spanner/transports/base.py
@@ -24,6 +24,7 @@
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
from google.cloud.spanner_v1.types import commit_response
from google.cloud.spanner_v1.types import result_set
@@ -47,8 +48,6 @@
except pkg_resources.DistributionNotFound: # pragma: NO COVER
_GOOGLE_AUTH_VERSION = None
-_API_CORE_VERSION = google.api_core.__version__
-
class SpannerTransport(abc.ABC):
"""Abstract transport class for Spanner."""
@@ -69,6 +68,7 @@ def __init__(
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
@@ -92,6 +92,8 @@ def __init__(
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
@@ -101,7 +103,7 @@ def __init__(
scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
# Save the scopes.
- self._scopes = scopes or self.AUTH_SCOPES
+ self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
@@ -120,13 +122,20 @@ def __init__(
**scopes_kwargs, quota_project_id=quota_project_id
)
+ # If the credentials are service account credentials, then always try to use a self-signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
# Save the credentials.
self._credentials = credentials
- # TODO(busunkim): These two class methods are in the base transport
+ # TODO(busunkim): This method is in the base transport
# to avoid duplicating code across the transport classes. These functions
- # should be deleted once the minimum required versions of google-api-core
- # and google-auth are increased.
+ # should be deleted once the minimum required version of google-auth is increased.
# TODO: Remove this function once google-auth >= 1.25.0 is required
@classmethod
@@ -147,27 +156,6 @@ def _get_scopes_kwargs(
return scopes_kwargs
- # TODO: Remove this function once google-api-core >= 1.26.0 is required
- @classmethod
- def _get_self_signed_jwt_kwargs(
- cls, host: str, scopes: Optional[Sequence[str]]
- ) -> Dict[str, Union[Optional[Sequence[str]], str]]:
- """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version"""
-
- self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {}
-
- if _API_CORE_VERSION and (
- packaging.version.parse(_API_CORE_VERSION)
- >= packaging.version.parse("1.26.0")
- ):
- self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES
- self_signed_jwt_kwargs["scopes"] = scopes
- self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST
- else:
- self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES
-
- return self_signed_jwt_kwargs
-
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc.py b/google/cloud/spanner_v1/services/spanner/transports/grpc.py
index 15e97c4446..66e9227290 100644
--- a/google/cloud/spanner_v1/services/spanner/transports/grpc.py
+++ b/google/cloud/spanner_v1/services/spanner/transports/grpc.py
@@ -63,6 +63,7 @@ def __init__(
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
@@ -103,6 +104,8 @@ def __init__(
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -155,6 +158,7 @@ def __init__(
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
@@ -210,14 +214,14 @@ def create_channel(
and ``credentials_file`` are passed.
"""
- self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)
-
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
- **self_signed_jwt_kwargs,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
**kwargs,
)
diff --git a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py
index f87b4504de..ad78c2325e 100644
--- a/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py
+++ b/google/cloud/spanner_v1/services/spanner/transports/grpc_asyncio.py
@@ -84,14 +84,14 @@ def create_channel(
aio.Channel: A gRPC AsyncIO channel object.
"""
- self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)
-
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
- **self_signed_jwt_kwargs,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
**kwargs,
)
@@ -109,6 +109,7 @@ def __init__(
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
@@ -150,6 +151,8 @@ def __init__(
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -201,6 +204,7 @@ def __init__(
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
diff --git a/google/cloud/spanner_v1/session.py b/google/cloud/spanner_v1/session.py
index 84b65429d6..99ec8a69dd 100644
--- a/google/cloud/spanner_v1/session.py
+++ b/google/cloud/spanner_v1/session.py
@@ -110,7 +110,7 @@ def create(self):
See
https://cloud.google.com/spanner/reference/rpc/google.spanner.v1#google.spanner.v1.Spanner.CreateSession
- :raises: :exc:`ValueError` if :attr:`session_id` is already set.
+ :raises ValueError: if :attr:`session_id` is already set.
"""
if self._session_id is not None:
raise ValueError("Session ID already set by back-end")
@@ -171,7 +171,7 @@ def delete(self):
def ping(self):
"""Ping the session to keep it alive by executing "SELECT 1".
- :raises: ValueError: if :attr:`session_id` is not already set.
+ :raises ValueError: if :attr:`session_id` is not already set.
"""
if self._session_id is None:
raise ValueError("Session ID not set by back-end")
diff --git a/google/cloud/spanner_v1/streamed.py b/google/cloud/spanner_v1/streamed.py
index e5f7e4984e..9ee04867b3 100644
--- a/google/cloud/spanner_v1/streamed.py
+++ b/google/cloud/spanner_v1/streamed.py
@@ -315,6 +315,7 @@ def _merge_struct(lhs, rhs, type_):
TypeCode.STRING: _merge_string,
TypeCode.STRUCT: _merge_struct,
TypeCode.TIMESTAMP: _merge_string,
+ TypeCode.NUMERIC: _merge_string,
}
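Aside (toy illustration, not part of the patch): registering _merge_string for NUMERIC means a value split across PartialResultSet chunks is recombined by plain string concatenation, since NUMERIC values travel as decimal strings:

    lhs, rhs = "1234.567", "89"       # two chunks of one NUMERIC column (invented values)
    assert lhs + rhs == "1234.56789"  # what the STRING/NUMERIC merger produces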
diff --git a/google/cloud/spanner_v1/types/type.py b/google/cloud/spanner_v1/types/type.py
index e06e5fc5b0..42754d974c 100644
--- a/google/cloud/spanner_v1/types/type.py
+++ b/google/cloud/spanner_v1/types/type.py
@@ -41,6 +41,7 @@ class TypeCode(proto.Enum):
ARRAY = 8
STRUCT = 9
NUMERIC = 10
+ JSON = 11
class Type(proto.Message):
diff --git a/noxfile.py b/noxfile.py
index efc4f53738..6579eecd49 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -64,14 +64,7 @@ def lint(session):
@nox.session(python=DEFAULT_PYTHON_VERSION)
def blacken(session):
- """Run black.
-
- Format code to uniform standard.
-
- This currently uses Python 3.6 due to the automated Kokoro run of synthtool.
- That run uses an image that doesn't have 3.6 installed. Before updating this
- check the state of the `gcp_ubuntu_config` we use for that Kokoro run.
- """
+ """Run black. Format code to uniform standard."""
session.install(BLACK_VERSION)
session.run(
"black", *BLACK_PATHS,
@@ -156,6 +149,10 @@ def system(session):
"Credentials or emulator host must be set via environment variable"
)
+ # Install pyopenssl for mTLS testing.
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
+ session.install("pyopenssl")
+
system_test_exists = os.path.exists(system_test_path)
system_test_folder_exists = os.path.exists(system_test_folder_path)
# Sanity check: only run tests if found.
@@ -172,9 +169,21 @@ def system(session):
# Run py.test against the system tests.
if system_test_exists:
- session.run("py.test", "--quiet", system_test_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_path,
+ *session.posargs,
+ )
if system_test_folder_exists:
- session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_folder_path,
+ *session.posargs,
+ )
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -195,7 +204,7 @@ def docs(session):
"""Build the docs for this library."""
session.install("-e", ".[tracing]")
- session.install("sphinx", "alabaster", "recommonmark")
+ session.install("sphinx==4.0.1", "alabaster", "recommonmark")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
@@ -217,7 +226,9 @@ def docfx(session):
"""Build the docfx yaml files for this library."""
session.install("-e", ".[tracing]")
- session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml")
+ session.install(
+ "sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml"
+ )
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
diff --git a/owlbot.py b/owlbot.py
index 0899ba8d90..635dc54225 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -109,8 +109,8 @@ def get_staging_dirs(
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
-templated_files = common.py_library(microgenerator=True, samples=True)
-s.move(templated_files, excludes=[".coveragerc", "noxfile.py"])
+templated_files = common.py_library(microgenerator=True, samples=True, cov_level=99)
+s.move(templated_files, excludes=[".coveragerc"])
# Ensure CI runs on a new instance each time
s.replace(
@@ -127,4 +127,85 @@ def get_staging_dirs(
python.py_samples()
+# ----------------------------------------------------------------------------
+# Customize noxfile.py
+# ----------------------------------------------------------------------------
+
+def place_before(path, text, *before_text, escape=None):
+ replacement = "\n".join(before_text) + "\n" + text
+ if escape:
+ for c in escape:
+ text = text.replace(c, '\\' + c)
+ s.replace([path], text, replacement)
+
+open_telemetry_test = """
+ session.install("-e", ".[tracing]", "-c", constraints_path)
+
+ # Run py.test against the unit tests with OpenTelemetry.
+ session.run(
+ "py.test",
+ "--quiet",
+ "--cov=google.cloud.spanner",
+ "--cov=google.cloud",
+ "--cov=tests.unit",
+ "--cov-append",
+ "--cov-config=.coveragerc",
+ "--cov-report=",
+ "--cov-fail-under=0",
+ os.path.join("tests", "unit"),
+ *session.posargs,
+ )
+"""
+
+place_before(
+ "noxfile.py",
+ "@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)",
+ open_telemetry_test,
+ escape="()"
+)
+
+skip_tests_if_env_var_not_set = """# Sanity check: Only run tests if the environment variable is set.
+ if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", "") and not os.environ.get(
+ "SPANNER_EMULATOR_HOST", ""
+ ):
+ session.skip(
+ "Credentials or emulator host must be set via environment variable"
+ )
+"""
+
+place_before(
+ "noxfile.py",
+ "# Install pyopenssl for mTLS testing.",
+ skip_tests_if_env_var_not_set,
+ escape="()"
+)
+
+s.replace(
+ "noxfile.py",
+ """f"--junitxml=unit_{session.python}_sponge_log.xml",
+ "--cov=google/cloud",
+ "--cov=tests/unit",""",
+ """\"--cov=google.cloud.spanner",
+ "--cov=google.cloud",
+ "--cov=tests.unit","""
+)
+
+s.replace(
+ "noxfile.py",
+ """session.install\("-e", "."\)""",
+ """session.install("-e", ".[tracing]")"""
+)
+
+s.replace(
+ "noxfile.py",
+ """# Install all test dependencies, then install this package into the
+ # virtualenv's dist-packages.
+ session.install\("mock", "pytest", "google-cloud-testutils", "-c", constraints_path\)
+ session.install\("-e", ".", "-c", constraints_path\)""",
+ """# Install all test dependencies, then install this package into the
+ # virtualenv's dist-packages.
+ session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path)
+ session.install("-e", ".[tracing]", "-c", constraints_path)"""
+)
+
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/samples/samples/autocommit_test.py b/samples/samples/autocommit_test.py
index c9631516fa..9880460cac 100644
--- a/samples/samples/autocommit_test.py
+++ b/samples/samples/autocommit_test.py
@@ -4,60 +4,29 @@
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
-import uuid
-
from google.api_core.exceptions import Aborted
-from google.cloud import spanner
import pytest
from test_utils.retry import RetryErrors
import autocommit
-def unique_instance_id():
- """Creates a unique id for the database."""
- return f"test-instance-{uuid.uuid4().hex[:10]}"
-
-
-def unique_database_id():
- """Creates a unique id for the database."""
- return f"test-db-{uuid.uuid4().hex[:10]}"
-
-
-INSTANCE_ID = unique_instance_id()
-DATABASE_ID = unique_database_id()
-
-
-@pytest.fixture(scope="module")
-def spanner_instance():
- spanner_client = spanner.Client()
- config_name = f"{spanner_client.project_name}/instanceConfigs/regional-us-central1"
-
- instance = spanner_client.instance(INSTANCE_ID, config_name)
- op = instance.create()
- op.result(120) # block until completion
- yield instance
- instance.delete()
-
-
@pytest.fixture(scope="module")
-def database(spanner_instance):
- """Creates a temporary database that is removed after testing."""
- db = spanner_instance.database(DATABASE_ID)
- db.create()
- yield db
- db.drop()
+def sample_name():
+ return "autocommit"
@RetryErrors(exception=Aborted, max_tries=2)
-def test_enable_autocommit_mode(capsys, database):
+def test_enable_autocommit_mode(capsys, instance_id, sample_database):
# Delete table if it exists for retry attempts.
- table = database.table('Singers')
+ table = sample_database.table('Singers')
if table.exists():
- op = database.update_ddl(["DROP TABLE Singers"])
+ op = sample_database.update_ddl(["DROP TABLE Singers"])
op.result()
- autocommit.enable_autocommit_mode(INSTANCE_ID, DATABASE_ID)
+ autocommit.enable_autocommit_mode(
+ instance_id, sample_database.database_id,
+ )
out, _ = capsys.readouterr()
assert "Autocommit mode is enabled." in out
assert "SingerId: 13, AlbumId: Russell, AlbumTitle: Morales" in out
diff --git a/samples/samples/backup_sample_test.py b/samples/samples/backup_sample_test.py
index 8d1d95ff51..6d89dcf440 100644
--- a/samples/samples/backup_sample_test.py
+++ b/samples/samples/backup_sample_test.py
@@ -14,16 +14,15 @@
import uuid
from google.api_core.exceptions import DeadlineExceeded
-from google.cloud import spanner
import pytest
from test_utils.retry import RetryErrors
import backup_sample
-def unique_instance_id():
- """ Creates a unique id for the database. """
- return f"test-instance-{uuid.uuid4().hex[:10]}"
+@pytest.fixture(scope="module")
+def sample_name():
+ return "backup"
def unique_database_id():
@@ -36,8 +35,6 @@ def unique_backup_id():
return f"test-backup-{uuid.uuid4().hex[:10]}"
-INSTANCE_ID = unique_instance_id()
-DATABASE_ID = unique_database_id()
RESTORE_DB_ID = unique_database_id()
BACKUP_ID = unique_backup_id()
CMEK_RESTORE_DB_ID = unique_database_id()
@@ -46,112 +43,100 @@ def unique_backup_id():
RETENTION_PERIOD = "7d"
-@pytest.fixture(scope="module")
-def spanner_instance():
- spanner_client = spanner.Client()
- instance_config = "{}/instanceConfigs/{}".format(
- spanner_client.project_name, "regional-us-central1"
- )
- instance = spanner_client.instance(INSTANCE_ID, instance_config)
- op = instance.create()
- op.result(120) # block until completion
- yield instance
- for database_pb in instance.list_databases():
- database = instance.database(database_pb.name.split("/")[-1])
- database.drop()
- for backup_pb in instance.list_backups():
- backup = instance.backup(backup_pb.name.split("/")[-1])
- backup.delete()
- instance.delete()
-
-
-@pytest.fixture(scope="module")
-def database(spanner_instance):
- """ Creates a temporary database that is removed after testing. """
- db = spanner_instance.database(DATABASE_ID)
- db.create()
- yield db
- db.drop()
-
-
-def test_create_backup(capsys, database):
+@pytest.mark.dependency(name="create_backup")
+def test_create_backup(capsys, instance_id, sample_database):
version_time = None
- with database.snapshot() as snapshot:
+ with sample_database.snapshot() as snapshot:
results = snapshot.execute_sql("SELECT CURRENT_TIMESTAMP()")
version_time = list(results)[0][0]
- backup_sample.create_backup(INSTANCE_ID, DATABASE_ID, BACKUP_ID, version_time)
+ backup_sample.create_backup(
+ instance_id,
+ sample_database.database_id,
+ BACKUP_ID,
+ version_time,
+ )
out, _ = capsys.readouterr()
assert BACKUP_ID in out
-def test_create_backup_with_encryption_key(capsys, spanner_instance, database):
- kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format(
- spanner_instance._client.project, "us-central1", "spanner-test-keyring", "spanner-test-cmek"
+@pytest.mark.dependency(name="create_backup_with_encryption_key")
+def test_create_backup_with_encryption_key(
+ capsys, instance_id, sample_database, kms_key_name,
+):
+ backup_sample.create_backup_with_encryption_key(
+ instance_id,
+ sample_database.database_id,
+ CMEK_BACKUP_ID,
+ kms_key_name,
)
- backup_sample.create_backup_with_encryption_key(INSTANCE_ID, DATABASE_ID, CMEK_BACKUP_ID, kms_key_name)
out, _ = capsys.readouterr()
assert CMEK_BACKUP_ID in out
assert kms_key_name in out
-# Depends on test_create_backup having run first
+@pytest.mark.dependency(depends=["create_backup"])
@RetryErrors(exception=DeadlineExceeded, max_tries=2)
-def test_restore_database(capsys):
- backup_sample.restore_database(INSTANCE_ID, RESTORE_DB_ID, BACKUP_ID)
+def test_restore_database(capsys, instance_id, sample_database):
+ backup_sample.restore_database(instance_id, RESTORE_DB_ID, BACKUP_ID)
out, _ = capsys.readouterr()
- assert (DATABASE_ID + " restored to ") in out
+ assert (sample_database.database_id + " restored to ") in out
assert (RESTORE_DB_ID + " from backup ") in out
assert BACKUP_ID in out
-# Depends on test_create_backup having run first
+@pytest.mark.dependency(depends=["create_backup_with_encryption_key"])
@RetryErrors(exception=DeadlineExceeded, max_tries=2)
-def test_restore_database_with_encryption_key(capsys, spanner_instance):
- kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format(
- spanner_instance._client.project, "us-central1", "spanner-test-keyring", "spanner-test-cmek"
- )
- backup_sample.restore_database_with_encryption_key(INSTANCE_ID, CMEK_RESTORE_DB_ID, CMEK_BACKUP_ID, kms_key_name)
+def test_restore_database_with_encryption_key(
+ capsys, instance_id, sample_database, kms_key_name,
+):
+ backup_sample.restore_database_with_encryption_key(
+ instance_id, CMEK_RESTORE_DB_ID, CMEK_BACKUP_ID, kms_key_name)
out, _ = capsys.readouterr()
- assert (DATABASE_ID + " restored to ") in out
+ assert (sample_database.database_id + " restored to ") in out
assert (CMEK_RESTORE_DB_ID + " from backup ") in out
assert CMEK_BACKUP_ID in out
assert kms_key_name in out
-# Depends on test_create_backup having run first
-def test_list_backup_operations(capsys, spanner_instance):
- backup_sample.list_backup_operations(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["create_backup"])
+def test_list_backup_operations(capsys, instance_id, sample_database):
+ backup_sample.list_backup_operations(
+ instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert BACKUP_ID in out
- assert DATABASE_ID in out
+ assert sample_database.database_id in out
-# Depends on test_create_backup having run first
-def test_list_backups(capsys, spanner_instance):
- backup_sample.list_backups(INSTANCE_ID, DATABASE_ID, BACKUP_ID)
+@pytest.mark.dependency(depends=["create_backup"])
+def test_list_backups(capsys, instance_id, sample_database):
+ backup_sample.list_backups(
+ instance_id, sample_database.database_id, BACKUP_ID,
+ )
out, _ = capsys.readouterr()
id_count = out.count(BACKUP_ID)
assert id_count == 7
-# Depends on test_create_backup having run first
-def test_update_backup(capsys):
- backup_sample.update_backup(INSTANCE_ID, BACKUP_ID)
+@pytest.mark.dependency(depends=["create_backup"])
+def test_update_backup(capsys, instance_id):
+ backup_sample.update_backup(instance_id, BACKUP_ID)
out, _ = capsys.readouterr()
assert BACKUP_ID in out
-# Depends on test_create_backup having run first
-def test_delete_backup(capsys, spanner_instance):
- backup_sample.delete_backup(INSTANCE_ID, BACKUP_ID)
+@pytest.mark.dependency(depends=["create_backup"])
+def test_delete_backup(capsys, instance_id):
+ backup_sample.delete_backup(instance_id, BACKUP_ID)
out, _ = capsys.readouterr()
assert BACKUP_ID in out
-# Depends on test_create_backup having run first
-def test_cancel_backup(capsys):
- backup_sample.cancel_backup(INSTANCE_ID, DATABASE_ID, BACKUP_ID)
+@pytest.mark.dependency(depends=["create_backup"])
+def test_cancel_backup(capsys, instance_id, sample_database):
+ backup_sample.cancel_backup(
+ instance_id, sample_database.database_id, BACKUP_ID,
+ )
out, _ = capsys.readouterr()
cancel_success = "Backup creation was successfully cancelled." in out
cancel_failure = ("Backup was created before the cancel completed." in out) and (
@@ -161,10 +146,12 @@ def test_cancel_backup(capsys):
@RetryErrors(exception=DeadlineExceeded, max_tries=2)
-def test_create_database_with_retention_period(capsys, spanner_instance):
- backup_sample.create_database_with_version_retention_period(INSTANCE_ID, RETENTION_DATABASE_ID, RETENTION_PERIOD)
+def test_create_database_with_retention_period(capsys, sample_instance):
+ backup_sample.create_database_with_version_retention_period(
+ sample_instance.instance_id, RETENTION_DATABASE_ID, RETENTION_PERIOD,
+ )
out, _ = capsys.readouterr()
assert (RETENTION_DATABASE_ID + " created with ") in out
assert ("retention period " + RETENTION_PERIOD) in out
- database = spanner_instance.database(RETENTION_DATABASE_ID)
+ database = sample_instance.database(RETENTION_DATABASE_ID)
database.drop()
diff --git a/samples/samples/conftest.py b/samples/samples/conftest.py
new file mode 100644
index 0000000000..9108a5892a
--- /dev/null
+++ b/samples/samples/conftest.py
@@ -0,0 +1,156 @@
+# Copyright 2021 Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+""" Shared pytest fixtures."""
+
+import time
+import uuid
+
+from google.api_core import exceptions
+from google.cloud.spanner_v1 import backup
+from google.cloud.spanner_v1 import client
+from google.cloud.spanner_v1 import database
+from google.cloud.spanner_v1 import instance
+import pytest
+from test_utils import retry
+
+
+@pytest.fixture(scope="module")
+def sample_name():
+ """ Sample testcase modules must define this fixture.
+
+ The name is used to label the instance created by the sample, to
+ aid in debugging leaked instances.
+ """
+ raise NotImplementedError("Define 'sample_name' fixture in sample test driver")
+
+
+@pytest.fixture(scope="session")
+def spanner_client():
+ """Shared client used across all samples in a session."""
+ return client.Client()
+
+
+def scrub_instance_ignore_not_found(to_scrub):
+ """Helper for func:`cleanup_old_instances`"""
+ try:
+ for backup_pb in to_scrub.list_backups():
+ backup.Backup.from_pb(backup_pb, to_scrub).delete()
+
+ to_scrub.delete()
+ except exceptions.NotFound:
+ pass
+
+
+@pytest.fixture(scope="session")
+def cleanup_old_instances(spanner_client):
+ """Delete instances, created by samples, that are older than an hour."""
+ cutoff = int(time.time()) - 1 * 60 * 60
+ instance_filter = "labels.cloud_spanner_samples:true"
+
+ for instance_pb in spanner_client.list_instances(filter_=instance_filter):
+ inst = instance.Instance.from_pb(instance_pb, spanner_client)
+
+ if "created" in inst.labels:
+ create_time = int(inst.labels["created"])
+
+ if create_time <= cutoff:
+ scrub_instance_ignore_not_found(inst)
+
+
+@pytest.fixture(scope="module")
+def instance_id():
+ """Unique id for the instance used in samples."""
+ return f"test-instance-{uuid.uuid4().hex[:10]}"
+
+
+@pytest.fixture(scope="module")
+def instance_config(spanner_client):
+ return "{}/instanceConfigs/{}".format(
+ spanner_client.project_name, "regional-us-central1"
+ )
+
+
+@pytest.fixture(scope="module")
+def sample_instance(
+ spanner_client, cleanup_old_instances, instance_id, instance_config, sample_name,
+):
+ sample_instance = spanner_client.instance(
+ instance_id,
+ instance_config,
+ labels={
+ "cloud_spanner_samples": "true",
+ "sample_name": sample_name,
+ "created": str(int(time.time())),
+ },
+ )
+ retry_429 = retry.RetryErrors(exceptions.ResourceExhausted, delay=15)
+ op = retry_429(sample_instance.create)()
+ op.result(120) # block until completion
+
+ # Eventual consistency check
+ retry_found = retry.RetryResult(bool)
+ retry_found(sample_instance.exists)()
+
+ yield sample_instance
+
+ for database_pb in sample_instance.list_databases():
+ database.Database.from_pb(database_pb, sample_instance).drop()
+
+ for backup_pb in sample_instance.list_backups():
+ backup.Backup.from_pb(backup_pb, sample_instance).delete()
+
+ sample_instance.delete()
+
+
+@pytest.fixture(scope="module")
+def database_id():
+ """Id for the database used in samples.
+
+ Sample testcase modules can override as needed.
+ """
+ return "my-database-id"
+
+
+@pytest.fixture(scope="module")
+def database_ddl():
+ """Sequence of DDL statements used to set up the database.
+
+ Sample testcase modules can override as needed.
+ """
+ return []
+
+
+@pytest.fixture(scope="module")
+def sample_database(sample_instance, database_id, database_ddl):
+
+ sample_database = sample_instance.database(
+ database_id, ddl_statements=database_ddl,
+ )
+
+ if not sample_database.exists():
+ sample_database.create()
+
+ yield sample_database
+
+ sample_database.drop()
+
+
+@pytest.fixture(scope="module")
+def kms_key_name(spanner_client):
+ return "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format(
+ spanner_client.project,
+ "us-central1",
+ "spanner-test-keyring",
+ "spanner-test-cmek",
+ )
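The new conftest.py is consumed by each sample test module through a pair of override fixtures ('sample_name', plus optionally 'database_id'/'database_ddl'), as the rewritten quickstart and snippets tests below illustrate. A minimal, hypothetical sketch of such a module, assuming only the fixtures defined above (the module name, table DDL, and assertions are illustrative, not part of this change):

    # my_sample_test.py -- hypothetical sample test module
    import pytest


    @pytest.fixture(scope="module")
    def sample_name():
        # Labels the instance so cleanup_old_instances can find leaked ones.
        return "my-sample"


    @pytest.fixture(scope="module")
    def database_ddl():
        # DDL applied when 'sample_database' first creates the database.
        return [
            "CREATE TABLE Singers (SingerId INT64 NOT NULL) PRIMARY KEY (SingerId)"
        ]


    def test_database_was_provisioned(instance_id, sample_database):
        # 'instance_id' and 'sample_database' come from conftest.py above.
        assert sample_database.exists()
        assert sample_database.database_id == "my-database-id"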
diff --git a/samples/samples/quickstart.py b/samples/samples/quickstart.py
index f19c5f48b2..aa330dd3ca 100644
--- a/samples/samples/quickstart.py
+++ b/samples/samples/quickstart.py
@@ -15,23 +15,23 @@
# limitations under the License.
-def run_quickstart():
+def run_quickstart(instance_id, database_id):
# [START spanner_quickstart]
# Imports the Google Cloud Client Library.
from google.cloud import spanner
+ # Your Cloud Spanner instance ID.
+ # instance_id = "my-instance-id"
+ #
+ # Your Cloud Spanner database ID.
+ # database_id = "my-database-id"
+
# Instantiate a client.
spanner_client = spanner.Client()
- # Your Cloud Spanner instance ID.
- instance_id = "my-instance-id"
-
# Get a Cloud Spanner instance by ID.
instance = spanner_client.instance(instance_id)
- # Your Cloud Spanner database ID.
- database_id = "my-database-id"
-
# Get a Cloud Spanner database by ID.
database = instance.database(database_id)
diff --git a/samples/samples/quickstart_test.py b/samples/samples/quickstart_test.py
index 9b9cbf5cc8..3726e7aef6 100644
--- a/samples/samples/quickstart_test.py
+++ b/samples/samples/quickstart_test.py
@@ -12,45 +12,18 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-import os
-
-from google.cloud import spanner
-import mock
import pytest
import quickstart
-SPANNER_INSTANCE = os.environ["SPANNER_INSTANCE"]
-
-
-@pytest.fixture
-def patch_instance():
- original_instance = spanner.Client.instance
-
- def new_instance(self, unused_instance_name):
- return original_instance(self, SPANNER_INSTANCE)
-
- instance_patch = mock.patch(
- "google.cloud.spanner_v1.Client.instance", side_effect=new_instance, autospec=True
- )
- with instance_patch:
- yield
+@pytest.fixture(scope="module")
+def sample_name():
+ return "quickstart"
-@pytest.fixture
-def example_database():
- spanner_client = spanner.Client()
- instance = spanner_client.instance(SPANNER_INSTANCE)
- database = instance.database("my-database-id")
-
- if not database.exists():
- database.create()
-
- yield
-
-
-def test_quickstart(capsys, patch_instance, example_database):
- quickstart.run_quickstart()
+def test_quickstart(capsys, instance_id, sample_database):
+ quickstart.run_quickstart(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
+
assert "[1]" in out
diff --git a/samples/samples/requirements-test.txt b/samples/samples/requirements-test.txt
index d8fdc314e7..8fcf14a6bb 100644
--- a/samples/samples/requirements-test.txt
+++ b/samples/samples/requirements-test.txt
@@ -1,3 +1,4 @@
pytest==6.2.4
-mock==4.0.2
-google-cloud-testutils==0.2.0
\ No newline at end of file
+pytest-dependency==0.5.1
+mock==4.0.3
+google-cloud-testutils==0.3.0
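pytest-dependency is added so the ordered snippets tests below can declare their prerequisites explicitly instead of relying on execution order. A minimal sketch of the marker pattern used throughout snippets_test.py (test names here are illustrative):

    import pytest


    @pytest.mark.dependency(name="insert_rows")
    def test_insert_rows():
        ...  # writes the rows that later tests read


    @pytest.mark.dependency(depends=["insert_rows"])
    def test_read_rows():
        # Skipped automatically if test_insert_rows failed or was skipped,
        # rather than failing with a confusing downstream error.
        ...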
diff --git a/samples/samples/requirements.txt b/samples/samples/requirements.txt
index 305cd0b7e5..527aa7aa1f 100644
--- a/samples/samples/requirements.txt
+++ b/samples/samples/requirements.txt
@@ -1,2 +1,2 @@
-google-cloud-spanner==3.5.0
+google-cloud-spanner==3.6.0
futures==3.3.0; python_version < "3"
diff --git a/samples/samples/snippets.py b/samples/samples/snippets.py
index 18af239b5b..c6c3972e32 100644
--- a/samples/samples/snippets.py
+++ b/samples/samples/snippets.py
@@ -25,6 +25,7 @@
import datetime
import decimal
import logging
+import time
from google.cloud import spanner
from google.cloud.spanner_v1 import param_types
@@ -44,6 +45,11 @@ def create_instance(instance_id):
configuration_name=config_name,
display_name="This is a display name.",
node_count=1,
+ labels={
+ "cloud_spanner_samples": "true",
+ "sample_name": "snippets-create_instance-explicit",
+ "created": str(int(time.time()))
+ }
)
operation = instance.create()
@@ -57,6 +63,39 @@ def create_instance(instance_id):
# [END spanner_create_instance]
+# [START spanner_create_instance_with_processing_units]
+def create_instance_with_processing_units(instance_id, processing_units):
+ """Creates an instance."""
+ spanner_client = spanner.Client()
+
+ config_name = "{}/instanceConfigs/regional-us-central1".format(
+ spanner_client.project_name
+ )
+
+ instance = spanner_client.instance(
+ instance_id,
+ configuration_name=config_name,
+ display_name="This is a display name.",
+ processing_units=processing_units,
+ labels={
+ "cloud_spanner_samples": "true",
+ "sample_name": "snippets-create_instance_with_processing_units",
+ "created": str(int(time.time()))
+ }
+ )
+
+ operation = instance.create()
+
+ print("Waiting for operation to complete...")
+ operation.result(120)
+
+ print("Created instance {} with {} processing units".format(
+ instance_id, instance.processing_units))
+
+
+# [END spanner_create_instance_with_processing_units]
+
+
# [START spanner_create_database]
def create_database(instance_id, database_id):
"""Creates a database and tables for sample data."""
diff --git a/samples/samples/snippets_test.py b/samples/samples/snippets_test.py
index 28d13fa330..4a8d1991d3 100644
--- a/samples/samples/snippets_test.py
+++ b/samples/samples/snippets_test.py
@@ -15,406 +15,513 @@
import time
import uuid
+from google.api_core import exceptions
from google.cloud import spanner
import pytest
+from test_utils.retry import RetryErrors
import snippets
+CREATE_TABLE_SINGERS = """\
+CREATE TABLE Singers (
+ SingerId INT64 NOT NULL,
+ FirstName STRING(1024),
+ LastName STRING(1024),
+ SingerInfo BYTES(MAX)
+) PRIMARY KEY (SingerId)
+"""
+
+CREATE_TABLE_ALBUMS = """\
+CREATE TABLE Albums (
+ SingerId INT64 NOT NULL,
+ AlbumId INT64 NOT NULL,
+ AlbumTitle STRING(MAX)
+) PRIMARY KEY (SingerId, AlbumId),
+INTERLEAVE IN PARENT Singers ON DELETE CASCADE
+"""
-def unique_instance_id():
- """ Creates a unique id for the database. """
- return f"test-instance-{uuid.uuid4().hex[:10]}"
+
+@pytest.fixture(scope="module")
+def sample_name():
+ return "snippets"
+
+
+@pytest.fixture(scope="module")
+def create_instance_id():
+ """ Id for the low-cost instance. """
+ return f"create-instance-{uuid.uuid4().hex[:10]}"
-def unique_database_id():
- """ Creates a unique id for the database. """
+@pytest.fixture(scope="module")
+def lci_instance_id():
+ """ Id for the low-cost instance. """
+ return f"lci-instance-{uuid.uuid4().hex[:10]}"
+
+
+@pytest.fixture(scope="module")
+def database_id():
return f"test-db-{uuid.uuid4().hex[:10]}"
-INSTANCE_ID = unique_instance_id()
-DATABASE_ID = unique_database_id()
-CMEK_DATABASE_ID = unique_database_id()
+@pytest.fixture(scope="module")
+def create_database_id():
+ return f"create-db-{uuid.uuid4().hex[:10]}"
@pytest.fixture(scope="module")
-def spanner_instance():
- snippets.create_instance(INSTANCE_ID)
- spanner_client = spanner.Client()
- instance = spanner_client.instance(INSTANCE_ID)
- yield instance
- instance.delete()
+def cmek_database_id():
+ return f"cmek-db-{uuid.uuid4().hex[:10]}"
@pytest.fixture(scope="module")
-def database(spanner_instance):
- """ Creates a temporary database that is removed after testing. """
- snippets.create_database(INSTANCE_ID, DATABASE_ID)
- db = spanner_instance.database(DATABASE_ID)
- yield db
- db.drop()
+def database_ddl():
+ """Sequence of DDL statements used to set up the database.
+ Sample testcase modules can override as needed.
+ """
+ return [CREATE_TABLE_SINGERS, CREATE_TABLE_ALBUMS]
-def test_create_instance(spanner_instance):
- # Reload will only succeed if the instance exists.
- spanner_instance.reload()
+def test_create_instance_explicit(spanner_client, create_instance_id):
+    # Rather than re-use 'sample_instance', we create a new instance, to
+ # ensure that the 'create_instance' snippet is tested.
+ snippets.create_instance(create_instance_id)
+ instance = spanner_client.instance(create_instance_id)
+ instance.delete()
-def test_create_database(database):
- # Reload will only succeed if the database exists.
- database.reload()
+def test_create_database_explicit(sample_instance, create_database_id):
+ # Rather than re-use 'sample_database', we create a new database, to
+ # ensure that the 'create_database' snippet is tested.
+ snippets.create_database(sample_instance.instance_id, create_database_id)
+ database = sample_instance.database(create_database_id)
+ database.drop()
-def test_create_database_with_encryption_config(capsys, spanner_instance):
- kms_key_name = "projects/{}/locations/{}/keyRings/{}/cryptoKeys/{}".format(
- spanner_instance._client.project, "us-central1", "spanner-test-keyring", "spanner-test-cmek"
+
+def test_create_instance_with_processing_units(capsys, lci_instance_id):
+ processing_units = 500
+ retry_429 = RetryErrors(exceptions.ResourceExhausted, delay=15)
+ retry_429(snippets.create_instance_with_processing_units)(
+ lci_instance_id, processing_units,
)
- snippets.create_database_with_encryption_key(INSTANCE_ID, CMEK_DATABASE_ID, kms_key_name)
out, _ = capsys.readouterr()
- assert CMEK_DATABASE_ID in out
+ assert lci_instance_id in out
+ assert "{} processing units".format(processing_units) in out
+ spanner_client = spanner.Client()
+ instance = spanner_client.instance(lci_instance_id)
+ instance.delete()
+
+
+def test_create_database_with_encryption_config(capsys, instance_id, cmek_database_id, kms_key_name):
+ snippets.create_database_with_encryption_key(instance_id, cmek_database_id, kms_key_name)
+ out, _ = capsys.readouterr()
+ assert cmek_database_id in out
assert kms_key_name in out
-def test_insert_data(capsys):
- snippets.insert_data(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(name="insert_data")
+def test_insert_data(capsys, instance_id, sample_database):
+ snippets.insert_data(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Inserted data" in out
-def test_delete_data(capsys):
- snippets.delete_data(INSTANCE_ID, DATABASE_ID)
- snippets.insert_data(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_data"])
+def test_delete_data(capsys, instance_id, sample_database):
+ snippets.delete_data(instance_id, sample_database.database_id)
+ # put it back for other tests
+ snippets.insert_data(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Deleted data" in out
-def test_query_data(capsys):
- snippets.query_data(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_data"])
+def test_query_data(capsys, instance_id, sample_database):
+ snippets.query_data(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk" in out
-def test_add_column(capsys):
- snippets.add_column(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(name="add_column", depends=["insert_data"])
+def test_add_column(capsys, instance_id, sample_database):
+ snippets.add_column(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Added the MarketingBudget column." in out
-def test_read_data(capsys):
- snippets.read_data(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_data"])
+def test_read_data(capsys, instance_id, sample_database):
+ snippets.read_data(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk" in out
-def test_update_data(capsys):
+@pytest.mark.dependency(name="update_data", depends=["add_column"])
+def test_update_data(capsys, instance_id, sample_database):
# Sleep for 15 seconds to ensure previous inserts will be
# 'stale' by the time test_read_stale_data is run.
time.sleep(15)
- snippets.update_data(INSTANCE_ID, DATABASE_ID)
+ snippets.update_data(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Updated data." in out
-def test_read_stale_data(capsys):
+@pytest.mark.dependency(depends=["update_data"])
+def test_read_stale_data(capsys, instance_id, sample_database):
# This snippet relies on test_update_data inserting data
# at least 15 seconds after the previous insert
- snippets.read_stale_data(INSTANCE_ID, DATABASE_ID)
+ snippets.read_stale_data(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "SingerId: 1, AlbumId: 1, MarketingBudget: None" in out
-def test_read_write_transaction(capsys):
- snippets.read_write_transaction(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["add_column"])
+def test_read_write_transaction(capsys, instance_id, sample_database):
+ snippets.read_write_transaction(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Transaction complete" in out
-def test_query_data_with_new_column(capsys):
- snippets.query_data_with_new_column(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["add_column"])
+def test_query_data_with_new_column(capsys, instance_id, sample_database):
+ snippets.query_data_with_new_column(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "SingerId: 1, AlbumId: 1, MarketingBudget: 300000" in out
assert "SingerId: 2, AlbumId: 2, MarketingBudget: 300000" in out
-def test_add_index(capsys):
- snippets.add_index(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(name="add_index", depends=["insert_data"])
+def test_add_index(capsys, instance_id, sample_database):
+ snippets.add_index(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Added the AlbumsByAlbumTitle index" in out
-def test_query_data_with_index(capsys):
- snippets.query_data_with_index(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["add_index"])
+def test_query_data_with_index(capsys, instance_id, sample_database):
+ snippets.query_data_with_index(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Go, Go, Go" in out
assert "Forever Hold Your Peace" in out
assert "Green" not in out
-def test_read_data_with_index(capsys):
- snippets.read_data_with_index(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["add_index"])
+def test_read_data_with_index(capsys, instance_id, sample_database):
+ snippets.read_data_with_index(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Go, Go, Go" in out
assert "Forever Hold Your Peace" in out
assert "Green" in out
-def test_add_storing_index(capsys):
- snippets.add_storing_index(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(name="add_storing_index", depends=["insert_data"])
+def test_add_storing_index(capsys, instance_id, sample_database):
+ snippets.add_storing_index(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Added the AlbumsByAlbumTitle2 index." in out
-def test_read_data_with_storing_index(capsys):
- snippets.read_data_with_storing_index(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["add_storing_index"])
+def test_read_data_with_storing_index(capsys, instance_id, sample_database):
+ snippets.read_data_with_storing_index(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "300000" in out
-def test_read_only_transaction(capsys):
- snippets.read_only_transaction(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_data"])
+def test_read_only_transaction(capsys, instance_id, sample_database):
+ snippets.read_only_transaction(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
# Snippet does two reads, so entry should be listed twice
assert out.count("SingerId: 1, AlbumId: 1, AlbumTitle: Total Junk") == 2
-def test_add_timestamp_column(capsys):
- snippets.add_timestamp_column(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(name="add_timestamp_column", depends=["insert_data"])
+def test_add_timestamp_column(capsys, instance_id, sample_database):
+ snippets.add_timestamp_column(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert 'Altered table "Albums" on database ' in out
-def test_update_data_with_timestamp(capsys):
- snippets.update_data_with_timestamp(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["add_timestamp_column"])
+def test_update_data_with_timestamp(capsys, instance_id, sample_database):
+ snippets.update_data_with_timestamp(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Updated data" in out
-def test_query_data_with_timestamp(capsys):
- snippets.query_data_with_timestamp(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["add_timestamp_column"])
+def test_query_data_with_timestamp(capsys, instance_id, sample_database):
+ snippets.query_data_with_timestamp(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "SingerId: 1, AlbumId: 1, MarketingBudget: 1000000" in out
assert "SingerId: 2, AlbumId: 2, MarketingBudget: 750000" in out
-def test_create_table_with_timestamp(capsys):
- snippets.create_table_with_timestamp(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(name="create_table_with_timestamp")
+def test_create_table_with_timestamp(capsys, instance_id, sample_database):
+ snippets.create_table_with_timestamp(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Created Performances table on database" in out
-def test_insert_data_with_timestamp(capsys):
- snippets.insert_data_with_timestamp(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["create_table_with_datatypes"])
+def test_insert_data_with_timestamp(capsys, instance_id, sample_database):
+ snippets.insert_data_with_timestamp(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Inserted data." in out
-def test_write_struct_data(capsys):
- snippets.write_struct_data(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(name="write_struct_data")
+def test_write_struct_data(capsys, instance_id, sample_database):
+ snippets.write_struct_data(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Inserted sample data for STRUCT queries" in out
-def test_query_with_struct(capsys):
- snippets.query_with_struct(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["write_struct_data"])
+def test_query_with_struct(capsys, instance_id, sample_database):
+ snippets.query_with_struct(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "SingerId: 6" in out
-def test_query_with_array_of_struct(capsys):
- snippets.query_with_array_of_struct(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["write_struct_data"])
+def test_query_with_array_of_struct(capsys, instance_id, sample_database):
+ snippets.query_with_array_of_struct(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "SingerId: 8" in out
assert "SingerId: 7" in out
assert "SingerId: 6" in out
-def test_query_struct_field(capsys):
- snippets.query_struct_field(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["write_struct_data"])
+def test_query_struct_field(capsys, instance_id, sample_database):
+ snippets.query_struct_field(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "SingerId: 6" in out
-def test_query_nested_struct_field(capsys):
- snippets.query_nested_struct_field(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["write_struct_data"])
+def test_query_nested_struct_field(capsys, instance_id, sample_database):
+ snippets.query_nested_struct_field(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "SingerId: 6 SongName: Imagination" in out
assert "SingerId: 9 SongName: Imagination" in out
-def test_insert_data_with_dml(capsys):
- snippets.insert_data_with_dml(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(name="insert_data_with_dml")
+def test_insert_data_with_dml(capsys, instance_id, sample_database):
+ snippets.insert_data_with_dml(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "1 record(s) inserted." in out
-def test_log_commit_stats(capsys):
- snippets.log_commit_stats(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(name="log_commit_stats")
+def test_log_commit_stats(capsys, instance_id, sample_database):
+ snippets.log_commit_stats(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "1 record(s) inserted." in out
assert "3 mutation(s) in transaction." in out
-def test_update_data_with_dml(capsys):
- snippets.update_data_with_dml(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_data"])
+def test_update_data_with_dml(capsys, instance_id, sample_database):
+ snippets.update_data_with_dml(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "1 record(s) updated." in out
-def test_delete_data_with_dml(capsys):
- snippets.delete_data_with_dml(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_data"])
+def test_delete_data_with_dml(capsys, instance_id, sample_database):
+ snippets.delete_data_with_dml(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "1 record(s) deleted." in out
-def test_update_data_with_dml_timestamp(capsys):
- snippets.update_data_with_dml_timestamp(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["add_timestamp_column"])
+def test_update_data_with_dml_timestamp(capsys, instance_id, sample_database):
+ snippets.update_data_with_dml_timestamp(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "2 record(s) updated." in out
-def test_dml_write_read_transaction(capsys):
- snippets.dml_write_read_transaction(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(name="dml_write_read_transaction")
+def test_dml_write_read_transaction(capsys, instance_id, sample_database):
+ snippets.dml_write_read_transaction(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "1 record(s) inserted." in out
assert "FirstName: Timothy, LastName: Campbell" in out
-def test_update_data_with_dml_struct(capsys):
- snippets.update_data_with_dml_struct(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["dml_write_read_transaction"])
+def test_update_data_with_dml_struct(capsys, instance_id, sample_database):
+ snippets.update_data_with_dml_struct(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "1 record(s) updated" in out
-def test_insert_with_dml(capsys):
- snippets.insert_with_dml(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(name="insert_with_dml")
+def test_insert_with_dml(capsys, instance_id, sample_database):
+ snippets.insert_with_dml(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "4 record(s) inserted" in out
-def test_query_data_with_parameter(capsys):
- snippets.query_data_with_parameter(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_with_dml"])
+def test_query_data_with_parameter(capsys, instance_id, sample_database):
+ snippets.query_data_with_parameter(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "SingerId: 12, FirstName: Melissa, LastName: Garcia" in out
-def test_write_with_dml_transaction(capsys):
- snippets.write_with_dml_transaction(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["add_column"])
+def test_write_with_dml_transaction(capsys, instance_id, sample_database):
+ snippets.write_with_dml_transaction(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Transferred 200000 from Album2's budget to Album1's" in out
-def update_data_with_partitioned_dml(capsys):
- snippets.update_data_with_partitioned_dml(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["add_column"])
+def test_update_data_with_partitioned_dml(capsys, instance_id, sample_database):
+ snippets.update_data_with_partitioned_dml(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "3 record(s) updated" in out
-def delete_data_with_partitioned_dml(capsys):
- snippets.delete_data_with_partitioned_dml(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_with_dml"])
+def test_delete_data_with_partitioned_dml(capsys, instance_id, sample_database):
+ snippets.delete_data_with_partitioned_dml(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
- assert "5 record(s) deleted" in out
+ assert "6 record(s) deleted" in out
-def update_with_batch_dml(capsys):
- snippets.update_with_batch_dml(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["add_column"])
+def test_update_with_batch_dml(capsys, instance_id, sample_database):
+ snippets.update_with_batch_dml(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Executed 2 SQL statements using Batch DML" in out
-def test_create_table_with_datatypes(capsys):
- snippets.create_table_with_datatypes(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(name="create_table_with_datatypes")
+def test_create_table_with_datatypes(capsys, instance_id, sample_database):
+ snippets.create_table_with_datatypes(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Created Venues table on database" in out
-def test_insert_datatypes_data(capsys):
- snippets.insert_datatypes_data(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(
+ name="insert_datatypes_data", depends=["create_table_with_datatypes"],
+)
+def test_insert_datatypes_data(capsys, instance_id, sample_database):
+ snippets.insert_datatypes_data(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Inserted data." in out
-def test_query_data_with_array(capsys):
- snippets.query_data_with_array(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_datatypes_data"])
+def test_query_data_with_array(capsys, instance_id, sample_database):
+ snippets.query_data_with_array(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "VenueId: 19, VenueName: Venue 19, AvailableDate: 2020-11-01" in out
assert "VenueId: 42, VenueName: Venue 42, AvailableDate: 2020-10-01" in out
-def test_query_data_with_bool(capsys):
- snippets.query_data_with_bool(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_datatypes_data"])
+def test_query_data_with_bool(capsys, instance_id, sample_database):
+ snippets.query_data_with_bool(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "VenueId: 19, VenueName: Venue 19, OutdoorVenue: True" in out
-def test_query_data_with_bytes(capsys):
- snippets.query_data_with_bytes(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_datatypes_data"])
+def test_query_data_with_bytes(capsys, instance_id, sample_database):
+ snippets.query_data_with_bytes(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "VenueId: 4, VenueName: Venue 4" in out
-def test_query_data_with_date(capsys):
- snippets.query_data_with_date(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_datatypes_data"])
+def test_query_data_with_date(capsys, instance_id, sample_database):
+ snippets.query_data_with_date(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "VenueId: 4, VenueName: Venue 4, LastContactDate: 2018-09-02" in out
assert "VenueId: 42, VenueName: Venue 42, LastContactDate: 2018-10-01" in out
-def test_query_data_with_float(capsys):
- snippets.query_data_with_float(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_datatypes_data"])
+def test_query_data_with_float(capsys, instance_id, sample_database):
+ snippets.query_data_with_float(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "VenueId: 4, VenueName: Venue 4, PopularityScore: 0.8" in out
assert "VenueId: 19, VenueName: Venue 19, PopularityScore: 0.9" in out
-def test_query_data_with_int(capsys):
- snippets.query_data_with_int(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_datatypes_data"])
+def test_query_data_with_int(capsys, instance_id, sample_database):
+ snippets.query_data_with_int(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "VenueId: 19, VenueName: Venue 19, Capacity: 6300" in out
assert "VenueId: 42, VenueName: Venue 42, Capacity: 3000" in out
-def test_query_data_with_string(capsys):
- snippets.query_data_with_string(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_datatypes_data"])
+def test_query_data_with_string(capsys, instance_id, sample_database):
+ snippets.query_data_with_string(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "VenueId: 42, VenueName: Venue 42" in out
-def test_add_numeric_column(capsys):
- snippets.add_numeric_column(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(
+ name="add_numeric_column", depends=["create_table_with_datatypes"],
+)
+def test_add_numeric_column(capsys, instance_id, sample_database):
+ snippets.add_numeric_column(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert 'Altered table "Venues" on database ' in out
-def test_update_data_with_numeric(capsys):
- snippets.update_data_with_numeric(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["add_numeric_column", "insert_datatypes_data"])
+def test_update_data_with_numeric(capsys, instance_id, sample_database):
+ snippets.update_data_with_numeric(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "Updated data" in out
-def test_query_data_with_numeric_parameter(capsys):
- snippets.query_data_with_numeric_parameter(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["add_numeric_column"])
+def test_query_data_with_numeric_parameter(capsys, instance_id, sample_database):
+ snippets.query_data_with_numeric_parameter(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "VenueId: 4, Revenue: 35000" in out
-def test_query_data_with_timestamp_parameter(capsys):
- snippets.query_data_with_timestamp_parameter(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_datatypes_data"])
+def test_query_data_with_timestamp_parameter(capsys, instance_id, sample_database):
+ snippets.query_data_with_timestamp_parameter(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out
assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out
assert "VenueId: 42, VenueName: Venue 42, LastUpdateTime:" in out
-def test_query_data_with_query_options(capsys):
- snippets.query_data_with_query_options(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.dependency(depends=["insert_datatypes_data"])
+def test_query_data_with_query_options(capsys, instance_id, sample_database):
+ snippets.query_data_with_query_options(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out
assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out
assert "VenueId: 42, VenueName: Venue 42, LastUpdateTime:" in out
-def test_create_client_with_query_options(capsys):
- snippets.create_client_with_query_options(INSTANCE_ID, DATABASE_ID)
+@pytest.mark.skip(
+ "Failure is due to the package being missing on the backend."
+ "See: https://github.com/googleapis/python-spanner/issues/421"
+)
+@pytest.mark.dependency(depends=["insert_datatypes_data"])
+def test_create_client_with_query_options(capsys, instance_id, sample_database):
+ snippets.create_client_with_query_options(instance_id, sample_database.database_id)
out, _ = capsys.readouterr()
assert "VenueId: 4, VenueName: Venue 4, LastUpdateTime:" in out
assert "VenueId: 19, VenueName: Venue 19, LastUpdateTime:" in out
diff --git a/setup.py b/setup.py
index c9e69d9271..725baaf8bb 100644
--- a/setup.py
+++ b/setup.py
@@ -22,15 +22,21 @@
name = "google-cloud-spanner"
description = "Cloud Spanner API client library"
-version = "3.6.0"
+version = "3.7.0"
# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
# 'Development Status :: 5 - Production/Stable'
release_status = "Development Status :: 5 - Production/Stable"
dependencies = [
- "google-api-core[grpc] >= 1.22.2, < 2.0.0dev",
- "google-cloud-core >= 1.4.1, < 2.0dev",
+ # NOTE: Maintainers, please do not require google-api-core>=2.x.x
+ # Until this issue is closed
+ # https://github.com/googleapis/google-cloud-python/issues/10566
+ "google-api-core[grpc] >= 1.26.0, <3.0.0dev",
+ # NOTE: Maintainers, please do not require google-cloud-core>=2.x.x
+ # Until this issue is closed
+ # https://github.com/googleapis/google-cloud-python/issues/10566
+ "google-cloud-core >= 1.4.1, < 3.0dev",
"grpc-google-iam-v1 >= 0.12.3, < 0.13dev",
"proto-plus >= 1.11.0",
"sqlparse >= 0.3.0",
@@ -40,7 +46,7 @@
"tracing": [
"opentelemetry-api >= 1.1.0",
"opentelemetry-sdk >= 1.1.0",
- "opentelemetry-instrumentation >= 0.20b0",
+ "opentelemetry-instrumentation >= 0.20b0, < 0.23dev",
],
"libcst": "libcst >= 0.2.5",
}
diff --git a/test_utils/credentials.json.enc b/test_utils/credentials.json.enc
deleted file mode 100644
index f073c7e4f7..0000000000
--- a/test_utils/credentials.json.enc
+++ /dev/null
@@ -1,49 +0,0 @@
-U2FsdGVkX1/vVm/dOEg1DCACYbdOcL+ey6+64A+DZGZVgF8Z/3skK6rpPocu6GOA
-UZAqASsBH9QifDf8cKVXQXVYpYq6HSv2O0w7vOmVorZO9GYPo98s9/8XO+4ty/AU
-aB6TD68frBAYv4cT/l5m7aYdzfzMTy0EOXoleZT09JYP3B5FV3KCO114FzMXGwrj
-HXsR6E5SyUUlUnWPC3eD3aqmovay0gxOKYO3ZwjFK1nlbN/8q6/8nwBCf/Bg6SHV
-V93pNxdolRlJev9kgKz4RN1z4jGCy5PAndhSLE82NFIs9LoAiEOU5YeMlN+Ulqus
-J92nh+ptUe9a4pJGbAuveUWO7zdS1QyXvTMUcmmSfXCNm/eIQjNuu5+rHtIjWKh8
-Ilwj2w1aTfSptQEhk/kwRgFz/d11vfwJzvwTmCxO6zyOeL0VUWLqdCBGgG5As9He
-/RenF8PZ1O0WbTt7fns5oTlTk/MUo+0xJ1xqvu/y45LaqqcBAnEdrWKmtM3dJHWv
-ufQku+kD+83F/VwBnQdvgMHu6KZEs6LRrNo58r4QuK6fS7VCACdzxID1RM2cL7kT
-6BFRlyGj1aigmjne9g9M9Jx4R+mZDpPU1WDzzG71J4qCUwaX8Dfwutuv4uiFvzwq
-NUF0wLJJPtKWmtW+hnZ/fhHQGCRsOpZzFnqp6Zv7J7k6esqxMgIjfal7Djk5Acy8
-j3iVvm6CYmKMVqzL62JHYS9Ye83tzBCaR8hpnJQKgH3FSOFY8HSwrtQSIsl/hSeF
-41sgnz0Y+/gkzNeU18qFk+eCZmvljyu+JK0nPYUgpOCJYVBNQpNHz5PUyiAEKhtM
-IOSdjPRW1Y+Xf4RroJnLPoF24Ijwrow5LCm9hBRY6TPPMMmnIXCd23xcLJ1rMj6g
-x4ZikElans+cwuc9wtbb7w01DcpTwQ1+eIV1qV+KIgpnLjRGLhZD4etobBsrwYu/
-vnIwy2QHCKENPb8sbdgp7x2mF7VSX0/7tf+9+i70EBiMzpOKBkiZhtLzm6hOBkEy
-ODaWrx4lTTwbSw8Rmtf58APhPFMsjHoNsjiUoK249Y8Y2Ff4fMfqYsXu6VC1n/At
-CuWYHc3EfBwFcLJS+RQB9kFk/4FygFBWq4Kj0MqoRruLbKmoGeJKH9q35W0f0NCD
-j+iHt3014kMGiuyJe1UDQ6fvEihFFdHuDivFpPAXDt4PTY/WtpDhaGMx23kb54pK
-jkAuxpznAB1lK3u9bGRXDasGeHIrNtIlPvgkrWHXvoBVqM7zry8TGtoxp3E3I42Z
-cUfDWfB9GqVdrOwvrTzyZsl2uShRkAJaZFZj5aMyYxiptp4gM8CwWiNtOd2EwtRO
-LxZX4M02PQFIqXV3FSDA0q6EwglUrTZdAlYeOEkopaKCtG31dEPOSQG3NGJAEYso
-Cxm99H7970dp0OAgpNSgRbcWDbhVbQXnRzvFGqLeH6a9dQ/a8uD3s8Qm9Du/kB6d
-XxTRe2OGxzcD0AgI8GClE4rIZHCLbcwuJRp0EYcN+pgY80O4U98fZ5RYpU6OYbU/
-MEiaBYFKtZtGkV6AQD568V7hHJWqc5DDfVHUQ/aeQwnKi2vnU66u+nnV2rZxXxLP
-+dqeLRpul+wKa5b/Z5SfQ14Ff8s7aVyxaogGpyggyPL1vyq4KWZ6Or/wEE5hgNO4
-kBh6ht0QT1Hti8XY2JK1M+Jgbjgcg4jkHBGVqegrG1Rvcc2A4TYKwx+QMSBhyxrU
-5qhROjS4lTcC42hQslMUkUwc4U/Y91XdFbOOnaAkwzI36NRYL0pmgZnYxGJZeRvr
-E5foOhnOEVSFGdOkLfFh+FkWZQf56Lmn8Gg2wHE3dZTxLHibiUYfkgOr1uEosq29
-D1NstvlJURPQ0Q+8QQNWcl9nEZHMAjOmnL1hbx+QfuC6seucp+sXGzdZByMLZbvT
-tG8KNL293CmyQowgf9MXToWYnwRkcvqfTaKyor2Ggze3JtoFW4t0j4DI1XPciZFX
-XmfApHrzdB/bZadzxyaZ2NE0CuH9zDelwI6rz38xsN5liYnp5qmNKVCZVOHccXa6
-J8x365m5/VaaA2RrtdPqKxn8VaKy7+T690QgMXVGM4PbzQzQxHuSleklocqlP+sB
-jSMXCZY+ng/i4UmRO9noiyW3UThYh0hIdMYs12EmmI9cnF/OuYZpl30fmqwV+VNM
-td5B2fYvAvvsjiX60SFCn3DATP1GrPMBlZSmhhP3GYS+xrWt3Xxta9qIX2BEF1Gg
-twnZZRjoULSRFUYPfJPEOfEH2UQwm84wxx/GezVE+S/RpBlatPOgCiLnNNaLfdTC
-mTG9qY9elJv3GGQO8Lqgf4i8blExs05lSPk1BDhzTB6H9TLz+Ge0/l1QxKf3gPXU
-aImK1azieXMXHECkdKxrzmehwu1dZ/oYOLc/OFQCETwSRoLPFOFpYUpizwmVVHR6
-uLSfRptte4ZOU3zHfpd/0+J4tkwHwEkGzsmMdqudlm7qME6upuIplyVBH8JiXzUK
-n1RIH/OPmVEluAnexWRLZNdk7MrakIO4XACVbICENiYQgAIErP568An6twWEGDbZ
-bEN64E3cVDTDRPRAunIhhsEaapcxpFEPWlHorxv36nMUt0R0h0bJlCu5QdzckfcX
-ZrRuu1kl76ZfbSE8T0G4/rBb9gsU4Gn3WyvLIO3MgFBuxR68ZwcR8LpEUd8qp38H
-NG4cxPmN1nGKo663Z+xI2Gt5up4gpl+fOt4mXqxY386rB7yHaOfElMG5TUYdrS9w
-1xbbCVgeJ6zxX+NFlndG33cSAPprhw+C18eUu6ZU63WZcYFo3GfK6rs3lvYtofvE
-8DxztdTidQedNVNE+63YCjhxd/cZUI5n/UpgYkr9owp7hNGJiR3tdoNLR2gcoGqL
-qWhH928k2aSgF2j97LZ2OqoPCp0tUB7ho4jD2u4Ik3GLVNlCc3dCvWRvpHtDTQDv
-tujESMfHUc9I2r4S/PD3bku/ABGwa977Yp1PjzJGr9RajA5is5n6GVpyynwjtKG4
-iyyITpdwpCgr8pueTBLwZnas3slmiMOog/E4PmPgctHzvC+vhQijhUtw5zSsmv0l
-bZlw/mVhp5Ta7dTcLBKR8DA3m3vTbaEGkz0xpfQr7GfiSMRbJyvIw88pDK0gyTMD
diff --git a/test_utils/scripts/circleci/get_tagged_package.py b/test_utils/scripts/circleci/get_tagged_package.py
deleted file mode 100644
index c148b9dc23..0000000000
--- a/test_utils/scripts/circleci/get_tagged_package.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright 2016 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helper to determine package from tag.
-Get the current package directory corresponding to the Circle Tag.
-"""
-
-from __future__ import print_function
-
-import os
-import re
-import sys
-
-
-TAG_RE = re.compile(r"""
- ^
- (?P
- (([a-z]+)[_-])*) # pkg-name-with-hyphens-or-underscores (empty allowed)
- ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints)
- $
-""", re.VERBOSE)
-TAG_ENV = 'CIRCLE_TAG'
-ERROR_MSG = '%s env. var. not set' % (TAG_ENV,)
-BAD_TAG_MSG = 'Invalid tag name: %s. Expected pkg-name-x.y.z'
-CIRCLE_CI_SCRIPTS_DIR = os.path.dirname(__file__)
-ROOT_DIR = os.path.realpath(
- os.path.join(CIRCLE_CI_SCRIPTS_DIR, '..', '..', '..'))
-
-
-def main():
- """Get the current package directory.
- Prints the package directory out so callers can consume it.
- """
- if TAG_ENV not in os.environ:
- print(ERROR_MSG, file=sys.stderr)
- sys.exit(1)
-
- tag_name = os.environ[TAG_ENV]
- match = TAG_RE.match(tag_name)
- if match is None:
- print(BAD_TAG_MSG % (tag_name,), file=sys.stderr)
- sys.exit(1)
-
- pkg_name = match.group('pkg')
- if pkg_name is None:
- print(ROOT_DIR)
- else:
- pkg_dir = pkg_name.rstrip('-').replace('-', '_')
- print(os.path.join(ROOT_DIR, pkg_dir))
-
-
-if __name__ == '__main__':
- main()
diff --git a/test_utils/scripts/circleci/twine_upload.sh b/test_utils/scripts/circleci/twine_upload.sh
deleted file mode 100755
index 23a4738e90..0000000000
--- a/test_utils/scripts/circleci/twine_upload.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-
-# Copyright 2016 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -ev
-
-# If this is not a CircleCI tag, no-op.
-if [[ -z "$CIRCLE_TAG" ]]; then
- echo "This is not a release tag. Doing nothing."
- exit 0
-fi
-
-# H/T: http://stackoverflow.com/a/246128/1068170
-SCRIPT="$(dirname "${BASH_SOURCE[0]}")/get_tagged_package.py"
-# Determine the package directory being deploying on this tag.
-PKG_DIR="$(python ${SCRIPT})"
-
-# Ensure that we have the latest versions of Twine, Wheel, and Setuptools.
-python3 -m pip install --upgrade twine wheel setuptools
-
-# Move into the package, build the distribution and upload.
-cd ${PKG_DIR}
-python3 setup.py sdist bdist_wheel
-twine upload dist/*
diff --git a/test_utils/scripts/get_target_packages.py b/test_utils/scripts/get_target_packages.py
deleted file mode 100644
index 1d51830cc2..0000000000
--- a/test_utils/scripts/get_target_packages.py
+++ /dev/null
@@ -1,268 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Print a list of packages which require testing."""
-
-import os
-import re
-import subprocess
-import warnings
-
-
-CURRENT_DIR = os.path.realpath(os.path.dirname(__file__))
-BASE_DIR = os.path.realpath(os.path.join(CURRENT_DIR, '..', '..'))
-GITHUB_REPO = os.environ.get('GITHUB_REPO', 'google-cloud-python')
-CI = os.environ.get('CI', '')
-CI_BRANCH = os.environ.get('CIRCLE_BRANCH')
-CI_PR = os.environ.get('CIRCLE_PR_NUMBER')
-CIRCLE_TAG = os.environ.get('CIRCLE_TAG')
-head_hash, head_name = subprocess.check_output(['git', 'show-ref', 'HEAD']
-).strip().decode('ascii').split()
-rev_parse = subprocess.check_output(
- ['git', 'rev-parse', '--abbrev-ref', 'HEAD']
-).strip().decode('ascii')
-MAJOR_DIV = '#' * 78
-MINOR_DIV = '#' + '-' * 77
-
-# NOTE: This reg-ex is copied from ``get_tagged_packages``.
-TAG_RE = re.compile(r"""
- ^
- (?P
- (([a-z]+)-)*) # pkg-name-with-hyphens- (empty allowed)
- ([0-9]+)\.([0-9]+)\.([0-9]+) # Version x.y.z (x, y, z all ints)
- $
-""", re.VERBOSE)
-
-# This is the current set of dependencies by package.
-# As of this writing, the only "real" dependency is that of error_reporting
-# (on logging), the rest are just system test dependencies.
-PKG_DEPENDENCIES = {
- 'logging': {'pubsub'},
-}
-
-
-def get_baseline():
- """Return the baseline commit.
-
- On a pull request, or on a branch, return the common parent revision
- with the master branch.
-
- Locally, return a value pulled from environment variables, or None if
- the environment variables are not set.
-
- On a push to master, return None. This will effectively cause everything
- to be considered to be affected.
- """
-
- # If this is a pull request or branch, return the tip for master.
- # We will test only packages which have changed since that point.
- ci_non_master = (CI == 'true') and any([CI_BRANCH != 'master', CI_PR])
-
- if ci_non_master:
-
- repo_url = 'git@github.com:GoogleCloudPlatform/{}'.format(GITHUB_REPO)
- subprocess.run(['git', 'remote', 'add', 'baseline', repo_url],
- stderr=subprocess.DEVNULL)
- subprocess.run(['git', 'pull', 'baseline'], stderr=subprocess.DEVNULL)
-
- if CI_PR is None and CI_BRANCH is not None:
- output = subprocess.check_output([
- 'git', 'merge-base', '--fork-point',
- 'baseline/master', CI_BRANCH])
- return output.strip().decode('ascii')
-
- return 'baseline/master'
-
- # If environment variables are set identifying what the master tip is,
- # use that.
- if os.environ.get('GOOGLE_CLOUD_TESTING_REMOTE', ''):
- remote = os.environ['GOOGLE_CLOUD_TESTING_REMOTE']
- branch = os.environ.get('GOOGLE_CLOUD_TESTING_BRANCH', 'master')
- return '%s/%s' % (remote, branch)
-
- # If we are not in CI and we got this far, issue a warning.
- if not CI:
- warnings.warn('No baseline could be determined; this means tests '
- 'will run for every package. If this is local '
- 'development, set the $GOOGLE_CLOUD_TESTING_REMOTE '
- 'environment variable.')
-
- # That is all we can do; return None.
- return None
-
-
-def get_changed_files():
- """Return a list of files that have been changed since the baseline.
-
- If there is no base, return None.
- """
- # Get the baseline, and fail quickly if there is no baseline.
- baseline = get_baseline()
- print('# Baseline commit: {}'.format(baseline))
- if not baseline:
- return None
-
- # Return a list of altered files.
- try:
- return subprocess.check_output([
- 'git', 'diff', '--name-only', '{}..HEAD'.format(baseline),
- ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n')
- except subprocess.CalledProcessError:
- warnings.warn('Unable to perform git diff; falling back to assuming '
- 'all packages have changed.')
- return None
-
-
-def reverse_map(dict_of_sets):
- """Reverse a map of one-to-many.
-
- So the map::
-
- {
- 'A': {'B', 'C'},
- 'B': {'C'},
- }
-
- becomes
-
- {
- 'B': {'A'},
- 'C': {'A', 'B'},
- }
-
- Args:
- dict_of_sets (dict[set]): A dictionary of sets, mapping
- one value to many.
-
- Returns:
- dict[set]: The reversed map.
- """
- result = {}
- for key, values in dict_of_sets.items():
- for value in values:
- result.setdefault(value, set()).add(key)
-
- return result
-
-def get_changed_packages(file_list):
- """Return a list of changed packages based on the provided file list.
-
- If the file list is None, then all packages should be considered to be
- altered.
- """
- # Determine a complete list of packages.
- all_packages = set()
- for file_ in os.listdir(BASE_DIR):
- abs_file = os.path.realpath(os.path.join(BASE_DIR, file_))
- nox_file = os.path.join(abs_file, 'nox.py')
- if os.path.isdir(abs_file) and os.path.isfile(nox_file):
- all_packages.add(file_)
-
- # If ther is no file list, send down the full package set.
- if file_list is None:
- return all_packages
-
- # Create a set based on the list of changed files.
- answer = set()
- reverse_deps = reverse_map(PKG_DEPENDENCIES)
- for file_ in file_list:
- # Ignore root directory changes (setup.py, .gitignore, etc.).
- if os.path.sep not in file_:
- continue
-
- # Ignore changes that are not in a package (usually this will be docs).
- package = file_.split(os.path.sep, 1)[0]
- if package not in all_packages:
- continue
-
- # If there is a change in core, short-circuit now and return
- # everything.
- if package in ('core',):
- return all_packages
-
- # Add the package, as well as any dependencies this package has.
- # NOTE: For now, dependencies only go down one level.
- answer.add(package)
- answer = answer.union(reverse_deps.get(package, set()))
-
- # We got this far without being short-circuited; return the final answer.
- return answer
-
-
-def get_tagged_package():
- """Return the package corresponding to the current tag.
-
- If there is not tag, will return :data:`None`.
- """
- if CIRCLE_TAG is None:
- return
-
- match = TAG_RE.match(CIRCLE_TAG)
- if match is None:
- return
-
- pkg_name = match.group('pkg')
- if pkg_name == '':
- # NOTE: This corresponds to the "umbrella" tag.
- return
-
- return pkg_name.rstrip('-').replace('-', '_')
-
-
-def get_target_packages():
- """Return a list of target packages to be run in the current build.
-
- If in a tag build, will run only the package(s) that are tagged, otherwise
- will run the packages that have file changes in them (or packages that
- depend on those).
- """
- tagged_package = get_tagged_package()
- if tagged_package is None:
- file_list = get_changed_files()
- print(MAJOR_DIV)
- print('# Changed files:')
- print(MINOR_DIV)
- for file_ in file_list or ():
- print('# {}'.format(file_))
- for package in sorted(get_changed_packages(file_list)):
- yield package
- else:
- yield tagged_package
-
-
-def main():
- print(MAJOR_DIV)
- print('# Environment')
- print(MINOR_DIV)
- print('# CircleCI: {}'.format(CI))
- print('# CircleCI branch: {}'.format(CI_BRANCH))
- print('# CircleCI pr: {}'.format(CI_PR))
- print('# CircleCI tag: {}'.format(CIRCLE_TAG))
- print('# HEAD ref: {}'.format(head_hash))
- print('# {}'.format(head_name))
- print('# Git branch: {}'.format(rev_parse))
- print(MAJOR_DIV)
-
- packages = list(get_target_packages())
-
- print(MAJOR_DIV)
- print('# Target packages:')
- print(MINOR_DIV)
- for package in packages:
- print(package)
- print(MAJOR_DIV)
-
-
-if __name__ == '__main__':
- main()
diff --git a/test_utils/scripts/get_target_packages_kokoro.py b/test_utils/scripts/get_target_packages_kokoro.py
deleted file mode 100644
index 27d3a0c940..0000000000
--- a/test_utils/scripts/get_target_packages_kokoro.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Print a list of packages which require testing."""
-
-import pathlib
-import subprocess
-
-import ci_diff_helper
-import requests
-
-
-def print_environment(environment):
- print("-> CI environment:")
- print('Branch', environment.branch)
- print('PR', environment.pr)
- print('In PR', environment.in_pr)
- print('Repo URL', environment.repo_url)
- if environment.in_pr:
- print('PR Base', environment.base)
-
-
-def get_base(environment):
- if environment.in_pr:
- return environment.base
- else:
- # If we're not in a PR, just calculate the changes between this commit
- # and its parent.
- return 'HEAD~1'
-
-
-def get_changed_files_from_base(base):
- return subprocess.check_output([
- 'git', 'diff', '--name-only', f'{base}..HEAD',
- ], stderr=subprocess.DEVNULL).decode('utf8').strip().split('\n')
-
-
-_URL_TEMPLATE = (
- 'https://api.github.com/repos/googleapis/google-cloud-python/pulls/'
- '{}/files'
-)
-
-
-def get_changed_files_from_pr(pr):
- url = _URL_TEMPLATE.format(pr)
- while url is not None:
- response = requests.get(url)
- for info in response.json():
- yield info['filename']
- url = response.links.get('next', {}).get('url')
-
-
-def determine_changed_packages(changed_files):
- packages = [
- path.parent for path in pathlib.Path('.').glob('*/noxfile.py')
- ]
-
- changed_packages = set()
- for file in changed_files:
- file = pathlib.Path(file)
- for package in packages:
- if package in file.parents:
- changed_packages.add(package)
-
- return changed_packages
-
-
-def main():
- environment = ci_diff_helper.get_config()
- print_environment(environment)
- base = get_base(environment)
-
- if environment.in_pr:
- changed_files = list(get_changed_files_from_pr(environment.pr))
- else:
- changed_files = get_changed_files_from_base(base)
-
- packages = determine_changed_packages(changed_files)
-
- print(f"Comparing against {base}.")
- print("-> Changed packages:")
-
- for package in packages:
- print(package)
-
-
-main()
diff --git a/test_utils/scripts/run_emulator.py b/test_utils/scripts/run_emulator.py
deleted file mode 100644
index 287b086406..0000000000
--- a/test_utils/scripts/run_emulator.py
+++ /dev/null
@@ -1,199 +0,0 @@
-# Copyright 2016 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Run system tests locally with the emulator.
-
-First makes system calls to spawn the emulator and get the local environment
-variable needed for it. Then calls the system tests.
-"""
-
-
-import argparse
-import os
-import subprocess
-
-import psutil
-
-from google.cloud.environment_vars import BIGTABLE_EMULATOR
-from google.cloud.environment_vars import GCD_DATASET
-from google.cloud.environment_vars import GCD_HOST
-from google.cloud.environment_vars import PUBSUB_EMULATOR
-from run_system_test import run_module_tests
-
-
-BIGTABLE = 'bigtable'
-DATASTORE = 'datastore'
-PUBSUB = 'pubsub'
-PACKAGE_INFO = {
- BIGTABLE: (BIGTABLE_EMULATOR,),
- DATASTORE: (GCD_DATASET, GCD_HOST),
- PUBSUB: (PUBSUB_EMULATOR,),
-}
-EXTRA = {
- DATASTORE: ('--no-legacy',),
-}
-_DS_READY_LINE = '[datastore] Dev App Server is now running.\n'
-_PS_READY_LINE_PREFIX = '[pubsub] INFO: Server started, listening on '
-_BT_READY_LINE_PREFIX = '[bigtable] Cloud Bigtable emulator running on '
-
-
-def get_parser():
- """Get simple ``argparse`` parser to determine package.
-
- :rtype: :class:`argparse.ArgumentParser`
- :returns: The parser for this script.
- """
- parser = argparse.ArgumentParser(
- description='Run google-cloud system tests against local emulator.')
- parser.add_argument('--package', dest='package',
- choices=sorted(PACKAGE_INFO.keys()),
- default=DATASTORE, help='Package to be tested.')
- return parser
-
-
-def get_start_command(package):
- """Get command line arguments for starting emulator.
-
- :type package: str
- :param package: The package to start an emulator for.
-
- :rtype: tuple
- :returns: The arguments to be used, in a tuple.
- """
- result = ('gcloud', 'beta', 'emulators', package, 'start')
- extra = EXTRA.get(package, ())
- return result + extra
-
-
-def get_env_init_command(package):
- """Get command line arguments for getting emulator env. info.
-
- :type package: str
- :param package: The package to get environment info for.
-
- :rtype: tuple
- :returns: The arguments to be used, in a tuple.
- """
- result = ('gcloud', 'beta', 'emulators', package, 'env-init')
- extra = EXTRA.get(package, ())
- return result + extra
-
-
-def datastore_wait_ready(popen):
- """Wait until the datastore emulator is ready to use.
-
- :type popen: :class:`subprocess.Popen`
- :param popen: An open subprocess to interact with.
- """
- emulator_ready = False
- while not emulator_ready:
- emulator_ready = popen.stderr.readline() == _DS_READY_LINE
-
-
-def wait_ready_prefix(popen, prefix):
- """Wait until the a process encounters a line with matching prefix.
-
- :type popen: :class:`subprocess.Popen`
- :param popen: An open subprocess to interact with.
-
- :type prefix: str
- :param prefix: The prefix to match
- """
- emulator_ready = False
- while not emulator_ready:
- emulator_ready = popen.stderr.readline().startswith(prefix)
-
-
-def wait_ready(package, popen):
- """Wait until the emulator is ready to use.
-
- :type package: str
- :param package: The package to check if ready.
-
- :type popen: :class:`subprocess.Popen`
- :param popen: An open subprocess to interact with.
-
- :raises: :class:`KeyError` if the ``package`` is not among
- ``datastore``, ``pubsub`` or ``bigtable``.
- """
- if package == DATASTORE:
- datastore_wait_ready(popen)
- elif package == PUBSUB:
- wait_ready_prefix(popen, _PS_READY_LINE_PREFIX)
- elif package == BIGTABLE:
- wait_ready_prefix(popen, _BT_READY_LINE_PREFIX)
- else:
- raise KeyError('Package not supported', package)
-
-
-def cleanup(pid):
- """Clean up a process (including all of its children).
-
- :type pid: int
- :param pid: Process ID.
- """
- proc = psutil.Process(pid)
- for child_proc in proc.children(recursive=True):
- try:
- child_proc.kill()
- child_proc.terminate()
- except psutil.NoSuchProcess:
- pass
- proc.terminate()
- proc.kill()
-
-
-def run_tests_in_emulator(package):
- """Spawn an emulator instance and run the system tests.
-
- :type package: str
- :param package: The package to run system tests against.
- """
- # Make sure this package has environment vars to replace.
- env_vars = PACKAGE_INFO[package]
-
- start_command = get_start_command(package)
- # Pipe stdout and stderr so the emulator's output doesn't pollute the user's console.
- proc_start = subprocess.Popen(start_command, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- try:
- wait_ready(package, proc_start)
- env_init_command = get_env_init_command(package)
- proc_env = subprocess.Popen(env_init_command, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- env_status = proc_env.wait()
- if env_status != 0:
- raise RuntimeError(env_status, proc_env.stderr.read())
- env_lines = proc_env.stdout.read().strip().split('\n')
- # Set environment variables before running the system tests.
- for env_var in env_vars:
- line_prefix = 'export ' + env_var + '='
- value, = [line.split(line_prefix, 1)[1] for line in env_lines
- if line.startswith(line_prefix)]
- os.environ[env_var] = value
- run_module_tests(package,
- ignore_requirements=True)
- finally:
- cleanup(proc_start.pid)
-
-
-def main():
- """Main method to run this script."""
- parser = get_parser()
- args = parser.parse_args()
- run_tests_in_emulator(args.package)
-
-
-if __name__ == '__main__':
- main()
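For context on the removed runner above: it parsed the `export VAR=value` lines printed by `gcloud beta emulators <package> env-init` and copied them into the test process's environment. A minimal standalone sketch of that parsing step (the variable names in the sample output are illustrative, not necessarily the exact ones the script consumed):

import os

def apply_env_init_output(env_lines, wanted_vars):
    """Copy `export VAR=value` lines from emulator env-init output into os.environ."""
    for env_var in wanted_vars:
        prefix = "export " + env_var + "="
        # Exactly one matching line is expected; tuple unpacking enforces that.
        value, = [line[len(prefix):] for line in env_lines if line.startswith(prefix)]
        os.environ[env_var] = value

# Sample env-init output (illustrative values).
sample_lines = [
    "export DATASTORE_DATASET=my-project",
    "export DATASTORE_HOST=http://localhost:8081",
]
apply_env_init_output(sample_lines, ("DATASTORE_DATASET", "DATASTORE_HOST"))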
diff --git a/test_utils/scripts/update_docs.sh b/test_utils/scripts/update_docs.sh
deleted file mode 100755
index bbf6788b6a..0000000000
--- a/test_utils/scripts/update_docs.sh
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/bin/bash
-
-# Copyright 2016 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -ev
-
-GH_OWNER='GoogleCloudPlatform'
-GH_PROJECT_NAME='google-cloud-python'
-
-DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-
-# Function to build the docs.
-function build_docs {
- rm -rf docs/_build/
- # -W -> warnings as errors
- # -T -> show full traceback on exception
- # -N -> no color
- sphinx-build \
- -W -T -N \
- -b html \
- -d docs/_build/doctrees \
- docs/ \
- docs/_build/html/
- return $?
-}
-
-# Only publish docs when running on CI (CircleCI master merges or Kokoro).
-if [[ "${CIRCLE_BRANCH}" == "master" ]] && [[ -z "${CIRCLE_PR_NUMBER}" ]]; then
- echo "Building new docs on a merged commit."
-elif [[ "$1" == "kokoro" ]]; then
- echo "Building and publishing docs on Kokoro."
-elif [[ -n "${CIRCLE_TAG}" ]]; then
- echo "Building new docs on a tag (but will not deploy)."
- build_docs
- exit $?
-else
- echo "Not on master nor a release tag."
- echo "Building new docs for testing purposes, but not deploying."
- build_docs
- exit $?
-fi
-
-# Adding GitHub pages branch. `git submodule add` checks it
-# out at HEAD.
-GH_PAGES_DIR='ghpages'
-git submodule add -q -b gh-pages \
- "git@github.com:${GH_OWNER}/${GH_PROJECT_NAME}" ${GH_PAGES_DIR}
-
-# Determine if we are building a new tag or are building docs
-# for master. Then build new docs in docs/_build from master.
-if [[ -n "${CIRCLE_TAG}" ]]; then
- # Sphinx will use the package version by default.
- build_docs
-else
- SPHINX_RELEASE=$(git log -1 --pretty=%h) build_docs
-fi
-
-# Update gh-pages with the created docs.
-cd ${GH_PAGES_DIR}
-git rm -fr latest/
-cp -R ../docs/_build/html/ latest/
-
-# Stage the updated files to push to gh-pages.
-git add .
-git status
-
-# If there are no changes, just exit cleanly.
-if [[ -z "$(git status --porcelain)" ]]; then
- echo "Nothing to commit. Exiting without pushing changes."
- exit
-fi
-
-# Commit to gh-pages branch to apply changes.
-git config --global user.email "dpebot@google.com"
-git config --global user.name "dpebot"
-git commit -m "Update docs after merge to master."
-
-# NOTE: This may fail if two docs updates (on merges to master)
-# happen in close proximity.
-git push -q origin HEAD:gh-pages
diff --git a/test_utils/setup.py b/test_utils/setup.py
deleted file mode 100644
index 8e9222a7f8..0000000000
--- a/test_utils/setup.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-
-from setuptools import find_packages
-from setuptools import setup
-
-
-PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__))
-
-
-# NOTE: This is duplicated throughout and we should try to
-# consolidate.
-SETUP_BASE = {
- 'author': 'Google Cloud Platform',
- 'author_email': 'googleapis-publisher@google.com',
- 'scripts': [],
- 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python',
- 'license': 'Apache 2.0',
- 'platforms': 'Posix; MacOS X; Windows',
- 'include_package_data': True,
- 'zip_safe': False,
- 'classifiers': [
- 'Development Status :: 4 - Beta',
- 'Intended Audience :: Developers',
- 'License :: OSI Approved :: Apache Software License',
- 'Operating System :: OS Independent',
- 'Programming Language :: Python :: 2',
- 'Programming Language :: Python :: 2.7',
- 'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.5',
- 'Programming Language :: Python :: 3.6',
- 'Programming Language :: Python :: 3.7',
- 'Topic :: Internet',
- ],
-}
-
-
-REQUIREMENTS = [
- 'google-auth >= 0.4.0',
- 'six',
-]
-
-setup(
- name='google-cloud-testutils',
- version='0.24.0',
- description='System test utilities for google-cloud-python',
- packages=find_packages(),
- install_requires=REQUIREMENTS,
- python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*',
- **SETUP_BASE
-)
diff --git a/test_utils/test_utils/__init__.py b/test_utils/test_utils/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/test_utils/test_utils/imports.py b/test_utils/test_utils/imports.py
deleted file mode 100644
index 5991af7fc4..0000000000
--- a/test_utils/test_utils/imports.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2019 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import mock
-import six
-
-
-def maybe_fail_import(predicate):
- """Create and return a patcher that conditionally makes an import fail.
-
- Args:
- predicate (Callable[[...], bool]): A callable that, if it returns `True`,
- triggers an `ImportError`. It must accept the same arguments as the
- built-in `__import__` function.
- https://docs.python.org/3/library/functions.html#__import__
-
- Returns:
- A mock patcher object that can be used to enable patched import behavior.
- """
- orig_import = six.moves.builtins.__import__
-
- def custom_import(name, globals=None, locals=None, fromlist=(), level=0):
- if predicate(name, globals, locals, fromlist, level):
- raise ImportError
- return orig_import(name, globals, locals, fromlist, level)
-
- return mock.patch.object(six.moves.builtins, "__import__", new=custom_import)
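A usage sketch for the removed `maybe_fail_import` helper, assuming it is still importable from `test_utils.imports`; the module name `pandas` is only an example:

import pytest

from test_utils.imports import maybe_fail_import  # assumption: helper available

def test_handles_missing_optional_dependency():
    # Any import of `pandas` raises ImportError while the patch is active.
    fail_pandas = maybe_fail_import(predicate=lambda name, *args: name == "pandas")
    with fail_pandas:
        with pytest.raises(ImportError):
            import pandas  # noqa: F401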
diff --git a/test_utils/test_utils/retry.py b/test_utils/test_utils/retry.py
deleted file mode 100644
index e61c001a03..0000000000
--- a/test_utils/test_utils/retry.py
+++ /dev/null
@@ -1,207 +0,0 @@
-# Copyright 2016 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import time
-from functools import wraps
-
-import six
-
-MAX_TRIES = 4
-DELAY = 1
-BACKOFF = 2
-
-
-def _retry_all(_):
- """Retry all caught exceptions."""
- return True
-
-
-class BackoffFailed(Exception):
- """Retry w/ backoffs did not complete successfully."""
-
-
-class RetryBase(object):
- """Base class for retrying a decorated function call w/ exponential backoff.
-
- :type max_tries: int
- :param max_tries: Number of times to try (not retry) before giving up.
-
- :type delay: int
- :param delay: Initial delay between retries in seconds.
-
- :type backoff: int
- :param backoff: Backoff multiplier e.g. value of 2 will double the
- delay each retry.
-
- :type logger: logging.Logger instance
- :param logger: Logger to use. If None, print.
- """
- def __init__(self, max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF,
- logger=None):
- self.max_tries = max_tries
- self.delay = delay
- self.backoff = backoff
- self.logger = logger.warning if logger else six.print_
-
-
-class RetryErrors(RetryBase):
- """Decorator for retrying given exceptions in testing.
-
- :type exception: Exception or tuple of Exceptions
- :param exception: The exception (or tuple of exceptions) to catch
- and retry on.
-
- :type error_predicate: function, takes caught exception, returns bool
- :param error_predicate: Predicate evaluating whether to retry after a
- caught exception.
-
- :type max_tries: int
- :param max_tries: Number of times to try (not retry) before giving up.
-
- :type delay: int
- :param delay: Initial delay between retries in seconds.
-
- :type backoff: int
- :param backoff: Backoff multiplier e.g. value of 2 will double the
- delay each retry.
-
- :type logger: logging.Logger instance
- :param logger: Logger to use. If None, print.
- """
- def __init__(self, exception, error_predicate=_retry_all,
- max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF,
- logger=None):
- super(RetryErrors, self).__init__(max_tries, delay, backoff, logger)
- self.exception = exception
- self.error_predicate = error_predicate
-
- def __call__(self, to_wrap):
- @wraps(to_wrap)
- def wrapped_function(*args, **kwargs):
- tries = 0
- while tries < self.max_tries:
- try:
- return to_wrap(*args, **kwargs)
- except self.exception as caught_exception:
-
- if not self.error_predicate(caught_exception):
- raise
-
- delay = self.delay * self.backoff**tries
- msg = ("%s, Trying again in %d seconds..." %
- (caught_exception, delay))
- self.logger(msg)
-
- time.sleep(delay)
- tries += 1
- return to_wrap(*args, **kwargs)
-
- return wrapped_function
-
-
-class RetryResult(RetryBase):
- """Decorator for retrying based on non-error result.
-
- :type result_predicate: function, takes result, returns bool
- :param result_predicate: Predicate evaluating whether to retry after a
- result is returned.
-
- :type max_tries: int
- :param max_tries: Number of times to try (not retry) before giving up.
-
- :type delay: int
- :param delay: Initial delay between retries in seconds.
-
- :type backoff: int
- :param backoff: Backoff multiplier e.g. value of 2 will double the
- delay each retry.
-
- :type logger: logging.Logger instance
- :param logger: Logger to use. If None, print.
- """
- def __init__(self, result_predicate,
- max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF,
- logger=None):
- super(RetryResult, self).__init__(max_tries, delay, backoff, logger)
- self.result_predicate = result_predicate
-
- def __call__(self, to_wrap):
- @wraps(to_wrap)
- def wrapped_function(*args, **kwargs):
- tries = 0
- while tries < self.max_tries:
- result = to_wrap(*args, **kwargs)
- if self.result_predicate(result):
- return result
-
- delay = self.delay * self.backoff**tries
- msg = "%s. Trying again in %d seconds..." % (
- self.result_predicate.__name__, delay,)
- self.logger(msg)
-
- time.sleep(delay)
- tries += 1
- raise BackoffFailed()
-
- return wrapped_function
-
-
-class RetryInstanceState(RetryBase):
- """Decorator for retrying based on instance state.
-
- :type instance_predicate: function, takes instance, returns bool
- :param instance_predicate: Predicate evaluating whether to retry after an
- API-invoking method is called.
-
- :type max_tries: int
- :param max_tries: Number of times to try (not retry) before giving up.
-
- :type delay: int
- :param delay: Initial delay between retries in seconds.
-
- :type backoff: int
- :param backoff: Backoff multiplier e.g. value of 2 will double the
- delay each retry.
-
- :type logger: logging.Logger instance
- :param logger: Logger to use. If None, print.
- """
- def __init__(self, instance_predicate,
- max_tries=MAX_TRIES, delay=DELAY, backoff=BACKOFF,
- logger=None):
- super(RetryInstanceState, self).__init__(
- max_tries, delay, backoff, logger)
- self.instance_predicate = instance_predicate
-
- def __call__(self, to_wrap):
- instance = to_wrap.__self__ # only instance methods allowed
-
- @wraps(to_wrap)
- def wrapped_function(*args, **kwargs):
- tries = 0
- while tries < self.max_tries:
- result = to_wrap(*args, **kwargs)
- if self.instance_predicate(instance):
- return result
-
- delay = self.delay * self.backoff**tries
- msg = "%s. Trying again in %d seconds..." % (
- self.instance_predicate.__name__, delay,)
- self.logger(msg)
-
- time.sleep(delay)
- tries += 1
- raise BackoffFailed()
-
- return wrapped_function
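A usage sketch of the removed `RetryErrors` decorator, assuming the module is still importable; the wrapped call and exception type are only examples:

from google.api_core.exceptions import ServiceUnavailable
from test_utils.retry import RetryErrors  # assumption: helper available

@RetryErrors(ServiceUnavailable, max_tries=4, delay=1, backoff=2)
def list_instance_configs(client):
    # Retried with exponentially growing delays (1s, 2s, 4s, ...) whenever the
    # call raises a transient ServiceUnavailable error.
    return list(client.list_instance_configs())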
diff --git a/test_utils/test_utils/system.py b/test_utils/test_utils/system.py
deleted file mode 100644
index 590dc62a06..0000000000
--- a/test_utils/test_utils/system.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# Copyright 2014 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import print_function
-import os
-import sys
-import time
-
-import google.auth.credentials
-from google.auth.environment_vars import CREDENTIALS as TEST_CREDENTIALS
-
-
-# From shell environ. May be None.
-CREDENTIALS = os.getenv(TEST_CREDENTIALS)
-
-ENVIRON_ERROR_MSG = """\
-To run the system tests, you need to set some environment variables.
-Please check the CONTRIBUTING guide for instructions.
-"""
-
-
-class EmulatorCreds(google.auth.credentials.Credentials):
- """A mock credential object.
-
- Used to avoid unnecessary token refreshing or reliance on the network
- while an emulator is running.
- """
-
- def __init__(self): # pylint: disable=super-init-not-called
- self.token = b'seekrit'
- self.expiry = None
-
- @property
- def valid(self):
- """Would-be validity check of the credentials.
-
- Always is :data:`True`.
- """
- return True
-
- def refresh(self, unused_request): # pylint: disable=unused-argument
- """Off-limits implementation for abstract method."""
- raise RuntimeError('Should never be refreshed.')
-
-
-def check_environ():
- err_msg = None
- if CREDENTIALS is None:
- err_msg = '\nMissing variables: ' + TEST_CREDENTIALS
- elif not os.path.isfile(CREDENTIALS):
- err_msg = '\nThe %s path %r is not a file.' % (TEST_CREDENTIALS,
- CREDENTIALS)
-
- if err_msg is not None:
- msg = ENVIRON_ERROR_MSG + err_msg
- print(msg, file=sys.stderr)
- sys.exit(1)
-
-
-def unique_resource_id(delimiter='_'):
- """A unique identifier for a resource.
-
- Intended to help locate resources created in particular
- testing environments and at particular times.
- """
- build_id = os.getenv('CIRCLE_BUILD_NUM', '')
- if build_id == '':
- return '%s%d' % (delimiter, 1000 * time.time())
- else:
- return '%s%s%s%d' % (delimiter, build_id, delimiter, time.time())
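A sketch of how the removed `EmulatorCreds` and `unique_resource_id` helpers were typically combined when pointing a client at a local emulator; the host, project, and prefix values below are placeholders:

import os

from google.cloud import spanner_v1
from test_utils.system import EmulatorCreds, unique_resource_id  # assumption: helpers available

os.environ["SPANNER_EMULATOR_HOST"] = "localhost:9010"  # placeholder emulator address

client = spanner_v1.Client(project="emulator-project", credentials=EmulatorCreds())
instance_id = "test-instance" + unique_resource_id("-")  # e.g. "test-instance-1625000000000"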
diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt
index b3a4b8b6cc..2eac9c8653 100644
--- a/testing/constraints-3.6.txt
+++ b/testing/constraints-3.6.txt
@@ -5,7 +5,7 @@
#
# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
# Then this file should have foo==1.14.0
-google-api-core==1.22.2
+google-api-core==1.26.0
google-cloud-core==1.4.1
grpc-google-iam-v1==0.12.3
libcst==0.2.5
diff --git a/tests/system/test_system.py b/tests/system/test_system.py
index ad2b8a9178..845e79f805 100644
--- a/tests/system/test_system.py
+++ b/tests/system/test_system.py
@@ -68,6 +68,7 @@
INSTANCE_ID = os.environ.get(
"GOOGLE_CLOUD_TESTS_SPANNER_INSTANCE", "google-cloud-python-systest"
)
+MULTI_REGION_INSTANCE_ID = "multi-region" + unique_resource_id("-")
EXISTING_INSTANCES = []
COUNTERS_TABLE = "counters"
COUNTERS_COLUMNS = ("name", "value")
@@ -353,9 +354,25 @@ def setUpClass(cls):
SPANNER_OPERATION_TIMEOUT_IN_SECONDS
) # raises on failure / timeout.
+ # Create a multi-region instance
+ multi_region_config = "nam3"
+ config_name = "{}/instanceConfigs/{}".format(
+ Config.CLIENT.project_name, multi_region_config
+ )
+ create_time = str(int(time.time()))
+ labels = {"python-spanner-systests": "true", "created": create_time}
+ cls._instance = Config.CLIENT.instance(
+ instance_id=MULTI_REGION_INSTANCE_ID,
+ configuration_name=config_name,
+ labels=labels,
+ )
+ operation = cls._instance.create()
+ operation.result(SPANNER_OPERATION_TIMEOUT_IN_SECONDS)
+
@classmethod
def tearDownClass(cls):
cls._db.drop()
+ cls._instance.delete()
def setUp(self):
self.to_delete = []
@@ -443,6 +460,42 @@ def test_create_database_pitr_success(self):
for result in results:
self.assertEqual(result[0], retention_period)
+ @unittest.skipIf(
+ USE_EMULATOR, "Default leader setting is not supported by the emulator"
+ )
+ def test_create_database_with_default_leader_success(self):
+ pool = BurstyPool(labels={"testcase": "create_database_default_leader"})
+
+ temp_db_id = "temp_db" + unique_resource_id("_")
+ default_leader = "us-east4"
+ ddl_statements = [
+ "ALTER DATABASE {}"
+ " SET OPTIONS (default_leader = '{}')".format(temp_db_id, default_leader)
+ ]
+ temp_db = self._instance.database(
+ temp_db_id, pool=pool, ddl_statements=ddl_statements
+ )
+ operation = temp_db.create()
+ self.to_delete.append(temp_db)
+
+ # We want to make sure the operation completes.
+ operation.result(30) # raises on failure / timeout.
+
+ database_ids = [database.name for database in self._instance.list_databases()]
+ self.assertIn(temp_db.name, database_ids)
+
+ temp_db.reload()
+ self.assertEqual(temp_db.default_leader, default_leader)
+
+ with temp_db.snapshot() as snapshot:
+ results = snapshot.execute_sql(
+ "SELECT OPTION_VALUE AS default_leader "
+ "FROM INFORMATION_SCHEMA.DATABASE_OPTIONS "
+ "WHERE SCHEMA_NAME = '' AND OPTION_NAME = 'default_leader'"
+ )
+ for result in results:
+ self.assertEqual(result[0], default_leader)
+
def test_table_not_found(self):
temp_db_id = "temp_db" + unique_resource_id("_")
@@ -551,6 +604,36 @@ def test_update_database_ddl_pitr_success(self):
self.assertEqual(temp_db.version_retention_period, retention_period)
self.assertEqual(len(temp_db.ddl_statements), len(ddl_statements))
+ @unittest.skipIf(
+ USE_EMULATOR, "Default leader update is not supported by the emulator"
+ )
+ def test_update_database_ddl_default_leader_success(self):
+ pool = BurstyPool(labels={"testcase": "update_database_ddl_default_leader"})
+
+ temp_db_id = "temp_db" + unique_resource_id("_")
+ default_leader = "us-east4"
+ temp_db = self._instance.database(temp_db_id, pool=pool)
+ create_op = temp_db.create()
+ self.to_delete.append(temp_db)
+
+ # We want to make sure the operation completes.
+ create_op.result(240) # raises on failure / timeout.
+
+ self.assertIsNone(temp_db.default_leader)
+
+ ddl_statements = DDL_STATEMENTS + [
+ "ALTER DATABASE {}"
+ " SET OPTIONS (default_leader = '{}')".format(temp_db_id, default_leader)
+ ]
+ operation = temp_db.update_ddl(ddl_statements)
+
+ # We want to make sure the operation completes.
+ operation.result(240) # raises on failure / timeout.
+
+ temp_db.reload()
+ self.assertEqual(temp_db.default_leader, default_leader)
+ self.assertEqual(len(temp_db.ddl_statements), len(ddl_statements))
+
def test_db_batch_insert_then_db_snapshot_read(self):
retry = RetryInstanceState(_has_all_ddl)
retry(self._db.reload)()
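The new system tests above exercise multi-region instances and the `default_leader` database option; outside the test harness the same flow looks roughly like this, with project, instance, database, and region names as placeholders:

from google.cloud import spanner

client = spanner.Client(project="my-project")

# "nam3" is one of the multi-region instance configurations; the default
# leader must be a region contained in that configuration (e.g. us-east4).
config_name = "{}/instanceConfigs/nam3".format(client.project_name)
instance = client.instance("multi-region-example", configuration_name=config_name)
instance.create().result(300)  # raises on failure / timeout

database = instance.database(
    "exampledb",
    ddl_statements=[
        "ALTER DATABASE exampledb SET OPTIONS (default_leader = 'us-east4')"
    ],
)
database.create().result(120)

database.reload()
print(database.default_leader)  # expected: 'us-east4'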
diff --git a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py
index 28269154e0..1ca405899b 100644
--- a/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py
+++ b/tests/unit/gapic/spanner_admin_database_v1/test_database_admin.py
@@ -42,9 +42,6 @@
)
from google.cloud.spanner_admin_database_v1.services.database_admin import pagers
from google.cloud.spanner_admin_database_v1.services.database_admin import transports
-from google.cloud.spanner_admin_database_v1.services.database_admin.transports.base import (
- _API_CORE_VERSION,
-)
from google.cloud.spanner_admin_database_v1.services.database_admin.transports.base import (
_GOOGLE_AUTH_VERSION,
)
@@ -66,8 +63,9 @@
import google.auth
-# TODO(busunkim): Once google-api-core >= 1.26.0 is required:
-# - Delete all the api-core and auth "less than" test cases
+# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
+# through google-api-core:
+# - Delete the auth "less than" test cases
# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
@@ -78,16 +76,6 @@
reason="This test requires google-auth >= 1.25.0",
)
-requires_api_core_lt_1_26_0 = pytest.mark.skipif(
- packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"),
- reason="This test requires google-api-core < 1.26.0",
-)
-
-requires_api_core_gte_1_26_0 = pytest.mark.skipif(
- packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"),
- reason="This test requires google-api-core >= 1.26.0",
-)
-
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
@@ -150,6 +138,31 @@ def test_database_admin_client_from_service_account_info(client_class):
assert client.transport._host == "spanner.googleapis.com:443"
+@pytest.mark.parametrize(
+ "transport_class,transport_name",
+ [
+ (transports.DatabaseAdminGrpcTransport, "grpc"),
+ (transports.DatabaseAdminGrpcAsyncIOTransport, "grpc_asyncio"),
+ ],
+)
+def test_database_admin_client_service_account_always_use_jwt(
+ transport_class, transport_name
+):
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport = transport_class(credentials=creds, always_use_jwt_access=True)
+ use_jwt.assert_called_once_with(True)
+
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport = transport_class(credentials=creds, always_use_jwt_access=False)
+ use_jwt.assert_not_called()
+
+
@pytest.mark.parametrize(
"client_class", [DatabaseAdminClient, DatabaseAdminAsyncClient,]
)
@@ -229,6 +242,7 @@ def test_database_admin_client_client_options(
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
@@ -245,6 +259,7 @@ def test_database_admin_client_client_options(
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
@@ -261,6 +276,7 @@ def test_database_admin_client_client_options(
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
@@ -289,6 +305,7 @@ def test_database_admin_client_client_options(
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
@@ -355,6 +372,7 @@ def test_database_admin_client_mtls_env_auto(
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
@@ -388,6 +406,7 @@ def test_database_admin_client_mtls_env_auto(
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
# Check the case client_cert_source and ADC client cert are not provided.
@@ -409,6 +428,7 @@ def test_database_admin_client_mtls_env_auto(
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
@@ -439,6 +459,7 @@ def test_database_admin_client_client_options_scopes(
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
@@ -469,6 +490,7 @@ def test_database_admin_client_client_options_credentials_file(
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
@@ -488,6 +510,7 @@ def test_database_admin_client_client_options_from_dict():
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
@@ -1085,6 +1108,7 @@ def test_get_database(
name="name_value",
state=spanner_database_admin.Database.State.CREATING,
version_retention_period="version_retention_period_value",
+ default_leader="default_leader_value",
)
response = client.get_database(request)
@@ -1098,6 +1122,7 @@ def test_get_database(
assert response.name == "name_value"
assert response.state == spanner_database_admin.Database.State.CREATING
assert response.version_retention_period == "version_retention_period_value"
+ assert response.default_leader == "default_leader_value"
def test_get_database_from_dict():
@@ -1140,6 +1165,7 @@ async def test_get_database_async(
name="name_value",
state=spanner_database_admin.Database.State.CREATING,
version_retention_period="version_retention_period_value",
+ default_leader="default_leader_value",
)
)
response = await client.get_database(request)
@@ -1154,6 +1180,7 @@ async def test_get_database_async(
assert response.name == "name_value"
assert response.state == spanner_database_admin.Database.State.CREATING
assert response.version_retention_period == "version_retention_period_value"
+ assert response.default_leader == "default_leader_value"
@pytest.mark.asyncio
@@ -4993,7 +5020,6 @@ def test_database_admin_transport_auth_adc_old_google_auth(transport_class):
(transports.DatabaseAdminGrpcAsyncIOTransport, grpc_helpers_async),
],
)
-@requires_api_core_gte_1_26_0
def test_database_admin_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
@@ -5025,82 +5051,6 @@ def test_database_admin_transport_create_channel(transport_class, grpc_helpers):
)
-@pytest.mark.parametrize(
- "transport_class,grpc_helpers",
- [
- (transports.DatabaseAdminGrpcTransport, grpc_helpers),
- (transports.DatabaseAdminGrpcAsyncIOTransport, grpc_helpers_async),
- ],
-)
-@requires_api_core_lt_1_26_0
-def test_database_admin_transport_create_channel_old_api_core(
- transport_class, grpc_helpers
-):
- # If credentials and host are not provided, the transport class should use
- # ADC credentials.
- with mock.patch.object(
- google.auth, "default", autospec=True
- ) as adc, mock.patch.object(
- grpc_helpers, "create_channel", autospec=True
- ) as create_channel:
- creds = ga_credentials.AnonymousCredentials()
- adc.return_value = (creds, None)
- transport_class(quota_project_id="octopus")
-
- create_channel.assert_called_with(
- "spanner.googleapis.com:443",
- credentials=creds,
- credentials_file=None,
- quota_project_id="octopus",
- scopes=(
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/spanner.admin",
- ),
- ssl_credentials=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
-
-@pytest.mark.parametrize(
- "transport_class,grpc_helpers",
- [
- (transports.DatabaseAdminGrpcTransport, grpc_helpers),
- (transports.DatabaseAdminGrpcAsyncIOTransport, grpc_helpers_async),
- ],
-)
-@requires_api_core_lt_1_26_0
-def test_database_admin_transport_create_channel_user_scopes(
- transport_class, grpc_helpers
-):
- # If credentials and host are not provided, the transport class should use
- # ADC credentials.
- with mock.patch.object(
- google.auth, "default", autospec=True
- ) as adc, mock.patch.object(
- grpc_helpers, "create_channel", autospec=True
- ) as create_channel:
- creds = ga_credentials.AnonymousCredentials()
- adc.return_value = (creds, None)
-
- transport_class(quota_project_id="octopus", scopes=["1", "2"])
-
- create_channel.assert_called_with(
- "spanner.googleapis.com:443",
- credentials=creds,
- credentials_file=None,
- quota_project_id="octopus",
- scopes=["1", "2"],
- ssl_credentials=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
-
@pytest.mark.parametrize(
"transport_class",
[
@@ -5123,10 +5073,7 @@ def test_database_admin_grpc_transport_client_cert_source_for_mtls(transport_cla
"squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
- scopes=(
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/spanner.admin",
- ),
+ scopes=None,
ssl_credentials=mock_ssl_channel_creds,
quota_project_id=None,
options=[
@@ -5233,10 +5180,7 @@ def test_database_admin_transport_channel_mtls_with_client_cert_source(transport
"mtls.squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
- scopes=(
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/spanner.admin",
- ),
+ scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
@@ -5283,10 +5227,7 @@ def test_database_admin_transport_channel_mtls_with_adc(transport_class):
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
- scopes=(
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/spanner.admin",
- ),
+ scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
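The `always_use_jwt_access=True` argument asserted throughout these transport tests opts service-account credentials into self-signed JWTs instead of an OAuth2 token exchange, when the installed google-auth supports it. Roughly, and with a placeholder key path:

from google.oauth2 import service_account

creds = service_account.Credentials.from_service_account_file("sa.json")  # placeholder path

# Available in newer google-auth releases; the tests above patch this method with
# create=True so they still pass when the installed version predates it.
jwt_creds = creds.with_always_use_jwt_access(True)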
diff --git a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py
index 038f4b0e9a..567d56d3c6 100644
--- a/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py
+++ b/tests/unit/gapic/spanner_admin_instance_v1/test_instance_admin.py
@@ -42,9 +42,6 @@
)
from google.cloud.spanner_admin_instance_v1.services.instance_admin import pagers
from google.cloud.spanner_admin_instance_v1.services.instance_admin import transports
-from google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.base import (
- _API_CORE_VERSION,
-)
from google.cloud.spanner_admin_instance_v1.services.instance_admin.transports.base import (
_GOOGLE_AUTH_VERSION,
)
@@ -59,8 +56,9 @@
import google.auth
-# TODO(busunkim): Once google-api-core >= 1.26.0 is required:
-# - Delete all the api-core and auth "less than" test cases
+# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
+# through google-api-core:
+# - Delete the auth "less than" test cases
# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
@@ -71,16 +69,6 @@
reason="This test requires google-auth >= 1.25.0",
)
-requires_api_core_lt_1_26_0 = pytest.mark.skipif(
- packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"),
- reason="This test requires google-api-core < 1.26.0",
-)
-
-requires_api_core_gte_1_26_0 = pytest.mark.skipif(
- packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"),
- reason="This test requires google-api-core >= 1.26.0",
-)
-
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
@@ -143,6 +131,31 @@ def test_instance_admin_client_from_service_account_info(client_class):
assert client.transport._host == "spanner.googleapis.com:443"
+@pytest.mark.parametrize(
+ "transport_class,transport_name",
+ [
+ (transports.InstanceAdminGrpcTransport, "grpc"),
+ (transports.InstanceAdminGrpcAsyncIOTransport, "grpc_asyncio"),
+ ],
+)
+def test_instance_admin_client_service_account_always_use_jwt(
+ transport_class, transport_name
+):
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport = transport_class(credentials=creds, always_use_jwt_access=True)
+ use_jwt.assert_called_once_with(True)
+
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport = transport_class(credentials=creds, always_use_jwt_access=False)
+ use_jwt.assert_not_called()
+
+
@pytest.mark.parametrize(
"client_class", [InstanceAdminClient, InstanceAdminAsyncClient,]
)
@@ -222,6 +235,7 @@ def test_instance_admin_client_client_options(
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
@@ -238,6 +252,7 @@ def test_instance_admin_client_client_options(
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
@@ -254,6 +269,7 @@ def test_instance_admin_client_client_options(
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
@@ -282,6 +298,7 @@ def test_instance_admin_client_client_options(
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
@@ -348,6 +365,7 @@ def test_instance_admin_client_mtls_env_auto(
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
@@ -381,6 +399,7 @@ def test_instance_admin_client_mtls_env_auto(
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
# Check the case client_cert_source and ADC client cert are not provided.
@@ -402,6 +421,7 @@ def test_instance_admin_client_mtls_env_auto(
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
@@ -432,6 +452,7 @@ def test_instance_admin_client_client_options_scopes(
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
@@ -462,6 +483,7 @@ def test_instance_admin_client_client_options_credentials_file(
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
@@ -481,6 +503,7 @@ def test_instance_admin_client_client_options_from_dict():
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
@@ -898,7 +921,9 @@ def test_get_instance_config(
) as call:
# Designate an appropriate return value for the call.
call.return_value = spanner_instance_admin.InstanceConfig(
- name="name_value", display_name="display_name_value",
+ name="name_value",
+ display_name="display_name_value",
+ leader_options=["leader_options_value"],
)
response = client.get_instance_config(request)
@@ -911,6 +936,7 @@ def test_get_instance_config(
assert isinstance(response, spanner_instance_admin.InstanceConfig)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
+ assert response.leader_options == ["leader_options_value"]
def test_get_instance_config_from_dict():
@@ -954,7 +980,9 @@ async def test_get_instance_config_async(
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
spanner_instance_admin.InstanceConfig(
- name="name_value", display_name="display_name_value",
+ name="name_value",
+ display_name="display_name_value",
+ leader_options=["leader_options_value"],
)
)
response = await client.get_instance_config(request)
@@ -968,6 +996,7 @@ async def test_get_instance_config_async(
assert isinstance(response, spanner_instance_admin.InstanceConfig)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
+ assert response.leader_options == ["leader_options_value"]
@pytest.mark.asyncio
@@ -3270,7 +3299,6 @@ def test_instance_admin_transport_auth_adc_old_google_auth(transport_class):
(transports.InstanceAdminGrpcAsyncIOTransport, grpc_helpers_async),
],
)
-@requires_api_core_gte_1_26_0
def test_instance_admin_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
@@ -3302,82 +3330,6 @@ def test_instance_admin_transport_create_channel(transport_class, grpc_helpers):
)
-@pytest.mark.parametrize(
- "transport_class,grpc_helpers",
- [
- (transports.InstanceAdminGrpcTransport, grpc_helpers),
- (transports.InstanceAdminGrpcAsyncIOTransport, grpc_helpers_async),
- ],
-)
-@requires_api_core_lt_1_26_0
-def test_instance_admin_transport_create_channel_old_api_core(
- transport_class, grpc_helpers
-):
- # If credentials and host are not provided, the transport class should use
- # ADC credentials.
- with mock.patch.object(
- google.auth, "default", autospec=True
- ) as adc, mock.patch.object(
- grpc_helpers, "create_channel", autospec=True
- ) as create_channel:
- creds = ga_credentials.AnonymousCredentials()
- adc.return_value = (creds, None)
- transport_class(quota_project_id="octopus")
-
- create_channel.assert_called_with(
- "spanner.googleapis.com:443",
- credentials=creds,
- credentials_file=None,
- quota_project_id="octopus",
- scopes=(
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/spanner.admin",
- ),
- ssl_credentials=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
-
-@pytest.mark.parametrize(
- "transport_class,grpc_helpers",
- [
- (transports.InstanceAdminGrpcTransport, grpc_helpers),
- (transports.InstanceAdminGrpcAsyncIOTransport, grpc_helpers_async),
- ],
-)
-@requires_api_core_lt_1_26_0
-def test_instance_admin_transport_create_channel_user_scopes(
- transport_class, grpc_helpers
-):
- # If credentials and host are not provided, the transport class should use
- # ADC credentials.
- with mock.patch.object(
- google.auth, "default", autospec=True
- ) as adc, mock.patch.object(
- grpc_helpers, "create_channel", autospec=True
- ) as create_channel:
- creds = ga_credentials.AnonymousCredentials()
- adc.return_value = (creds, None)
-
- transport_class(quota_project_id="octopus", scopes=["1", "2"])
-
- create_channel.assert_called_with(
- "spanner.googleapis.com:443",
- credentials=creds,
- credentials_file=None,
- quota_project_id="octopus",
- scopes=["1", "2"],
- ssl_credentials=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
-
@pytest.mark.parametrize(
"transport_class",
[
@@ -3400,10 +3352,7 @@ def test_instance_admin_grpc_transport_client_cert_source_for_mtls(transport_cla
"squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
- scopes=(
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/spanner.admin",
- ),
+ scopes=None,
ssl_credentials=mock_ssl_channel_creds,
quota_project_id=None,
options=[
@@ -3510,10 +3459,7 @@ def test_instance_admin_transport_channel_mtls_with_client_cert_source(transport
"mtls.squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
- scopes=(
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/spanner.admin",
- ),
+ scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
@@ -3560,10 +3506,7 @@ def test_instance_admin_transport_channel_mtls_with_adc(transport_class):
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
- scopes=(
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/spanner.admin",
- ),
+ scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
diff --git a/tests/unit/gapic/spanner_v1/test_spanner.py b/tests/unit/gapic/spanner_v1/test_spanner.py
index 9b57993367..86557f33e4 100644
--- a/tests/unit/gapic/spanner_v1/test_spanner.py
+++ b/tests/unit/gapic/spanner_v1/test_spanner.py
@@ -35,7 +35,6 @@
from google.cloud.spanner_v1.services.spanner import SpannerClient
from google.cloud.spanner_v1.services.spanner import pagers
from google.cloud.spanner_v1.services.spanner import transports
-from google.cloud.spanner_v1.services.spanner.transports.base import _API_CORE_VERSION
from google.cloud.spanner_v1.services.spanner.transports.base import (
_GOOGLE_AUTH_VERSION,
)
@@ -54,8 +53,9 @@
import google.auth
-# TODO(busunkim): Once google-api-core >= 1.26.0 is required:
-# - Delete all the api-core and auth "less than" test cases
+# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
+# through google-api-core:
+# - Delete the auth "less than" test cases
# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
@@ -66,16 +66,6 @@
reason="This test requires google-auth >= 1.25.0",
)
-requires_api_core_lt_1_26_0 = pytest.mark.skipif(
- packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"),
- reason="This test requires google-api-core < 1.26.0",
-)
-
-requires_api_core_gte_1_26_0 = pytest.mark.skipif(
- packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"),
- reason="This test requires google-api-core >= 1.26.0",
-)
-
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
@@ -130,6 +120,29 @@ def test_spanner_client_from_service_account_info(client_class):
assert client.transport._host == "spanner.googleapis.com:443"
+@pytest.mark.parametrize(
+ "transport_class,transport_name",
+ [
+ (transports.SpannerGrpcTransport, "grpc"),
+ (transports.SpannerGrpcAsyncIOTransport, "grpc_asyncio"),
+ ],
+)
+def test_spanner_client_service_account_always_use_jwt(transport_class, transport_name):
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport = transport_class(credentials=creds, always_use_jwt_access=True)
+ use_jwt.assert_called_once_with(True)
+
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport = transport_class(credentials=creds, always_use_jwt_access=False)
+ use_jwt.assert_not_called()
+
+
@pytest.mark.parametrize("client_class", [SpannerClient, SpannerAsyncClient,])
def test_spanner_client_from_service_account_file(client_class):
creds = ga_credentials.AnonymousCredentials()
@@ -197,6 +210,7 @@ def test_spanner_client_client_options(client_class, transport_class, transport_
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
@@ -213,6 +227,7 @@ def test_spanner_client_client_options(client_class, transport_class, transport_
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
@@ -229,6 +244,7 @@ def test_spanner_client_client_options(client_class, transport_class, transport_
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
@@ -257,6 +273,7 @@ def test_spanner_client_client_options(client_class, transport_class, transport_
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
@@ -319,6 +336,7 @@ def test_spanner_client_mtls_env_auto(
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
@@ -352,6 +370,7 @@ def test_spanner_client_mtls_env_auto(
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
# Check the case client_cert_source and ADC client cert are not provided.
@@ -373,6 +392,7 @@ def test_spanner_client_mtls_env_auto(
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
@@ -399,6 +419,7 @@ def test_spanner_client_client_options_scopes(
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
@@ -425,6 +446,7 @@ def test_spanner_client_client_options_credentials_file(
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
@@ -442,6 +464,7 @@ def test_spanner_client_client_options_from_dict():
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
+ always_use_jwt_access=True,
)
@@ -3385,7 +3408,6 @@ def test_spanner_transport_auth_adc_old_google_auth(transport_class):
(transports.SpannerGrpcAsyncIOTransport, grpc_helpers_async),
],
)
-@requires_api_core_gte_1_26_0
def test_spanner_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
@@ -3417,78 +3439,6 @@ def test_spanner_transport_create_channel(transport_class, grpc_helpers):
)
-@pytest.mark.parametrize(
- "transport_class,grpc_helpers",
- [
- (transports.SpannerGrpcTransport, grpc_helpers),
- (transports.SpannerGrpcAsyncIOTransport, grpc_helpers_async),
- ],
-)
-@requires_api_core_lt_1_26_0
-def test_spanner_transport_create_channel_old_api_core(transport_class, grpc_helpers):
- # If credentials and host are not provided, the transport class should use
- # ADC credentials.
- with mock.patch.object(
- google.auth, "default", autospec=True
- ) as adc, mock.patch.object(
- grpc_helpers, "create_channel", autospec=True
- ) as create_channel:
- creds = ga_credentials.AnonymousCredentials()
- adc.return_value = (creds, None)
- transport_class(quota_project_id="octopus")
-
- create_channel.assert_called_with(
- "spanner.googleapis.com:443",
- credentials=creds,
- credentials_file=None,
- quota_project_id="octopus",
- scopes=(
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/spanner.data",
- ),
- ssl_credentials=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
-
-@pytest.mark.parametrize(
- "transport_class,grpc_helpers",
- [
- (transports.SpannerGrpcTransport, grpc_helpers),
- (transports.SpannerGrpcAsyncIOTransport, grpc_helpers_async),
- ],
-)
-@requires_api_core_lt_1_26_0
-def test_spanner_transport_create_channel_user_scopes(transport_class, grpc_helpers):
- # If credentials and host are not provided, the transport class should use
- # ADC credentials.
- with mock.patch.object(
- google.auth, "default", autospec=True
- ) as adc, mock.patch.object(
- grpc_helpers, "create_channel", autospec=True
- ) as create_channel:
- creds = ga_credentials.AnonymousCredentials()
- adc.return_value = (creds, None)
-
- transport_class(quota_project_id="octopus", scopes=["1", "2"])
-
- create_channel.assert_called_with(
- "spanner.googleapis.com:443",
- credentials=creds,
- credentials_file=None,
- quota_project_id="octopus",
- scopes=["1", "2"],
- ssl_credentials=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
-
@pytest.mark.parametrize(
"transport_class",
[transports.SpannerGrpcTransport, transports.SpannerGrpcAsyncIOTransport],
@@ -3508,10 +3458,7 @@ def test_spanner_grpc_transport_client_cert_source_for_mtls(transport_class):
"squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
- scopes=(
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/spanner.data",
- ),
+ scopes=None,
ssl_credentials=mock_ssl_channel_creds,
quota_project_id=None,
options=[
@@ -3615,10 +3562,7 @@ def test_spanner_transport_channel_mtls_with_client_cert_source(transport_class)
"mtls.squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
- scopes=(
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/spanner.data",
- ),
+ scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
@@ -3662,10 +3606,7 @@ def test_spanner_transport_channel_mtls_with_adc(transport_class):
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
- scopes=(
- "https://www.googleapis.com/auth/cloud-platform",
- "https://www.googleapis.com/auth/spanner.data",
- ),
+ scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
diff --git a/tests/unit/spanner_dbapi/test_connect.py b/tests/unit/spanner_dbapi/test_connect.py
index a18781ffd1..96dcb20e01 100644
--- a/tests/unit/spanner_dbapi/test_connect.py
+++ b/tests/unit/spanner_dbapi/test_connect.py
@@ -20,6 +20,12 @@
import google.auth.credentials
+INSTANCE = "test-instance"
+DATABASE = "test-database"
+PROJECT = "test-project"
+USER_AGENT = "user-agent"
+
+
def _make_credentials():
class _CredentialsWithScopes(
google.auth.credentials.Credentials, google.auth.credentials.Scoped
@@ -29,138 +35,105 @@ class _CredentialsWithScopes(
return mock.Mock(spec=_CredentialsWithScopes)
+@mock.patch("google.cloud.spanner_v1.Client")
class Test_connect(unittest.TestCase):
- def test_connect(self):
+ def test_w_implicit(self, mock_client):
from google.cloud.spanner_dbapi import connect
from google.cloud.spanner_dbapi import Connection
- PROJECT = "test-project"
- USER_AGENT = "user-agent"
- CREDENTIALS = _make_credentials()
-
- with mock.patch("google.cloud.spanner_v1.Client") as client_mock:
- connection = connect(
- "test-instance",
- "test-database",
- PROJECT,
- CREDENTIALS,
- user_agent=USER_AGENT,
- )
+ client = mock_client.return_value
+ instance = client.instance.return_value
+ database = instance.database.return_value
- self.assertIsInstance(connection, Connection)
-
- client_mock.assert_called_once_with(
- project=PROJECT, credentials=CREDENTIALS, client_info=mock.ANY
- )
-
- def test_instance_not_found(self):
- from google.cloud.spanner_dbapi import connect
+ connection = connect(INSTANCE, DATABASE)
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=False,
- ) as exists_mock:
+ self.assertIsInstance(connection, Connection)
- with self.assertRaises(ValueError):
- connect("test-instance", "test-database")
+ self.assertIs(connection.instance, instance)
+ client.instance.assert_called_once_with(INSTANCE)
- exists_mock.assert_called_once_with()
+ self.assertIs(connection.database, database)
+ instance.database.assert_called_once_with(DATABASE, pool=None)
+ # Database constructs its own pool
+ self.assertIsNotNone(connection.database._pool)
- def test_database_not_found(self):
+ def test_w_explicit(self, mock_client):
+ from google.cloud.spanner_v1.pool import AbstractSessionPool
from google.cloud.spanner_dbapi import connect
+ from google.cloud.spanner_dbapi import Connection
+ from google.cloud.spanner_dbapi.version import PY_VERSION
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=False,
- ) as exists_mock:
-
- with self.assertRaises(ValueError):
- connect("test-instance", "test-database")
-
- exists_mock.assert_called_once_with()
+ credentials = _make_credentials()
+ pool = mock.create_autospec(AbstractSessionPool)
+ client = mock_client.return_value
+ instance = client.instance.return_value
+ database = instance.database.return_value
- def test_connect_instance_id(self):
- from google.cloud.spanner_dbapi import connect
- from google.cloud.spanner_dbapi import Connection
+ connection = connect(
+ INSTANCE, DATABASE, PROJECT, credentials, pool=pool, user_agent=USER_AGENT,
+ )
- INSTANCE = "test-instance"
+ self.assertIsInstance(connection, Connection)
- with mock.patch(
- "google.cloud.spanner_v1.client.Client.instance"
- ) as instance_mock:
- connection = connect(INSTANCE, "test-database")
+ mock_client.assert_called_once_with(
+ project=PROJECT, credentials=credentials, client_info=mock.ANY
+ )
+ client_info = mock_client.call_args_list[0][1]["client_info"]
+ self.assertEqual(client_info.user_agent, USER_AGENT)
+ self.assertEqual(client_info.python_version, PY_VERSION)
- instance_mock.assert_called_once_with(INSTANCE)
+ self.assertIs(connection.instance, instance)
+ client.instance.assert_called_once_with(INSTANCE)
- self.assertIsInstance(connection, Connection)
+ self.assertIs(connection.database, database)
+ instance.database.assert_called_once_with(DATABASE, pool=pool)
- def test_connect_database_id(self):
+ def test_w_instance_not_found(self, mock_client):
from google.cloud.spanner_dbapi import connect
- from google.cloud.spanner_dbapi import Connection
-
- DATABASE = "test-database"
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.database"
- ) as database_mock:
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- connection = connect("test-instance", DATABASE)
+ client = mock_client.return_value
+ instance = client.instance.return_value
+ instance.exists.return_value = False
- database_mock.assert_called_once_with(DATABASE, pool=mock.ANY)
+ with self.assertRaises(ValueError):
+ connect(INSTANCE, DATABASE)
- self.assertIsInstance(connection, Connection)
+ instance.exists.assert_called_once_with()
- def test_default_sessions_pool(self):
+ def test_w_database_not_found(self, mock_client):
from google.cloud.spanner_dbapi import connect
- with mock.patch("google.cloud.spanner_v1.instance.Instance.database"):
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ client = mock_client.return_value
+ instance = client.instance.return_value
+ database = instance.database.return_value
+ database.exists.return_value = False
- self.assertIsNotNone(connection.database._pool)
+ with self.assertRaises(ValueError):
+ connect(INSTANCE, DATABASE)
- def test_sessions_pool(self):
+ database.exists.assert_called_once_with()
+
+ def test_w_credential_file_path(self, mock_client):
from google.cloud.spanner_dbapi import connect
- from google.cloud.spanner_v1.pool import FixedSizePool
+ from google.cloud.spanner_dbapi import Connection
+ from google.cloud.spanner_dbapi.version import PY_VERSION
- database_id = "test-database"
- pool = FixedSizePool()
+ credentials_path = "dummy/file/path.json"
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.database"
- ) as database_mock:
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- connect("test-instance", database_id, pool=pool)
- database_mock.assert_called_once_with(database_id, pool=pool)
+ connection = connect(
+ INSTANCE,
+ DATABASE,
+ PROJECT,
+ credentials=credentials_path,
+ user_agent=USER_AGENT,
+ )
- def test_connect_w_credential_file_path(self):
- from google.cloud.spanner_dbapi import connect
- from google.cloud.spanner_dbapi import Connection
+ self.assertIsInstance(connection, Connection)
- PROJECT = "test-project"
- USER_AGENT = "user-agent"
- credentials = "dummy/file/path.json"
-
- with mock.patch(
- "google.cloud.spanner_v1.Client.from_service_account_json"
- ) as client_mock:
- connection = connect(
- "test-instance",
- "test-database",
- PROJECT,
- credentials=credentials,
- user_agent=USER_AGENT,
- )
-
- self.assertIsInstance(connection, Connection)
-
- client_mock.assert_called_once_with(
- credentials, project=PROJECT, client_info=mock.ANY
- )
+ factory = mock_client.from_service_account_json
+ factory.assert_called_once_with(
+ credentials_path, project=PROJECT, client_info=mock.ANY,
+ )
+ client_info = factory.call_args_list[0][1]["client_info"]
+ self.assertEqual(client_info.user_agent, USER_AGENT)
+ self.assertEqual(client_info.python_version, PY_VERSION)
diff --git a/tests/unit/spanner_dbapi/test_connection.py b/tests/unit/spanner_dbapi/test_connection.py
index 772ac35032..48129dcc2f 100644
--- a/tests/unit/spanner_dbapi/test_connection.py
+++ b/tests/unit/spanner_dbapi/test_connection.py
@@ -18,6 +18,11 @@
import unittest
import warnings
+PROJECT = "test-project"
+INSTANCE = "test-instance"
+DATABASE = "test-database"
+USER_AGENT = "user-agent"
+
def _make_credentials():
from google.auth import credentials
@@ -29,78 +34,62 @@ class _CredentialsWithScopes(credentials.Credentials, credentials.Scoped):
class TestConnection(unittest.TestCase):
-
- PROJECT = "test-project"
- INSTANCE = "test-instance"
- DATABASE = "test-database"
- USER_AGENT = "user-agent"
- CREDENTIALS = _make_credentials()
-
def _get_client_info(self):
from google.api_core.gapic_v1.client_info import ClientInfo
- return ClientInfo(user_agent=self.USER_AGENT)
+ return ClientInfo(user_agent=USER_AGENT)
def _make_connection(self):
from google.cloud.spanner_dbapi import Connection
from google.cloud.spanner_v1.instance import Instance
# We don't need a real Client object to test the constructor
- instance = Instance(self.INSTANCE, client=None)
- database = instance.database(self.DATABASE)
+ instance = Instance(INSTANCE, client=None)
+ database = instance.database(DATABASE)
return Connection(instance, database)
- def test_autocommit_setter_transaction_not_started(self):
+ @mock.patch("google.cloud.spanner_dbapi.connection.Connection.commit")
+ def test_autocommit_setter_transaction_not_started(self, mock_commit):
connection = self._make_connection()
- with mock.patch(
- "google.cloud.spanner_dbapi.connection.Connection.commit"
- ) as mock_commit:
- connection.autocommit = True
- mock_commit.assert_not_called()
- self.assertTrue(connection._autocommit)
+ connection.autocommit = True
- with mock.patch(
- "google.cloud.spanner_dbapi.connection.Connection.commit"
- ) as mock_commit:
- connection.autocommit = False
- mock_commit.assert_not_called()
- self.assertFalse(connection._autocommit)
+ mock_commit.assert_not_called()
+ self.assertTrue(connection._autocommit)
- def test_autocommit_setter_transaction_started(self):
+ connection.autocommit = False
+ mock_commit.assert_not_called()
+ self.assertFalse(connection._autocommit)
+
+ @mock.patch("google.cloud.spanner_dbapi.connection.Connection.commit")
+ def test_autocommit_setter_transaction_started(self, mock_commit):
connection = self._make_connection()
+ connection._transaction = mock.Mock(committed=False, rolled_back=False)
- with mock.patch(
- "google.cloud.spanner_dbapi.connection.Connection.commit"
- ) as mock_commit:
- connection._transaction = mock.Mock(committed=False, rolled_back=False)
+ connection.autocommit = True
- connection.autocommit = True
- mock_commit.assert_called_once()
- self.assertTrue(connection._autocommit)
+ mock_commit.assert_called_once()
+ self.assertTrue(connection._autocommit)
- def test_autocommit_setter_transaction_started_commited_rolled_back(self):
+ @mock.patch("google.cloud.spanner_dbapi.connection.Connection.commit")
+ def test_autocommit_setter_transaction_started_commited_rolled_back(
+ self, mock_commit
+ ):
connection = self._make_connection()
- with mock.patch(
- "google.cloud.spanner_dbapi.connection.Connection.commit"
- ) as mock_commit:
- connection._transaction = mock.Mock(committed=True, rolled_back=False)
+ connection._transaction = mock.Mock(committed=True, rolled_back=False)
- connection.autocommit = True
- mock_commit.assert_not_called()
- self.assertTrue(connection._autocommit)
+ connection.autocommit = True
+ mock_commit.assert_not_called()
+ self.assertTrue(connection._autocommit)
connection.autocommit = False
- with mock.patch(
- "google.cloud.spanner_dbapi.connection.Connection.commit"
- ) as mock_commit:
- connection._transaction = mock.Mock(committed=False, rolled_back=True)
+ connection._transaction = mock.Mock(committed=False, rolled_back=True)
- connection.autocommit = True
- mock_commit.assert_not_called()
- self.assertTrue(connection._autocommit)
+ connection.autocommit = True
+ mock_commit.assert_not_called()
+ self.assertTrue(connection._autocommit)
def test_property_database(self):
from google.cloud.spanner_v1.database import Database
@@ -116,76 +105,92 @@ def test_property_instance(self):
self.assertIsInstance(connection.instance, Instance)
self.assertEqual(connection.instance, connection._instance)
- def test__session_checkout(self):
+ @staticmethod
+ def _make_pool():
+ from google.cloud.spanner_v1.pool import AbstractSessionPool
+
+ return mock.create_autospec(AbstractSessionPool)
+
+ @mock.patch("google.cloud.spanner_v1.database.Database")
+ def test__session_checkout(self, mock_database):
from google.cloud.spanner_dbapi import Connection
- with mock.patch("google.cloud.spanner_v1.database.Database") as mock_database:
- mock_database._pool = mock.MagicMock()
- mock_database._pool.get = mock.MagicMock(return_value="db_session_pool")
- connection = Connection(self.INSTANCE, mock_database)
+ pool = self._make_pool()
+ mock_database._pool = pool
+ connection = Connection(INSTANCE, mock_database)
- connection._session_checkout()
- mock_database._pool.get.assert_called_once_with()
- self.assertEqual(connection._session, "db_session_pool")
+ connection._session_checkout()
+ pool.get.assert_called_once_with()
+ self.assertEqual(connection._session, pool.get.return_value)
- connection._session = "db_session"
- connection._session_checkout()
- self.assertEqual(connection._session, "db_session")
+ connection._session = "db_session"
+ connection._session_checkout()
+ self.assertEqual(connection._session, "db_session")
- def test__release_session(self):
+ @mock.patch("google.cloud.spanner_v1.database.Database")
+ def test__release_session(self, mock_database):
from google.cloud.spanner_dbapi import Connection
- with mock.patch("google.cloud.spanner_v1.database.Database") as mock_database:
- mock_database._pool = mock.MagicMock()
- mock_database._pool.put = mock.MagicMock()
- connection = Connection(self.INSTANCE, mock_database)
- connection._session = "session"
+ pool = self._make_pool()
+ mock_database._pool = pool
+ connection = Connection(INSTANCE, mock_database)
+ connection._session = "session"
- connection._release_session()
- mock_database._pool.put.assert_called_once_with("session")
- self.assertIsNone(connection._session)
+ connection._release_session()
+ pool.put.assert_called_once_with("session")
+ self.assertIsNone(connection._session)
def test_transaction_checkout(self):
from google.cloud.spanner_dbapi import Connection
- connection = Connection(self.INSTANCE, self.DATABASE)
- connection._session_checkout = mock_checkout = mock.MagicMock(autospec=True)
+ connection = Connection(INSTANCE, DATABASE)
+ mock_checkout = mock.MagicMock(autospec=True)
+ connection._session_checkout = mock_checkout
+
connection.transaction_checkout()
+
mock_checkout.assert_called_once_with()
- connection._transaction = mock_transaction = mock.MagicMock()
+ mock_transaction = mock.MagicMock()
mock_transaction.committed = mock_transaction.rolled_back = False
+ connection._transaction = mock_transaction
+
self.assertEqual(connection.transaction_checkout(), mock_transaction)
connection._autocommit = True
self.assertIsNone(connection.transaction_checkout())
- def test_close(self):
- from google.cloud.spanner_dbapi import connect, InterfaceError
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_close(self, mock_client):
+ from google.cloud.spanner_dbapi import connect
+ from google.cloud.spanner_dbapi import InterfaceError
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
self.assertFalse(connection.is_closed)
+
connection.close()
+
self.assertTrue(connection.is_closed)
with self.assertRaises(InterfaceError):
connection.cursor()
- connection._transaction = mock_transaction = mock.MagicMock()
+ mock_transaction = mock.MagicMock()
mock_transaction.committed = mock_transaction.rolled_back = False
- mock_transaction.rollback = mock_rollback = mock.MagicMock()
+ connection._transaction = mock_transaction
+
+ mock_rollback = mock.MagicMock()
+ mock_transaction.rollback = mock_rollback
+
connection.close()
+
mock_rollback.assert_called_once_with()
+
connection._transaction = mock.MagicMock()
connection._own_pool = False
connection.close()
+
self.assertTrue(connection.is_closed)
@mock.patch.object(warnings, "warn")
@@ -193,13 +198,14 @@ def test_commit(self, mock_warn):
from google.cloud.spanner_dbapi import Connection
from google.cloud.spanner_dbapi.connection import AUTOCOMMIT_MODE_WARNING
- connection = Connection(self.INSTANCE, self.DATABASE)
+ connection = Connection(INSTANCE, DATABASE)
with mock.patch(
"google.cloud.spanner_dbapi.connection.Connection._release_session"
) as mock_release:
connection.commit()
- mock_release.assert_not_called()
+
+ mock_release.assert_not_called()
connection._transaction = mock_transaction = mock.MagicMock(
rolled_back=False, committed=False
@@ -210,8 +216,9 @@ def test_commit(self, mock_warn):
"google.cloud.spanner_dbapi.connection.Connection._release_session"
) as mock_release:
connection.commit()
- mock_commit.assert_called_once_with()
- mock_release.assert_called_once_with()
+
+ mock_commit.assert_called_once_with()
+ mock_release.assert_called_once_with()
connection._autocommit = True
connection.commit()
@@ -224,23 +231,27 @@ def test_rollback(self, mock_warn):
from google.cloud.spanner_dbapi import Connection
from google.cloud.spanner_dbapi.connection import AUTOCOMMIT_MODE_WARNING
- connection = Connection(self.INSTANCE, self.DATABASE)
+ connection = Connection(INSTANCE, DATABASE)
with mock.patch(
"google.cloud.spanner_dbapi.connection.Connection._release_session"
) as mock_release:
connection.rollback()
- mock_release.assert_not_called()
- connection._transaction = mock_transaction = mock.MagicMock()
- mock_transaction.rollback = mock_rollback = mock.MagicMock()
+ mock_release.assert_not_called()
+
+ mock_transaction = mock.MagicMock()
+ connection._transaction = mock_transaction
+ mock_rollback = mock.MagicMock()
+ mock_transaction.rollback = mock_rollback
with mock.patch(
"google.cloud.spanner_dbapi.connection.Connection._release_session"
) as mock_release:
connection.rollback()
- mock_rollback.assert_called_once_with()
- mock_release.assert_called_once_with()
+
+ mock_rollback.assert_called_once_with()
+ mock_release.assert_called_once_with()
connection._autocommit = True
connection.rollback()
@@ -248,101 +259,34 @@ def test_rollback(self, mock_warn):
AUTOCOMMIT_MODE_WARNING, UserWarning, stacklevel=2
)
- def test_run_prior_DDL_statements(self):
+ @mock.patch("google.cloud.spanner_v1.database.Database", autospec=True)
+ def test_run_prior_DDL_statements(self, mock_database):
from google.cloud.spanner_dbapi import Connection, InterfaceError
- with mock.patch(
- "google.cloud.spanner_v1.database.Database", autospec=True
- ) as mock_database:
- connection = Connection(self.INSTANCE, mock_database)
+ connection = Connection(INSTANCE, mock_database)
- connection.run_prior_DDL_statements()
- mock_database.update_ddl.assert_not_called()
+ connection.run_prior_DDL_statements()
+ mock_database.update_ddl.assert_not_called()
- ddl = ["ddl"]
- connection._ddl_statements = ddl
+ ddl = ["ddl"]
+ connection._ddl_statements = ddl
- connection.run_prior_DDL_statements()
- mock_database.update_ddl.assert_called_once_with(ddl)
+ connection.run_prior_DDL_statements()
+ mock_database.update_ddl.assert_called_once_with(ddl)
- connection.is_closed = True
+ connection.is_closed = True
- with self.assertRaises(InterfaceError):
- connection.run_prior_DDL_statements()
+ with self.assertRaises(InterfaceError):
+ connection.run_prior_DDL_statements()
- def test_context(self):
+ def test_as_context_manager(self):
connection = self._make_connection()
with connection as conn:
self.assertEqual(conn, connection)
self.assertTrue(connection.is_closed)
- def test_connect(self):
- from google.cloud.spanner_dbapi import Connection, connect
-
- with mock.patch("google.cloud.spanner_v1.Client"):
- with mock.patch(
- "google.api_core.gapic_v1.client_info.ClientInfo",
- return_value=self._get_client_info(),
- ):
- connection = connect(
- self.INSTANCE,
- self.DATABASE,
- self.PROJECT,
- self.CREDENTIALS,
- self.USER_AGENT,
- )
- self.assertIsInstance(connection, Connection)
-
- def test_connect_instance_not_found(self):
- from google.cloud.spanner_dbapi import connect
-
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=False
- ):
- with self.assertRaises(ValueError):
- connect("test-instance", "test-database")
-
- def test_connect_database_not_found(self):
- from google.cloud.spanner_dbapi import connect
-
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=False
- ):
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True
- ):
- with self.assertRaises(ValueError):
- connect("test-instance", "test-database")
-
- def test_default_sessions_pool(self):
- from google.cloud.spanner_dbapi import connect
-
- with mock.patch("google.cloud.spanner_v1.instance.Instance.database"):
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True
- ):
- connection = connect("test-instance", "test-database")
-
- self.assertIsNotNone(connection.database._pool)
-
- def test_sessions_pool(self):
- from google.cloud.spanner_dbapi import connect
- from google.cloud.spanner_v1.pool import FixedSizePool
-
- database_id = "test-database"
- pool = FixedSizePool()
-
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.database"
- ) as database_mock:
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True
- ):
- connect("test-instance", database_id, pool=pool)
- database_mock.assert_called_once_with(database_id, pool=pool)
-
- def test_run_statement_remember_statements(self):
+ def test_run_statement_wo_retried(self):
"""Check that Connection remembers executed statements."""
from google.cloud.spanner_dbapi.checksum import ResultsChecksum
from google.cloud.spanner_dbapi.cursor import Statement
@@ -352,19 +296,16 @@ def test_run_statement_remember_statements(self):
param_types = {"a1": str}
connection = self._make_connection()
-
+ connection.transaction_checkout = mock.Mock()
statement = Statement(sql, params, param_types, ResultsChecksum(), False)
- with mock.patch(
- "google.cloud.spanner_dbapi.connection.Connection.transaction_checkout"
- ):
- connection.run_statement(statement)
+ connection.run_statement(statement)
self.assertEqual(connection._statements[0].sql, sql)
self.assertEqual(connection._statements[0].params, params)
self.assertEqual(connection._statements[0].param_types, param_types)
self.assertIsInstance(connection._statements[0].checksum, ResultsChecksum)
- def test_run_statement_dont_remember_retried_statements(self):
+ def test_run_statement_w_retried(self):
"""Check that Connection doesn't remember re-executed statements."""
from google.cloud.spanner_dbapi.checksum import ResultsChecksum
from google.cloud.spanner_dbapi.cursor import Statement
@@ -374,12 +315,9 @@ def test_run_statement_dont_remember_retried_statements(self):
param_types = {"a1": str}
connection = self._make_connection()
-
+ connection.transaction_checkout = mock.Mock()
statement = Statement(sql, params, param_types, ResultsChecksum(), False)
- with mock.patch(
- "google.cloud.spanner_dbapi.connection.Connection.transaction_checkout"
- ):
- connection.run_statement(statement, retried=True)
+ connection.run_statement(statement, retried=True)
self.assertEqual(len(connection._statements), 0)
@@ -393,12 +331,10 @@ def test_run_statement_w_heterogenous_insert_statements(self):
param_types = None
connection = self._make_connection()
-
+ connection.transaction_checkout = mock.Mock()
statement = Statement(sql, params, param_types, ResultsChecksum(), True)
- with mock.patch(
- "google.cloud.spanner_dbapi.connection.Connection.transaction_checkout"
- ):
- connection.run_statement(statement, retried=True)
+
+ connection.run_statement(statement, retried=True)
self.assertEqual(len(connection._statements), 0)
@@ -412,16 +348,15 @@ def test_run_statement_w_homogeneous_insert_statements(self):
param_types = {"f1": str, "f2": str}
connection = self._make_connection()
-
+ connection.transaction_checkout = mock.Mock()
statement = Statement(sql, params, param_types, ResultsChecksum(), True)
- with mock.patch(
- "google.cloud.spanner_dbapi.connection.Connection.transaction_checkout"
- ):
- connection.run_statement(statement, retried=True)
+
+ connection.run_statement(statement, retried=True)
self.assertEqual(len(connection._statements), 0)
- def test_clear_statements_on_commit(self):
+ @mock.patch("google.cloud.spanner_v1.transaction.Transaction")
+ def test_commit_clears_statements(self, mock_transaction):
"""
Check that all the saved statements are
cleared when the transaction is committed.
@@ -432,12 +367,12 @@ def test_clear_statements_on_commit(self):
self.assertEqual(len(connection._statements), 2)
- with mock.patch("google.cloud.spanner_v1.transaction.Transaction.commit"):
- connection.commit()
+ connection.commit()
self.assertEqual(len(connection._statements), 0)
- def test_clear_statements_on_rollback(self):
+ @mock.patch("google.cloud.spanner_v1.transaction.Transaction")
+ def test_rollback_clears_statements(self, mock_transaction):
"""
Check that all the saved statements are
cleared when the transaction is rolled back.
@@ -448,40 +383,36 @@ def test_clear_statements_on_rollback(self):
self.assertEqual(len(connection._statements), 2)
- with mock.patch("google.cloud.spanner_v1.transaction.Transaction.commit"):
- connection.rollback()
+ connection.rollback()
self.assertEqual(len(connection._statements), 0)
- def test_retry_transaction(self):
+ def test_retry_transaction_w_checksum_match(self):
"""Check retrying an aborted transaction."""
from google.cloud.spanner_dbapi.checksum import ResultsChecksum
from google.cloud.spanner_dbapi.cursor import Statement
row = ["field1", "field2"]
connection = self._make_connection()
-
checksum = ResultsChecksum()
checksum.consume_result(row)
+
retried_checkum = ResultsChecksum()
+ run_mock = connection.run_statement = mock.Mock()
+ run_mock.return_value = ([row], retried_checkum)
statement = Statement("SELECT 1", [], {}, checksum, False)
connection._statements.append(statement)
with mock.patch(
- "google.cloud.spanner_dbapi.connection.Connection.run_statement",
- return_value=([row], retried_checkum),
- ) as run_mock:
- with mock.patch(
- "google.cloud.spanner_dbapi.connection._compare_checksums"
- ) as compare_mock:
- connection.retry_transaction()
-
- compare_mock.assert_called_with(checksum, retried_checkum)
+ "google.cloud.spanner_dbapi.connection._compare_checksums"
+ ) as compare_mock:
+ connection.retry_transaction()
- run_mock.assert_called_with(statement, retried=True)
+ compare_mock.assert_called_with(checksum, retried_checkum)
+ run_mock.assert_called_with(statement, retried=True)
- def test_retry_transaction_checksum_mismatch(self):
+ def test_retry_transaction_w_checksum_mismatch(self):
"""
Check retrying an aborted transaction
with a results checksum mismatch.
@@ -497,18 +428,17 @@ def test_retry_transaction_checksum_mismatch(self):
checksum = ResultsChecksum()
checksum.consume_result(row)
retried_checkum = ResultsChecksum()
+ run_mock = connection.run_statement = mock.Mock()
+ run_mock.return_value = ([retried_row], retried_checkum)
statement = Statement("SELECT 1", [], {}, checksum, False)
connection._statements.append(statement)
- with mock.patch(
- "google.cloud.spanner_dbapi.connection.Connection.run_statement",
- return_value=([retried_row], retried_checkum),
- ):
- with self.assertRaises(RetryAborted):
- connection.retry_transaction()
+ with self.assertRaises(RetryAborted):
+ connection.retry_transaction()
- def test_commit_retry_aborted_statements(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_commit_retry_aborted_statements(self, mock_client):
"""Check that retried transaction executing the same statements."""
from google.api_core.exceptions import Aborted
from google.cloud.spanner_dbapi.checksum import ResultsChecksum
@@ -516,13 +446,8 @@ def test_commit_retry_aborted_statements(self):
from google.cloud.spanner_dbapi.cursor import Statement
row = ["field1", "field2"]
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
cursor._checksum = ResultsChecksum()
@@ -530,19 +455,15 @@ def test_commit_retry_aborted_statements(self):
statement = Statement("SELECT 1", [], {}, cursor._checksum, False)
connection._statements.append(statement)
- connection._transaction = mock.Mock(rolled_back=False, committed=False)
+ mock_transaction = mock.Mock(rolled_back=False, committed=False)
+ connection._transaction = mock_transaction
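+ # the first commit attempt raises Aborted; the retried commit succeeds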
+ mock_transaction.commit.side_effect = [Aborted("Aborted"), None]
+ run_mock = connection.run_statement = mock.Mock()
+ run_mock.return_value = ([row], ResultsChecksum())
- with mock.patch.object(
- connection._transaction, "commit", side_effect=(Aborted("Aborted"), None),
- ):
- with mock.patch(
- "google.cloud.spanner_dbapi.connection.Connection.run_statement",
- return_value=([row], ResultsChecksum()),
- ) as run_mock:
-
- connection.commit()
+ connection.commit()
- run_mock.assert_called_with(statement, retried=True)
+ run_mock.assert_called_with(statement, retried=True)
def test_retry_transaction_drop_transaction(self):
"""
@@ -558,7 +479,8 @@ def test_retry_transaction_drop_transaction(self):
connection.retry_transaction()
self.assertIsNone(connection._transaction)
- def test_retry_aborted_retry(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_retry_aborted_retry(self, mock_client):
"""
Check that if a retried transaction fails,
the connection will retry it once again.
@@ -570,13 +492,7 @@ def test_retry_aborted_retry(self):
row = ["field1", "field2"]
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
cursor._checksum = ResultsChecksum()
@@ -584,27 +500,19 @@ def test_retry_aborted_retry(self):
statement = Statement("SELECT 1", [], {}, cursor._checksum, False)
connection._statements.append(statement)
-
metadata_mock = mock.Mock()
metadata_mock.trailing_metadata.return_value = {}
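+ # the first run_statement call raises Aborted; the retry returns the row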
+ run_mock = connection.run_statement = mock.Mock()
+ run_mock.side_effect = [
+ Aborted("Aborted", errors=[metadata_mock]),
+ ([row], ResultsChecksum()),
+ ]
- with mock.patch.object(
- connection,
- "run_statement",
- side_effect=(
- Aborted("Aborted", errors=[metadata_mock]),
- ([row], ResultsChecksum()),
- ),
- ) as retry_mock:
-
- connection.retry_transaction()
+ connection.retry_transaction()
- retry_mock.assert_has_calls(
- (
- mock.call(statement, retried=True),
- mock.call(statement, retried=True),
- )
- )
+ run_mock.assert_has_calls(
+ (mock.call(statement, retried=True), mock.call(statement, retried=True),)
+ )
def test_retry_transaction_raise_max_internal_retries(self):
"""Check retrying raise an error of max internal retries."""
@@ -627,7 +535,8 @@ def test_retry_transaction_raise_max_internal_retries(self):
conn.MAX_INTERNAL_RETRIES = 50
- def test_retry_aborted_retry_without_delay(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_retry_aborted_retry_without_delay(self, mock_client):
"""
Check that if a retried transaction fails,
the connection will retry it once again.
@@ -639,13 +548,7 @@ def test_retry_aborted_retry_without_delay(self):
row = ["field1", "field2"]
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
cursor._checksum = ResultsChecksum()
@@ -653,29 +556,20 @@ def test_retry_aborted_retry_without_delay(self):
statement = Statement("SELECT 1", [], {}, cursor._checksum, False)
connection._statements.append(statement)
-
metadata_mock = mock.Mock()
metadata_mock.trailing_metadata.return_value = {}
+ run_mock = connection.run_statement = mock.Mock()
+ run_mock.side_effect = [
+ Aborted("Aborted", errors=[metadata_mock]),
+ ([row], ResultsChecksum()),
+ ]
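+ # stub the retry delay so the retried transaction runs without waiting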
+ connection._get_retry_delay = mock.Mock(return_value=False)
- with mock.patch(
- "google.cloud.spanner_dbapi.connection.Connection.run_statement",
- side_effect=(
- Aborted("Aborted", errors=[metadata_mock]),
- ([row], ResultsChecksum()),
- ),
- ) as retry_mock:
- with mock.patch(
- "google.cloud.spanner_dbapi.connection._get_retry_delay",
- return_value=False,
- ):
- connection.retry_transaction()
-
- retry_mock.assert_has_calls(
- (
- mock.call(statement, retried=True),
- mock.call(statement, retried=True),
- )
- )
+ connection.retry_transaction()
+
+ run_mock.assert_has_calls(
+ (mock.call(statement, retried=True), mock.call(statement, retried=True),)
+ )
def test_retry_transaction_w_multiple_statement(self):
"""Check retrying an aborted transaction."""
@@ -693,19 +587,17 @@ def test_retry_transaction_w_multiple_statement(self):
statement1 = Statement("SELECT 2", [], {}, checksum, False)
connection._statements.append(statement)
connection._statements.append(statement1)
+ run_mock = connection.run_statement = mock.Mock()
+ run_mock.return_value = ([row], retried_checkum)
with mock.patch(
- "google.cloud.spanner_dbapi.connection.Connection.run_statement",
- return_value=([row], retried_checkum),
- ) as run_mock:
- with mock.patch(
- "google.cloud.spanner_dbapi.connection._compare_checksums"
- ) as compare_mock:
- connection.retry_transaction()
+ "google.cloud.spanner_dbapi.connection._compare_checksums"
+ ) as compare_mock:
+ connection.retry_transaction()
- compare_mock.assert_called_with(checksum, retried_checkum)
+ compare_mock.assert_called_with(checksum, retried_checkum)
- run_mock.assert_called_with(statement1, retried=True)
+ run_mock.assert_called_with(statement1, retried=True)
def test_retry_transaction_w_empty_response(self):
"""Check retrying an aborted transaction."""
@@ -721,16 +613,14 @@ def test_retry_transaction_w_empty_response(self):
statement = Statement("SELECT 1", [], {}, checksum, False)
connection._statements.append(statement)
+ run_mock = connection.run_statement = mock.Mock()
+ run_mock.return_value = ([row], retried_checkum)
with mock.patch(
- "google.cloud.spanner_dbapi.connection.Connection.run_statement",
- return_value=(row, retried_checkum),
- ) as run_mock:
- with mock.patch(
- "google.cloud.spanner_dbapi.connection._compare_checksums"
- ) as compare_mock:
- connection.retry_transaction()
+ "google.cloud.spanner_dbapi.connection._compare_checksums"
+ ) as compare_mock:
+ connection.retry_transaction()
- compare_mock.assert_called_with(checksum, retried_checkum)
+ compare_mock.assert_called_with(checksum, retried_checkum)
- run_mock.assert_called_with(statement, retried=True)
+ run_mock.assert_called_with(statement, retried=True)
diff --git a/tests/unit/spanner_dbapi/test_cursor.py b/tests/unit/spanner_dbapi/test_cursor.py
index 5b1cf12138..d1a20c2ed2 100644
--- a/tests/unit/spanner_dbapi/test_cursor.py
+++ b/tests/unit/spanner_dbapi/test_cursor.py
@@ -70,16 +70,11 @@ def test_callproc(self):
with self.assertRaises(InterfaceError):
cursor.callproc(procname=None)
- def test_close(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_close(self, mock_client):
from google.cloud.spanner_dbapi import connect, InterfaceError
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True
- ):
- connection = connect(self.INSTANCE, self.DATABASE)
+ connection = connect(self.INSTANCE, self.DATABASE)
cursor = connection.cursor()
self.assertFalse(cursor.is_closed)
@@ -87,6 +82,7 @@ def test_close(self):
cursor.close()
self.assertTrue(cursor.is_closed)
+
with self.assertRaises(InterfaceError):
cursor.execute("SELECT * FROM database")
@@ -276,17 +272,12 @@ def test_execute_internal_server_error(self):
with self.assertRaises(OperationalError):
cursor.execute(sql="sql")
- def test_executemany_on_closed_cursor(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_executemany_on_closed_cursor(self, mock_client):
from google.cloud.spanner_dbapi import InterfaceError
from google.cloud.spanner_dbapi import connect
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
cursor.close()
@@ -294,35 +285,25 @@ def test_executemany_on_closed_cursor(self):
with self.assertRaises(InterfaceError):
cursor.executemany("""SELECT * FROM table1 WHERE "col1" = @a1""", ())
- def test_executemany_DLL(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_executemany_DLL(self, mock_client):
from google.cloud.spanner_dbapi import connect, ProgrammingError
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
with self.assertRaises(ProgrammingError):
cursor.executemany("""DROP DATABASE database_name""", ())
- def test_executemany(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_executemany(self, mock_client):
from google.cloud.spanner_dbapi import connect
operation = """SELECT * FROM table1 WHERE "col1" = @a1"""
params_seq = ((1,), (2,))
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
cursor._result_set = [1, 2, 3]
@@ -561,7 +542,8 @@ def test_get_table_column_schema(self):
)
self.assertEqual(result, expected)
- def test_peek_iterator_aborted(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_peek_iterator_aborted(self, mock_client):
"""
Check that an Aborted exception is retried if it happens
while streaming the first element with a PeekIterator.
@@ -569,13 +551,7 @@ def test_peek_iterator_aborted(self):
from google.api_core.exceptions import Aborted
from google.cloud.spanner_dbapi.connection import connect
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
with mock.patch(
@@ -593,7 +569,8 @@ def test_peek_iterator_aborted(self):
retry_mock.assert_called_with()
- def test_peek_iterator_aborted_autocommit(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_peek_iterator_aborted_autocommit(self, mock_client):
"""
Check that an Aborted exception is retried if it happens while
streaming the first element with a PeekIterator in autocommit mode.
@@ -601,13 +578,7 @@ def test_peek_iterator_aborted_autocommit(self):
from google.api_core.exceptions import Aborted
from google.cloud.spanner_dbapi.connection import connect
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
connection.autocommit = True
cursor = connection.cursor()
@@ -629,19 +600,14 @@ def test_peek_iterator_aborted_autocommit(self):
retry_mock.assert_called_with()
- def test_fetchone_retry_aborted(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_fetchone_retry_aborted(self, mock_client):
"""Check that aborted fetch re-executing transaction."""
from google.api_core.exceptions import Aborted
from google.cloud.spanner_dbapi.checksum import ResultsChecksum
from google.cloud.spanner_dbapi.connection import connect
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
cursor._checksum = ResultsChecksum()
@@ -658,7 +624,8 @@ def test_fetchone_retry_aborted(self):
retry_mock.assert_called_with()
- def test_fetchone_retry_aborted_statements(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_fetchone_retry_aborted_statements(self, mock_client):
"""Check that retried transaction executing the same statements."""
from google.api_core.exceptions import Aborted
from google.cloud.spanner_dbapi.checksum import ResultsChecksum
@@ -666,13 +633,7 @@ def test_fetchone_retry_aborted_statements(self):
from google.cloud.spanner_dbapi.cursor import Statement
row = ["field1", "field2"]
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
cursor._checksum = ResultsChecksum()
@@ -694,7 +655,8 @@ def test_fetchone_retry_aborted_statements(self):
run_mock.assert_called_with(statement, retried=True)
- def test_fetchone_retry_aborted_statements_checksums_mismatch(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_fetchone_retry_aborted_statements_checksums_mismatch(self, mock_client):
"""Check transaction retrying with underlying data being changed."""
from google.api_core.exceptions import Aborted
from google.cloud.spanner_dbapi.exceptions import RetryAborted
@@ -705,13 +667,7 @@ def test_fetchone_retry_aborted_statements_checksums_mismatch(self):
row = ["field1", "field2"]
row2 = ["updated_field1", "field2"]
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
cursor._checksum = ResultsChecksum()
@@ -734,19 +690,14 @@ def test_fetchone_retry_aborted_statements_checksums_mismatch(self):
run_mock.assert_called_with(statement, retried=True)
- def test_fetchall_retry_aborted(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_fetchall_retry_aborted(self, mock_client):
"""Check that aborted fetch re-executing transaction."""
from google.api_core.exceptions import Aborted
from google.cloud.spanner_dbapi.checksum import ResultsChecksum
from google.cloud.spanner_dbapi.connection import connect
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
cursor._checksum = ResultsChecksum()
@@ -763,7 +714,8 @@ def test_fetchall_retry_aborted(self):
retry_mock.assert_called_with()
- def test_fetchall_retry_aborted_statements(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_fetchall_retry_aborted_statements(self, mock_client):
"""Check that retried transaction executing the same statements."""
from google.api_core.exceptions import Aborted
from google.cloud.spanner_dbapi.checksum import ResultsChecksum
@@ -771,13 +723,7 @@ def test_fetchall_retry_aborted_statements(self):
from google.cloud.spanner_dbapi.cursor import Statement
row = ["field1", "field2"]
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
cursor._checksum = ResultsChecksum()
@@ -798,7 +744,8 @@ def test_fetchall_retry_aborted_statements(self):
run_mock.assert_called_with(statement, retried=True)
- def test_fetchall_retry_aborted_statements_checksums_mismatch(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_fetchall_retry_aborted_statements_checksums_mismatch(self, mock_client):
"""Check transaction retrying with underlying data being changed."""
from google.api_core.exceptions import Aborted
from google.cloud.spanner_dbapi.exceptions import RetryAborted
@@ -809,13 +756,7 @@ def test_fetchall_retry_aborted_statements_checksums_mismatch(self):
row = ["field1", "field2"]
row2 = ["updated_field1", "field2"]
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
cursor._checksum = ResultsChecksum()
@@ -838,19 +779,14 @@ def test_fetchall_retry_aborted_statements_checksums_mismatch(self):
run_mock.assert_called_with(statement, retried=True)
- def test_fetchmany_retry_aborted(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_fetchmany_retry_aborted(self, mock_client):
"""Check that aborted fetch re-executing transaction."""
from google.api_core.exceptions import Aborted
from google.cloud.spanner_dbapi.checksum import ResultsChecksum
from google.cloud.spanner_dbapi.connection import connect
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
cursor._checksum = ResultsChecksum()
@@ -867,7 +803,8 @@ def test_fetchmany_retry_aborted(self):
retry_mock.assert_called_with()
- def test_fetchmany_retry_aborted_statements(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_fetchmany_retry_aborted_statements(self, mock_client):
"""Check that retried transaction executing the same statements."""
from google.api_core.exceptions import Aborted
from google.cloud.spanner_dbapi.checksum import ResultsChecksum
@@ -875,13 +812,7 @@ def test_fetchmany_retry_aborted_statements(self):
from google.cloud.spanner_dbapi.cursor import Statement
row = ["field1", "field2"]
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
cursor._checksum = ResultsChecksum()
@@ -903,7 +834,8 @@ def test_fetchmany_retry_aborted_statements(self):
run_mock.assert_called_with(statement, retried=True)
- def test_fetchmany_retry_aborted_statements_checksums_mismatch(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_fetchmany_retry_aborted_statements_checksums_mismatch(self, mock_client):
"""Check transaction retrying with underlying data being changed."""
from google.api_core.exceptions import Aborted
from google.cloud.spanner_dbapi.exceptions import RetryAborted
@@ -914,13 +846,7 @@ def test_fetchmany_retry_aborted_statements_checksums_mismatch(self):
row = ["field1", "field2"]
row2 = ["updated_field1", "field2"]
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
cursor._checksum = ResultsChecksum()
@@ -943,7 +869,8 @@ def test_fetchmany_retry_aborted_statements_checksums_mismatch(self):
run_mock.assert_called_with(statement, retried=True)
- def test_ddls_with_semicolon(self):
+ @mock.patch("google.cloud.spanner_v1.Client")
+ def test_ddls_with_semicolon(self, mock_client):
"""
Check that one script with several DDL statements separated
by semicolons is split into several DDLs.
@@ -963,13 +890,7 @@ def test_ddls_with_semicolon(self):
"DROP TABLE table_name",
]
- with mock.patch(
- "google.cloud.spanner_v1.instance.Instance.exists", return_value=True,
- ):
- with mock.patch(
- "google.cloud.spanner_v1.database.Database.exists", return_value=True,
- ):
- connection = connect("test-instance", "test-database")
+ connection = connect("test-instance", "test-database")
cursor = connection.cursor()
cursor.execute(
diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py
index 2777fbc9a0..68d8ea6857 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -40,6 +40,7 @@ class TestClient(unittest.TestCase):
PROCESSING_UNITS = 5000
LABELS = {"test": "true"}
TIMEOUT_SECONDS = 80
+ LEADER_OPTIONS = ["leader1", "leader2"]
def _get_target_class(self):
from google.cloud import spanner
@@ -457,7 +458,9 @@ def test_list_instance_configs(self):
instance_config_pbs = ListInstanceConfigsResponse(
instance_configs=[
InstanceConfigPB(
- name=self.CONFIGURATION_NAME, display_name=self.DISPLAY_NAME
+ name=self.CONFIGURATION_NAME,
+ display_name=self.DISPLAY_NAME,
+ leader_options=self.LEADER_OPTIONS,
)
]
)
@@ -473,6 +476,7 @@ def test_list_instance_configs(self):
self.assertIsInstance(instance_config, InstanceConfigPB)
self.assertEqual(instance_config.name, self.CONFIGURATION_NAME)
self.assertEqual(instance_config.display_name, self.DISPLAY_NAME)
+ self.assertEqual(instance_config.leader_options, self.LEADER_OPTIONS)
expected_metadata = (
("google-cloud-resource-prefix", client.project_name),
diff --git a/tests/unit/test_database.py b/tests/unit/test_database.py
index 05e6f2b422..a4b7aa2425 100644
--- a/tests/unit/test_database.py
+++ b/tests/unit/test_database.py
@@ -333,6 +333,13 @@ def test_encryption_info(self):
]
self.assertEqual(database.encryption_info, encryption_info)
+ def test_default_leader(self):
+ instance = _Instance(self.INSTANCE_NAME)
+ pool = _Pool()
+ database = self._make_one(self.DATABASE_ID, instance, pool=pool)
+ default_leader = database._default_leader = "us-east4"
+ self.assertEqual(database.default_leader, default_leader)
+
def test_spanner_api_property_w_scopeless_creds(self):
client = _Client()
@@ -715,6 +722,7 @@ def test_reload_success(self):
kms_key_version="kms_key_version",
)
]
+ default_leader = "us-east4"
api = client.database_admin_api = self._make_database_admin_api()
api.get_database_ddl.return_value = ddl_pb
db_pb = Database(
@@ -725,6 +733,7 @@ def test_reload_success(self):
earliest_version_time=_datetime_to_pb_timestamp(timestamp),
encryption_config=encryption_config,
encryption_info=encryption_info,
+ default_leader=default_leader,
)
api.get_database.return_value = db_pb
instance = _Instance(self.INSTANCE_NAME, client=client)
@@ -740,6 +749,7 @@ def test_reload_success(self):
self.assertEqual(database._ddl_statements, tuple(DDL_STATEMENTS))
self.assertEqual(database._encryption_config, encryption_config)
self.assertEqual(database._encryption_info, encryption_info)
+ self.assertEqual(database._default_leader, default_leader)
api.get_database_ddl.assert_called_once_with(
database=self.DATABASE_NAME,
diff --git a/tests/unit/test_streamed.py b/tests/unit/test_streamed.py
index 66d6f34e2e..de0c8875bf 100644
--- a/tests/unit/test_streamed.py
+++ b/tests/unit/test_streamed.py
@@ -164,6 +164,19 @@ def test__merge_chunk_bool(self):
with self.assertRaises(Unmergeable):
streamed._merge_chunk(chunk)
+ def test__merge_chunk_numeric(self):
+ from google.cloud.spanner_v1 import TypeCode
+
+ iterator = _MockCancellableIterator()
+ streamed = self._make_one(iterator)
+ FIELDS = [self._make_scalar_field("total", TypeCode.NUMERIC)]
+ streamed._metadata = self._make_result_set_metadata(FIELDS)
+ streamed._pending_chunk = self._make_value(u"1234.")
+ chunk = self._make_value(u"5678")
+
+ merged = streamed._merge_chunk(chunk)
+ self.assertEqual(merged.string_value, u"1234.5678")
+
def test__merge_chunk_int64(self):
from google.cloud.spanner_v1 import TypeCode