diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 9ee60f7e4..c07f148f0 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/repo-automation-bots/owlbot-python:latest - digest: sha256:aea14a583128771ae8aefa364e1652f3c56070168ef31beb203534222d842b8b + digest: sha256:0ffe3bdd6c7159692df5f7744da74e5ef19966288a6bf76023e8e04e0c424d7d diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml index 28f71bc30..81373412a 100644 --- a/.github/sync-repo-settings.yaml +++ b/.github/sync-repo-settings.yaml @@ -1,9 +1,9 @@ -# https://github.com/googleapis/repo-automation-bots/tree/master/packages/sync-repo-settings -# Rules for master branch protection +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings +# Rules for main branch protection branchProtectionRules: # Identifies the protection rule pattern. Name of the branch to be protected. -# Defaults to `master` -- pattern: master +# Defaults to `main` +- pattern: main requiresCodeOwnerReviews: true requiresStrictStatusChecks: true requiredStatusCheckContexts: diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 9f144307d..500351238 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -41,7 +41,7 @@ python3 -m pip install --upgrade --quiet nox python3 -m nox --version # If this is a continuous build, send the test log to the FlakyBot. -# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +# See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then cleanup() { chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh index 311a8d54b..8a324c9c7 100755 --- a/.kokoro/test-samples-impl.sh +++ b/.kokoro/test-samples-impl.sh @@ -80,7 +80,7 @@ for file in samples/**/requirements.txt; do EXIT=$? 
# If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. + # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot. if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot $KOKORO_GFILE_DIR/linux_amd64/flakybot diff --git a/CHANGELOG.md b/CHANGELOG.md index e4012c2ec..e98af1181 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,18 @@ [1]: https://pypi.org/project/google-cloud-storage/#history +### [1.42.1](https://www.github.com/googleapis/python-storage/compare/v1.42.0...v1.42.1) (2021-09-07) + + +### Bug Fixes + +* do not append duplicates to user agent string ([#570](https://www.github.com/googleapis/python-storage/issues/570)) ([57cf3a1](https://www.github.com/googleapis/python-storage/commit/57cf3a1f27292939ed097ef8afa3f4392c4b83e0)) + + +### Documentation + +* pass explicit 'client' in '{Blob.Bucket}.from_string' examples ([#545](https://www.github.com/googleapis/python-storage/issues/545)) ([6eff22d](https://www.github.com/googleapis/python-storage/commit/6eff22db0e8c8689208ee52fa815f3ea00675094)) + ## [1.42.0](https://www.github.com/googleapis/python-storage/compare/v1.41.1...v1.42.0) (2021-08-05) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 7cf1283e7..5352f2953 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -50,9 +50,9 @@ You'll have to create a development environment using a Git checkout: # Configure remotes such that you can pull changes from the googleapis/python-storage # repository into your local repository. 
$ git remote add upstream git@github.com:googleapis/python-storage.git - # fetch and merge changes from upstream into master + # fetch and merge changes from upstream into main $ git fetch upstream - $ git merge upstream/master + $ git merge upstream/main Now your local repo is set up such that you will push changes to your GitHub repo, from which you can submit a pull request. @@ -104,12 +104,12 @@ Coding Style variables:: export GOOGLE_CLOUD_TESTING_REMOTE="upstream" - export GOOGLE_CLOUD_TESTING_BRANCH="master" + export GOOGLE_CLOUD_TESTING_BRANCH="main" By doing this, you are specifying the location of the most up-to-date version of ``python-storage``. The the suggested remote name ``upstream`` should point to the official ``googleapis`` checkout and the - the branch should be the main branch on that remote (``master``). + the branch should be the main branch on that remote (``main``). - This repository contains configuration for the `pre-commit `__ tool, which automates checking @@ -190,7 +190,7 @@ The `description on PyPI`_ for the project comes directly from the ``README``. Due to the reStructuredText (``rst``) parser used by PyPI, relative links which will work on GitHub (e.g. ``CONTRIBUTING.rst`` instead of -``https://github.com/googleapis/python-storage/blob/master/CONTRIBUTING.rst``) +``https://github.com/googleapis/python-storage/blob/main/CONTRIBUTING.rst``) may cause problems creating links or rendering the description. .. _description on PyPI: https://pypi.org/project/google-cloud-storage @@ -215,7 +215,7 @@ We support: Supported versions can be found in our ``noxfile.py`` `config`_. -.. _config: https://github.com/googleapis/python-storage/blob/master/noxfile.py +.. _config: https://github.com/googleapis/python-storage/blob/main/noxfile.py Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. 
diff --git a/README.rst b/README.rst index fd123314b..60b43bae8 100644 --- a/README.rst +++ b/README.rst @@ -12,7 +12,7 @@ via direct download. - `Storage API docs`_ .. |GA| image:: https://img.shields.io/badge/support-GA-gold.svg - :target: https://github.com/googleapis/google-cloud-python/blob/master/README.rst#general-availability + :target: https://github.com/googleapis/google-cloud-python/blob/main/README.rst#general-availability .. |pypi| image:: https://img.shields.io/pypi/v/google-cloud-storage.svg :target: https://pypi.org/project/google-cloud-storage .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-storage.svg diff --git a/docs/conf.py b/docs/conf.py index a25e7b866..fc9d1fd34 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -76,8 +76,8 @@ # The encoding of source files. # source_encoding = 'utf-8-sig' -# The master toctree document. -master_doc = "index" +# The root toctree document. +root_doc = "index" # General information about the project. project = "google-cloud-storage" @@ -110,6 +110,7 @@ # directories to ignore when looking for source files. exclude_patterns = [ "_build", + "**/.nox/**/*", "samples/AUTHORING_GUIDE.md", "samples/CONTRIBUTING.md", "samples/snippets/README.rst", @@ -279,7 +280,7 @@ # author, documentclass [howto, manual, or own class]). latex_documents = [ ( - master_doc, + root_doc, "google-cloud-storage.tex", "google-cloud-storage Documentation", author, @@ -314,7 +315,7 @@ # (source start file, name, description, authors, manual section). 
man_pages = [ ( - master_doc, + root_doc, "google-cloud-storage", "google-cloud-storage Documentation", [author], @@ -333,7 +334,7 @@ # dir menu entry, description, category) texinfo_documents = [ ( - master_doc, + root_doc, "google-cloud-storage", "google-cloud-storage Documentation", author, diff --git a/google/cloud/storage/_http.py b/google/cloud/storage/_http.py index 0dcc68cdb..6c9d11700 100644 --- a/google/cloud/storage/_http.py +++ b/google/cloud/storage/_http.py @@ -56,7 +56,9 @@ def __init__(self, client, client_info=None, api_endpoint=None): # TODO: When metrics all use gccl, this should be removed #9552 if self._client_info.user_agent is None: # pragma: no branch self._client_info.user_agent = "" - self._client_info.user_agent += " gcloud-python/{} ".format(__version__) + agent_version = "gcloud-python/{}".format(__version__) + if agent_version not in self._client_info.user_agent: + self._client_info.user_agent += " {} ".format(agent_version) API_VERSION = "v1" """The version of the API, used in building the API call's URL.""" diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 1d283ee30..781d6e0a0 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -393,8 +393,8 @@ def from_string(cls, uri, client=None): :type client: :class:`~google.cloud.storage.client.Client` :param client: - (Optional) The client to use. If not passed, falls back to the - ``client`` stored on the blob's bucket. + (Optional) The client to use. Application code should + *always* pass ``client``. :rtype: :class:`google.cloud.storage.blob.Blob` :returns: The blob object created. 
@@ -405,7 +405,7 @@ def from_string(cls, uri, client=None): >>> from google.cloud import storage >>> from google.cloud.storage.blob import Blob >>> client = storage.Client() - >>> blob = Blob.from_string("gs://bucket/object") + >>> blob = Blob.from_string("gs://bucket/object", client=client) """ from google.cloud.storage.bucket import Bucket @@ -3052,7 +3052,7 @@ def get_iam_policy( ): """Retrieve the IAM policy for the object. - .. note: + .. note:: Blob- / object-level IAM support does not yet exist and methods currently call an internal ACL backend not providing any utility @@ -3120,7 +3120,7 @@ def set_iam_policy( ): """Update the IAM policy for the bucket. - .. note: + .. note:: Blob- / object-level IAM support does not yet exist and methods currently call an internal ACL backend not providing any utility @@ -3177,7 +3177,7 @@ def test_iam_permissions( ): """API call: test permissions - .. note: + .. note:: Blob- / object-level IAM support does not yet exist and methods currently call an internal ACL backend not providing any utility diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index 63b2e9a7b..d2af37ac7 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -656,7 +656,8 @@ def from_string(cls, uri, client=None): :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` - :param client: (Optional) The client to use. + :param client: (Optional) The client to use. Application code should + *always* pass ``client``. :rtype: :class:`google.cloud.storage.bucket.Bucket` :returns: The bucket object created. 
@@ -667,7 +668,7 @@ def from_string(cls, uri, client=None): >>> from google.cloud import storage >>> from google.cloud.storage.bucket import Bucket >>> client = storage.Client() - >>> bucket = Bucket.from_string("gs://bucket", client) + >>> bucket = Bucket.from_string("gs://bucket", client=client) """ scheme, netloc, path, query, frag = urlsplit(uri) diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index 7d787a731..bef05ea91 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -221,26 +221,6 @@ def _pop_batch(self): """ return self._batch_stack.pop() - def _bucket_arg_to_bucket(self, bucket_or_name): - """Helper to return given bucket or create new by name. - - Args: - bucket_or_name (Union[ \ - :class:`~google.cloud.storage.bucket.Bucket`, \ - str, \ - ]): - The bucket resource to pass or name to create. - - Returns: - google.cloud.storage.bucket.Bucket - The newly created bucket or the given one. - """ - if isinstance(bucket_or_name, Bucket): - bucket = bucket_or_name - else: - bucket = Bucket(self, name=bucket_or_name) - return bucket - @property def current_batch(self): """Currently-active batch. @@ -682,6 +662,28 @@ def _delete_resource( _target_object=_target_object, ) + def _bucket_arg_to_bucket(self, bucket_or_name): + """Helper to return given bucket or create new by name. + + Args: + bucket_or_name (Union[ \ + :class:`~google.cloud.storage.bucket.Bucket`, \ + str, \ + ]): + The bucket resource to pass or name to create. + + Returns: + google.cloud.storage.bucket.Bucket + The newly created bucket or the given one. + """ + if isinstance(bucket_or_name, Bucket): + bucket = bucket_or_name + if bucket.client is None: + bucket._client = self + else: + bucket = Bucket(self, name=bucket_or_name) + return bucket + def get_bucket( self, bucket_or_name, @@ -1613,10 +1615,10 @@ def generate_signed_post_policy_v4( Example: Generate signed POST policy and upload a file. 
+ >>> import datetime >>> from google.cloud import storage - >>> import pytz >>> client = storage.Client() - >>> tz = pytz.timezone('America/New_York') + >>> tz = datetime.timezone(datetime.timedelta(hours=1), 'CET') >>> policy = client.generate_signed_post_policy_v4( "bucket-name", "blob-name", diff --git a/google/cloud/storage/version.py b/google/cloud/storage/version.py index c21c39192..31244e9a8 100644 --- a/google/cloud/storage/version.py +++ b/google/cloud/storage/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.42.0" +__version__ = "1.42.1" diff --git a/noxfile.py b/noxfile.py index 2d660eef2..d57287b73 100644 --- a/noxfile.py +++ b/noxfile.py @@ -30,6 +30,9 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] +CONFORMANCE_TEST_PYTHON_VERSIONS = ["3.8"] + +_DEFAULT_STORAGE_HOST = "https://storage.googleapis.com" CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -148,6 +151,37 @@ def system(session): session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) +@nox.session(python=CONFORMANCE_TEST_PYTHON_VERSIONS) +def conftest_retry(session): + """Run the retry conformance test suite.""" + conformance_test_path = os.path.join("tests", "conformance.py") + conformance_test_folder_path = os.path.join("tests", "conformance") + + # Environment check: Only run tests if the STORAGE_EMULATOR_HOST is set. + if ( + os.environ.get("STORAGE_EMULATOR_HOST", _DEFAULT_STORAGE_HOST) + == _DEFAULT_STORAGE_HOST + ): + session.skip("Set STORAGE_EMULATOR_HOST to run, skipping") + + conformance_test_exists = os.path.exists(conformance_test_path) + conformance_test_folder_exists = os.path.exists(conformance_test_folder_path) + # Environment check: only run tests if found. 
+ if not conformance_test_exists and not conformance_test_folder_exists: + session.skip("Conformance tests were not found") + + session.install("pytest",) + session.install("-e", ".") + + # Run py.test against the conformance tests. + if conformance_test_exists: + session.run("py.test", "--quiet", conformance_test_path, *session.posargs) + if conformance_test_folder_exists: + session.run( + "py.test", "--quiet", conformance_test_folder_path, *session.posargs + ) + + @nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. diff --git a/owlbot.py b/owlbot.py index be172d47a..2b5e9246b 100644 --- a/owlbot.py +++ b/owlbot.py @@ -14,10 +14,9 @@ """This script is used to synthesize generated parts of this library.""" -import re - import synthtool as s from synthtool import gcp +from synthtool.languages import python common = gcp.CommonTemplates() @@ -33,17 +32,21 @@ # See: https://github.com/googleapis/python-storage/issues/226 "google-cloud-kms < 2.0dev", ], - intersphinx_dependencies = { + intersphinx_dependencies={ "requests": "https://docs.python-requests.org/en/master/" }, ) s.move( - templated_files, excludes=[ + templated_files, + excludes=[ "docs/multiprocessing.rst", "noxfile.py", + "renovate.json", # do not bundle reports "CONTRIBUTING.rst", ], ) +python.py_samples(skip_readmes=True) + s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/renovate.json b/renovate.json index c04895563..9fa8816fe 100644 --- a/renovate.json +++ b/renovate.json @@ -1,6 +1,8 @@ { "extends": [ - "config:base", ":preserveSemverRanges" + "config:base", + ":preserveSemverRanges", + ":disableDependencyDashboard" ], "ignorePaths": [".pre-commit-config.yaml"], "pip_requirements": { diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst index a0406dba8..275d64989 100644 --- a/scripts/readme-gen/templates/install_deps.tmpl.rst +++ 
b/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -12,7 +12,7 @@ Install Dependencies .. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup -#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. +#. Create a virtualenv. Samples are compatible with Python 3.6+. .. code-block:: bash diff --git a/tests/conformance/__init__.py b/tests/conformance/__init__.py new file mode 100644 index 000000000..bff181aad --- /dev/null +++ b/tests/conformance/__init__.py @@ -0,0 +1,24 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import io +import json +import os + + +def _read_local_json(json_file): + here = os.path.dirname(__file__) + json_path = os.path.abspath(os.path.join(here, json_file)) + with io.open(json_path, "r", encoding="utf-8-sig") as fileobj: + return json.load(fileobj) diff --git a/tests/conformance/retry_strategy_test_data.json b/tests/conformance/retry_strategy_test_data.json new file mode 100644 index 000000000..f37d309e9 --- /dev/null +++ b/tests/conformance/retry_strategy_test_data.json @@ -0,0 +1,99 @@ +{ + "retryStrategyTests": [ + { + "id": 1, + "description": "always_idempotent", + "cases": [ + { + "instructions": ["return-503", "return-503"] + } + ], + "methods": [ + {"name": "storage.bucket_acl.get", "resources": ["BUCKET"]}, + {"name": "storage.bucket_acl.list", "resources": ["BUCKET"]}, + {"name": "storage.buckets.delete", "resources": ["BUCKET"]}, + {"name": "storage.buckets.get", "resources": ["BUCKET"]}, + {"name": "storage.buckets.getIamPolicy", "resources": ["BUCKET"]}, + {"name": "storage.buckets.insert", "resources": []}, + {"name": "storage.buckets.list", "resources": ["BUCKET"]}, + {"name": "storage.buckets.lockRetentionPolicy", "resources": ["BUCKET"]}, + {"name": "storage.buckets.testIamPermissions", "resources": ["BUCKET"]}, + {"name": "storage.default_object_acl.get", "resources": ["BUCKET"]}, + {"name": "storage.default_object_acl.list", "resources": ["BUCKET"]}, + {"name": "storage.hmacKey.delete", "resources": ["HMAC_KEY"]}, + {"name": "storage.hmacKey.get", "resources": ["HMAC_KEY"]}, + {"name": "storage.hmacKey.list", "resources": ["HMAC_KEY"]}, + {"name": "storage.notifications.delete", "resources": ["BUCKET", "NOTIFICATION"]}, + {"name": "storage.notifications.get", "resources": ["BUCKET", "NOTIFICATION"]}, + {"name": "storage.notifications.list", "resources": ["BUCKET", "NOTIFICATION"]}, + {"name": "storage.object_acl.get", "resources": ["BUCKET", "OBJECT"]}, + {"name": "storage.object_acl.list", "resources": ["BUCKET", "OBJECT"]}, + 
{"name": "storage.objects.get", "resources": ["BUCKET", "OBJECT"]}, + {"name": "storage.objects.list", "resources": ["BUCKET", "OBJECT"]}, + {"name": "storage.serviceaccount.get", "resources": []} + ], + "preconditionProvided": false, + "expectSuccess": true + }, + { + "id": 2, + "description": "conditionally_idempotent_retries_when_precondition_is_present", + "cases": [ + { + "instructions": ["return-503", "return-503"] + } + ], + "methods": [ + {"name": "storage.buckets.patch", "resources": ["BUCKET"]}, + {"name": "storage.buckets.setIamPolicy", "resources": ["BUCKET"]}, + {"name": "storage.buckets.update", "resources": ["BUCKET"]}, + {"name": "storage.hmacKey.update", "resources": ["HMAC_KEY"]}, + {"name": "storage.objects.compose", "resources": ["BUCKET", "OBJECT"]}, + {"name": "storage.objects.copy", "resources": ["BUCKET", "OBJECT"]}, + {"name": "storage.objects.delete", "resources": ["BUCKET", "OBJECT"]}, + {"name": "storage.objects.insert", "resources": ["BUCKET"]}, + {"name": "storage.objects.patch", "resources": ["BUCKET", "OBJECT"]}, + {"name": "storage.objects.rewrite", "resources": ["BUCKET", "OBJECT"]}, + {"name": "storage.objects.update", "resources": ["BUCKET", "OBJECT"]} + ], + "preconditionProvided": true, + "expectSuccess": true + }, + { + "id": 3, + "description": "conditionally_idempotent_no_retries_when_precondition_is_absent", + "cases": [ + { + "instructions": ["return-503"] + } + ], + "methods": [ + {"name": "storage.buckets.patch", "resources": ["BUCKET"]}, + {"name": "storage.buckets.setIamPolicy", "resources": ["BUCKET"]}, + {"name": "storage.buckets.update", "resources": ["BUCKET"]}, + {"name": "storage.hmacKey.update", "resources": ["HMAC_KEY"]}, + {"name": "storage.objects.compose", "resources": ["BUCKET", "OBJECT"]}, + {"name": "storage.objects.copy", "resources": ["BUCKET", "OBJECT"]}, + {"name": "storage.objects.delete", "resources": ["BUCKET", "OBJECT"]}, + {"name": "storage.objects.insert", "resources": ["BUCKET"]}, + {"name": 
"storage.objects.patch", "resources": ["BUCKET", "OBJECT"]}, + {"name": "storage.objects.rewrite", "resources": ["BUCKET", "OBJECT"]}, + {"name": "storage.objects.update", "resources": ["BUCKET", "OBJECT"]} + ], + "preconditionProvided": false, + "expectSuccess": false + }, + { + "id": 4, + "description": "non idempotent", + "cases": [ + { + "instructions": [] + } + ], + "methods": [], + "preconditionProvided": false, + "expectSuccess": false + } + ] + } \ No newline at end of file diff --git a/tests/conformance/test_conformance.py b/tests/conformance/test_conformance.py new file mode 100644 index 000000000..468895835 --- /dev/null +++ b/tests/conformance/test_conformance.py @@ -0,0 +1,729 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Conformance tests for retry. Verifies correct behavior around retryable errors, idempotency and preconditions.""" + +import os +import requests +import tempfile +import uuid +import logging +import functools +import pytest + +from google.cloud import storage +from google.auth.credentials import AnonymousCredentials + +from . 
import _read_local_json + + +_CONFORMANCE_TESTS = _read_local_json("retry_strategy_test_data.json")[ + "retryStrategyTests" +] + +_STORAGE_EMULATOR_ENV_VAR = "STORAGE_EMULATOR_HOST" +"""Environment variable defining host for Storage testbench emulator.""" + +_CONF_TEST_PROJECT_ID = "my-project-id" +_CONF_TEST_SERVICE_ACCOUNT_EMAIL = ( + "my-service-account@my-project-id.iam.gserviceaccount.com" +) + +_STRING_CONTENT = "hello world" +_BYTE_CONTENT = b"12345678" + + +######################################################################################################################################## +### Library methods for mapping ######################################################################################################## +######################################################################################################################################## + + +def bucket_get_blob(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") + bucket = client.bucket(bucket.name) + bucket.get_blob(object.name) + + +def blob_exists(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") + blob = client.bucket(bucket.name).blob(object.name) + blob.exists() + + +def blob_download_as_bytes(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") + blob = client.bucket(bucket.name).blob(object.name) + blob.download_as_bytes() + + +def blob_download_as_text(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") + blob = client.bucket(bucket.name).blob(object.name) + blob.download_as_text() + + +def blob_download_to_filename(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") + blob = client.bucket(bucket.name).blob(object.name) + with tempfile.NamedTemporaryFile() as temp_f: + 
blob.download_to_filename(temp_f.name) + + +def client_download_blob_to_file(client, _preconditions, **resources): + object = resources.get("object") + with tempfile.NamedTemporaryFile() as temp_f: + with open(temp_f.name, "wb") as file_obj: + client.download_blob_to_file(object, file_obj) + + +def blobreader_read(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") + blob = client.bucket(bucket.name).blob(object.name) + with blob.open() as reader: + reader.read() + + +def client_list_blobs(client, _preconditions, **resources): + bucket = resources.get("bucket") + blobs = client.list_blobs(bucket.name) + for b in blobs: + pass + + +def bucket_list_blobs(client, _preconditions, **resources): + bucket = resources.get("bucket") + blobs = client.bucket(bucket.name).list_blobs() + for b in blobs: + pass + + +def bucket_delete(client, _preconditions, **resources): + bucket = client.bucket(resources.get("bucket").name) + bucket.delete(force=True) + + +def bucket_reload(client, _preconditions, **resources): + bucket = client.bucket(resources.get("bucket").name) + bucket.reload() + + +def client_get_bucket(client, _preconditions, **resources): + client.get_bucket(resources.get("bucket").name) + + +def client_lookup_bucket(client, _preconditions, **resources): + client.lookup_bucket(resources.get("bucket").name) + + +def bucket_exists(client, _preconditions, **resources): + bucket = client.bucket(resources.get("bucket").name) + bucket.exists() + + +def client_create_bucket(client, _preconditions, **_): + bucket = client.bucket(uuid.uuid4().hex) + client.create_bucket(bucket) + + +def bucket_create(client, _preconditions, **_): + bucket = client.bucket(uuid.uuid4().hex) + bucket.create() + + +def client_list_buckets(client, _preconditions, **_): + buckets = client.list_buckets() + for b in buckets: + pass + + +def bucket_get_iam_policy(client, _preconditions, **resources): + bucket = 
client.bucket(resources.get("bucket").name) + bucket.get_iam_policy() + + +def bucket_test_iam_permissions(client, _preconditions, **resources): + bucket = client.bucket(resources.get("bucket").name) + permissions = ["storage.buckets.get", "storage.buckets.create"] + bucket.test_iam_permissions(permissions) + + +def bucket_lock_retention_policy(client, _preconditions, **resources): + bucket = client.bucket(resources.get("bucket").name) + bucket.retention_period = 60 + bucket.patch() + bucket.lock_retention_policy() + + +def notification_create(client, _preconditions, **resources): + bucket = client.get_bucket(resources.get("bucket").name) + notification = bucket.notification() + notification.create() + + +def bucket_list_notifications(client, _preconditions, **resources): + bucket = resources.get("bucket") + notifications = client.bucket(bucket.name).list_notifications() + for n in notifications: + pass + + +def bucket_get_notification(client, _preconditions, **resources): + bucket = resources.get("bucket") + notification = resources.get("notification") + client.bucket(bucket.name).get_notification(notification.notification_id) + + +def notification_reload(client, _preconditions, **resources): + notification = client.bucket(resources.get("bucket").name).notification( + notification_id=resources.get("notification").notification_id + ) + notification.reload() + + +def notification_exists(client, _preconditions, **resources): + notification = client.bucket(resources.get("bucket").name).notification( + notification_id=resources.get("notification").notification_id + ) + notification.exists() + + +def notification_delete(client, _preconditions, **resources): + notification = client.bucket(resources.get("bucket").name).notification( + notification_id=resources.get("notification").notification_id + ) + notification.delete() + + +def client_list_hmac_keys(client, _preconditions, **_): + hmac_keys = client.list_hmac_keys() + for k in hmac_keys: + pass + + +def 
client_get_service_account_email(client, _preconditions, **_): + client.get_service_account_email() + + +def bucket_patch(client, _preconditions, **resources): + bucket = client.get_bucket(resources.get("bucket").name) + metageneration = bucket.metageneration + bucket.storage_class = "COLDLINE" + if _preconditions: + bucket.patch(if_metageneration_match=metageneration) + else: + bucket.patch() + + +def bucket_update(client, _preconditions, **resources): + bucket = client.get_bucket(resources.get("bucket").name) + metageneration = bucket.metageneration + bucket._properties = {"storageClass": "STANDARD"} + if _preconditions: + bucket.update(if_metageneration_match=metageneration) + else: + bucket.update() + + +def bucket_set_iam_policy(client, _preconditions, **resources): + bucket = client.get_bucket(resources.get("bucket").name) + role = "roles/storage.objectViewer" + member = _CONF_TEST_SERVICE_ACCOUNT_EMAIL + + policy = bucket.get_iam_policy(requested_policy_version=3) + policy.bindings.append({"role": role, "members": {member}}) + if _preconditions: + bucket.set_iam_policy(policy) + else: + # IAM policies have no metageneration: clear ETag to avoid checking that it matches. 
+ policy.etag = None + bucket.set_iam_policy(policy) + + +def bucket_delete_blob(client, _preconditions, **resources): + object = resources.get("object") + bucket = client.bucket(resources.get("bucket").name) + if _preconditions: + generation = object.generation + bucket.delete_blob(object.name, if_generation_match=generation) + else: + bucket.delete_blob(object.name) + + +def bucket_delete_blobs(client, _preconditions, **resources): + object = resources.get("object") + bucket = client.bucket(resources.get("bucket").name) + sources = [object] + source_generations = [object.generation] + if _preconditions: + bucket.delete_blobs(sources, if_generation_match=source_generations) + else: + bucket.delete_blobs(sources) + + +def blob_delete(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") + blob = client.bucket(bucket.name).blob(object.name) + if _preconditions: + blob.delete(if_generation_match=object.generation) + else: + blob.delete() + + +def blob_patch(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") + blob = client.bucket(bucket.name).blob(object.name) + blob.metadata = {"foo": "bar"} + if _preconditions: + blob.patch(if_metageneration_match=object.metageneration) + else: + blob.patch() + + +def blob_update(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") + blob = client.bucket(bucket.name).blob(object.name) + blob.metadata = {"foo": "bar"} + if _preconditions: + blob.update(if_metageneration_match=object.metageneration) + else: + blob.update() + + +def bucket_copy_blob(client, _preconditions, **resources): + object = resources.get("object") + bucket = client.bucket(resources.get("bucket").name) + destination = client.create_bucket(uuid.uuid4().hex) + if _preconditions: + bucket.copy_blob( + object, destination, new_name=uuid.uuid4().hex, if_generation_match=0 + ) + else: + 
bucket.copy_blob(object, destination) + + +def bucket_rename_blob(client, _preconditions, **resources): + object = resources.get("object") + bucket = client.bucket(resources.get("bucket").name) + blob = bucket.blob(resources.get("object").name) + new_name = uuid.uuid4().hex + if _preconditions: + bucket.rename_blob( + blob, + new_name, + if_generation_match=0, + if_source_generation_match=object.generation, + ) + else: + bucket.rename_blob(blob, new_name) + + +def blob_rewrite(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") + new_blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + new_blob.metadata = {"foo": "bar"} + if _preconditions: + new_blob.rewrite(object, if_generation_match=0) + else: + new_blob.rewrite(object) + + +def blob_update_storage_class(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") + blob = client.bucket(bucket.name).blob(object.name) + storage_class = "STANDARD" + if _preconditions: + blob.update_storage_class(storage_class, if_generation_match=object.generation) + else: + blob.update_storage_class(storage_class) + + +def blob_compose(client, _preconditions, **resources): + bucket = resources.get("bucket") + object = resources.get("object") + blob = client.bucket(bucket.name).blob(object.name) + blob_2 = bucket.blob(uuid.uuid4().hex) + blob_2.upload_from_string(_STRING_CONTENT) + sources = [blob_2] + if _preconditions: + blob.compose(sources, if_generation_match=object.generation) + else: + blob.compose(sources) + + +def blob_upload_from_string(client, _preconditions, **resources): + bucket = resources.get("bucket") + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + if _preconditions: + blob.upload_from_string(_STRING_CONTENT, if_generation_match=0) + else: + blob.upload_from_string(_STRING_CONTENT) + + +def blob_upload_from_file(client, _preconditions, **resources): + bucket = resources.get("bucket") + blob = 
client.bucket(bucket.name).blob(uuid.uuid4().hex) + with tempfile.NamedTemporaryFile() as temp_f: + if _preconditions: + blob.upload_from_file(temp_f, if_generation_match=0) + else: + blob.upload_from_file(temp_f) + + +def blob_upload_from_filename(client, _preconditions, **resources): + bucket = resources.get("bucket") + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + + with tempfile.NamedTemporaryFile() as temp_f: + if _preconditions: + blob.upload_from_filename(temp_f.name, if_generation_match=0) + else: + blob.upload_from_filename(temp_f.name) + + +def blobwriter_write(client, _preconditions, **resources): + chunk_size = 256 * 1024 + bucket = resources.get("bucket") + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + if _preconditions: + with blob.open("wb", chunk_size=chunk_size, if_generation_match=0) as writer: + writer.write(_BYTE_CONTENT) + else: + with blob.open("wb", chunk_size=chunk_size) as writer: + writer.write(_BYTE_CONTENT) + + +def blob_create_resumable_upload_session(client, _preconditions, **resources): + bucket = resources.get("bucket") + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + if _preconditions: + blob.create_resumable_upload_session(if_generation_match=0) + else: + blob.create_resumable_upload_session() + + +######################################################################################################################################## +### Method Invocation Mapping ########################################################################################################## +######################################################################################################################################## + +# Method invocation mapping is a map whose keys are a string describing a standard +# API call (e.g. storage.objects.get) and values are a list of functions which +# wrap library methods that implement these calls. 
There may be multiple values +# because multiple library methods may use the same call (e.g. get could be a +# read or just a metadata get). + +method_mapping = { + "storage.buckets.delete": [bucket_delete], # S1 start + "storage.buckets.get": [ + client_get_bucket, + bucket_reload, + client_lookup_bucket, + bucket_exists, + ], + "storage.buckets.getIamPolicy": [bucket_get_iam_policy], + "storage.buckets.insert": [client_create_bucket, bucket_create], + "storage.buckets.list": [client_list_buckets], + "storage.buckets.lockRetentionPolicy": [bucket_lock_retention_policy], + "storage.buckets.testIamPermissions": [bucket_test_iam_permissions], + "storage.notifications.delete": [notification_delete], + "storage.notifications.get": [ + bucket_get_notification, + notification_exists, + notification_reload, + ], + "storage.notifications.list": [bucket_list_notifications], + "storage.objects.get": [ + bucket_get_blob, + blob_exists, + client_download_blob_to_file, + blob_download_to_filename, + blob_download_as_bytes, + blob_download_as_text, + blobreader_read, + ], + "storage.objects.list": [ + client_list_blobs, + bucket_list_blobs, + bucket_delete, + ], # S1 end + "storage.buckets.patch": [bucket_patch], # S2/S3 start + "storage.buckets.setIamPolicy": [bucket_set_iam_policy], + "storage.buckets.update": [bucket_update], + "storage.objects.compose": [blob_compose], + "storage.objects.copy": [bucket_copy_blob, bucket_rename_blob], + "storage.objects.delete": [ + bucket_delete_blob, + bucket_delete_blobs, + blob_delete, + bucket_rename_blob, + ], + "storage.objects.insert": [ + blob_upload_from_string, + blob_upload_from_file, + blob_upload_from_filename, + blobwriter_write, + blob_create_resumable_upload_session, + ], + "storage.objects.patch": [blob_patch], + "storage.objects.rewrite": [blob_rewrite, blob_update_storage_class], + "storage.objects.update": [blob_update], # S2/S3 end +} + + 
+######################################################################################################################################## +### Pytest Fixtures to Populate Resources ############################################################################################## +######################################################################################################################################## + + +@pytest.fixture +def client(): + host = os.environ.get(_STORAGE_EMULATOR_ENV_VAR) + client = storage.Client( + project=_CONF_TEST_PROJECT_ID, + credentials=AnonymousCredentials(), + client_options={"api_endpoint": host}, + ) + return client + + +@pytest.fixture +def bucket(client): + bucket = client.bucket(uuid.uuid4().hex) + client.create_bucket(bucket) + yield bucket + try: + bucket.delete(force=True) + except Exception: + # in cases where resources are deleted within the test + # TODO(cathyo@): narrow except to NotFound once the emulator response issue is resolved + pass + + +@pytest.fixture +def object(client, bucket): + blob = client.bucket(bucket.name).blob(uuid.uuid4().hex) + blob.upload_from_string(_STRING_CONTENT) + blob.reload() + yield blob + try: + blob.delete() + except Exception: # in cases where resources are deleted within the test + pass + + +@pytest.fixture +def notification(client, bucket): + notification = client.bucket(bucket.name).notification() + notification.create() + notification.reload() + yield notification + try: + notification.delete() + except Exception: # in cases where resources are deleted within the test + pass + + +@pytest.fixture +def hmac_key(client): + hmac_key, _secret = client.create_hmac_key( + service_account_email=_CONF_TEST_SERVICE_ACCOUNT_EMAIL, + project_id=_CONF_TEST_PROJECT_ID, + ) + yield hmac_key + try: + hmac_key.state = "INACTIVE" + hmac_key.update() + hmac_key.delete() + except Exception: # in cases where resources are deleted within the test + pass + + 
+######################################################################################################################################## +### Helper Methods for Testbench Retry Test API ######################################################################################## +######################################################################################################################################## + + +""" +The Retry Test API in the testbench is used to run the retry conformance tests. It offers a mechanism to describe more complex +retry scenarios while sending a single, constant header through all the HTTP requests from a test program. The Retry Test API +can be accessed by adding the path "/retry-test" to the host. See also: https://github.com/googleapis/storage-testbench +""" + + +def _create_retry_test(host, method_name, instructions): + """ + For each test case, initialize a Retry Test resource by loading a set of + instructions to the testbench host. The instructions include an API method + and a list of errors. An unique id is created for each Retry Test resource. + """ + import json + + retry_test_uri = host + "/retry_test" + headers = { + "Content-Type": "application/json", + } + data_dict = {"instructions": {method_name: instructions}} + data = json.dumps(data_dict) + r = requests.post(retry_test_uri, headers=headers, data=data) + return r.json() + + +def _get_retry_test(host, id): + """ + Retrieve the state of the Retry Test resource, including the unique id, + instructions, and a boolean status "completed". This can be used to verify + if all instructions were used as expected. 
+ """ + get_retry_test_uri = "{base}{retry}/{id}".format( + base=host, retry="/retry_test", id=id + ) + r = requests.get(get_retry_test_uri) + return r.json() + + +def _run_retry_test( + host, id, lib_func, _preconditions, bucket, object, notification, hmac_key +): + """ + To execute tests against the list of instructions sent to the Retry Test API, + create a client to send the retry test ID using the x-retry-test-id header + in each request. For incoming requests that match the test ID and API method, + the testbench will pop off the next instruction from the list and force the + listed failure case. + """ + client = storage.Client( + project=_CONF_TEST_PROJECT_ID, + credentials=AnonymousCredentials(), + client_options={"api_endpoint": host}, + ) + client._http.headers.update({"x-retry-test-id": id}) + lib_func( + client, + _preconditions, + bucket=bucket, + object=object, + notification=notification, + hmac_key=hmac_key, + ) + + +def _delete_retry_test(host, id): + """ + Delete the Retry Test resource by id. 
+ """ + get_retry_test_uri = "{base}{retry}/{id}".format( + base=host, retry="/retry_test", id=id + ) + requests.delete(get_retry_test_uri) + + +######################################################################################################################################## +### Run Test Case for Retry Strategy ################################################################################################### +######################################################################################################################################## + + +def run_test_case( + scenario_id, method, case, lib_func, host, bucket, object, notification, hmac_key +): + scenario = _CONFORMANCE_TESTS[scenario_id - 1] + expect_success = scenario["expectSuccess"] + precondition_provided = scenario["preconditionProvided"] + method_name = method["name"] + instructions = case["instructions"] + + try: + r = _create_retry_test(host, method_name, instructions) + id = r["id"] + except Exception as e: + raise Exception( + "Error creating retry test for {}: {}".format(method_name, e) + ).with_traceback(e.__traceback__) + + # Run retry tests on library methods. + try: + _run_retry_test( + host, + id, + lib_func, + precondition_provided, + bucket, + object, + notification, + hmac_key, + ) + except Exception as e: + logging.exception( + "Caught an exception while running retry instructions\n {}".format(e) + ) + success_results = False + else: + success_results = True + + # Assert expected success for each scenario. + assert ( + expect_success == success_results + ), "Retry API call expected_success was {}, should be {}".format( + success_results, expect_success + ) + + # Verify that all instructions were used up during the test + # (indicates that the client sent the correct requests). 
+ status_response = _get_retry_test(host, id) + assert ( + status_response["completed"] is True + ), "Retry test not completed; unused instructions:{}".format( + status_response["instructions"] + ) + + # Clean up and close out test in testbench. + _delete_retry_test(host, id) + + +######################################################################################################################################## +### Run Conformance Tests for Retry Strategy ########################################################################################### +######################################################################################################################################## + +for scenario in _CONFORMANCE_TESTS: + host = os.environ.get(_STORAGE_EMULATOR_ENV_VAR) + if host is None: + logging.error( + "This test must use the testbench emulator; set STORAGE_EMULATOR_HOST to run." + ) + break + + id = scenario["id"] + methods = scenario["methods"] + cases = scenario["cases"] + for c in cases: + for m in methods: + method_name = m["name"] + if method_name not in method_mapping: + logging.info("No tests for operation {}".format(method_name)) + continue + + for lib_func in method_mapping[method_name]: + test_name = "test-S{}-{}-{}".format(id, method_name, lib_func.__name__) + globals()[test_name] = functools.partial( + run_test_case, id, m, c, lib_func, host + ) diff --git a/tests/system/_helpers.py b/tests/system/_helpers.py index 72045d1b6..184cf9f56 100644 --- a/tests/system/_helpers.py +++ b/tests/system/_helpers.py @@ -82,6 +82,11 @@ def delete_blob(blob): retry(blob.delete)(timeout=120) # seconds except exceptions.NotFound: # race pass + except exceptions.Forbidden: # event-based hold + blob.event_based_hold = False + blob.patch() + retry_no_event_based_hold(blob.reload)() + retry(blob.delete)(timeout=120) # seconds def delete_bucket(bucket): diff --git a/tests/system/test_bucket.py b/tests/system/test_bucket.py index 2fdd64fe4..55ea09057 100644 --- 
a/tests/system/test_bucket.py +++ b/tests/system/test_bucket.py @@ -111,7 +111,9 @@ def test_bucket_update_labels(storage_client, buckets_to_delete): assert bucket.labels == new_labels bucket.labels = {} - bucket.update() + # See https://github.com/googleapis/python-storage/issues/541 + retry_400 = _helpers.RetryErrors(exceptions.BadRequest) + retry_400(bucket.update)() assert bucket.labels == {} @@ -565,7 +567,7 @@ def test_bucket_w_retention_period( assert not bucket.default_event_based_hold assert not bucket.retention_policy_locked - other.reload() + _helpers.retry_no_event_based_hold(other.reload)() assert not other.event_based_hold assert not other.temporary_hold @@ -704,7 +706,7 @@ def test_new_bucket_w_ubla( bucket_acl.reload() bucket_acl.loaded = True # Fake that we somehow loaded the ACL - bucket_acl.all().grant_read() + bucket_acl.group("cloud-developer-relations@google.com").grant_read() with pytest.raises(exceptions.BadRequest): bucket_acl.save() @@ -722,7 +724,7 @@ def test_new_bucket_w_ubla( blob_acl.reload() blob_acl.loaded = True # Fake that we somehow loaded the ACL - blob_acl.all().grant_read() + blob_acl.group("cloud-developer-relations@google.com").grant_read() with pytest.raises(exceptions.BadRequest): blob_acl.save() diff --git a/tests/unit/test__http.py b/tests/unit/test__http.py index ac8471d07..3b022e191 100644 --- a/tests/unit/test__http.py +++ b/tests/unit/test__http.py @@ -228,3 +228,18 @@ def test_mtls(self): self.assertEqual(conn.ALLOW_AUTO_SWITCH_TO_MTLS_URL, False) self.assertEqual(conn.API_BASE_URL, "http://foo") self.assertEqual(conn.API_BASE_MTLS_URL, "https://storage.mtls.googleapis.com") + + def test_duplicate_user_agent(self): + # Regression test for issue #565 + from google.cloud._http import ClientInfo + from google.cloud.storage.batch import Batch + from google.cloud.storage import __version__ + + client_info = ClientInfo(user_agent="test/123") + conn = self._make_one(object(), client_info=client_info) + expected_user_agent 
= "test/123 gcloud-python/{} ".format(__version__) + self.assertEqual(conn._client_info.user_agent, expected_user_agent) + + client = mock.Mock(_connection=conn, spec=["_connection"]) + batch = Batch(client) + self.assertEqual(batch._client_info.user_agent, expected_user_agent) diff --git a/tests/unit/test__signing.py b/tests/unit/test__signing.py index 3eac70cc1..fbfa6052f 100644 --- a/tests/unit/test__signing.py +++ b/tests/unit/test__signing.py @@ -44,6 +44,24 @@ def _utc_seconds(when): return int(calendar.timegm(when.timetuple())) +def _make_cet_timezone(): + try: + from datetime import timezone + + except ImportError: # Python 2.7 + from google.cloud._helpers import _UTC + + class CET(_UTC): + _tzname = "CET" + _utcoffset = datetime.timedelta(hours=1) + + return CET() + else: + from datetime import timedelta + + return timezone(timedelta(hours=1), name="CET") + + class Test_get_expiration_seconds_v2(unittest.TestCase): @staticmethod def _call_fut(expiration): @@ -81,13 +99,7 @@ def test_w_expiration_utc_datetime(self): self.assertEqual(self._call_fut(expiration_utc), utc_seconds) def test_w_expiration_other_zone_datetime(self): - from google.cloud._helpers import _UTC - - class CET(_UTC): - _tzname = "CET" - _utcoffset = datetime.timedelta(hours=1) - - zone = CET() + zone = _make_cet_timezone() expiration_other = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, zone) utc_seconds = _utc_seconds(expiration_other) cet_seconds = utc_seconds - (60 * 60) # CET one hour earlier than UTC @@ -198,13 +210,8 @@ def test_w_expiration_utc_datetime(self): def test_w_expiration_other_zone_datetime(self): from google.cloud._helpers import UTC - from google.cloud._helpers import _UTC - class CET(_UTC): - _tzname = "CET" - _utcoffset = datetime.timedelta(hours=1) - - zone = CET() + zone = _make_cet_timezone() fake_utcnow = datetime.datetime(2004, 8, 19, 0, 0, 0, 0, UTC) fake_cetnow = fake_utcnow.astimezone(zone) delta = datetime.timedelta(seconds=10) @@ -818,6 +825,8 @@ def 
test_get_v4_now_dtstamps(self): self.assertEqual(datestamp, "20200312") +"""Conformance tests for v4 signed URLs.""" + _FAKE_SERVICE_ACCOUNT = None diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index e3b770763..cc2466ac8 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -366,10 +366,10 @@ def test_ctor_defaults(self): def test_ctor_explicit_ubla(self): import datetime - import pytz + from google.cloud._helpers import UTC bucket = self._make_bucket() - now = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC) + now = datetime.datetime.utcnow().replace(tzinfo=UTC) config = self._make_one( bucket, @@ -403,10 +403,10 @@ def test_ctor_explicit_pap(self): def test_ctor_explicit_bpo(self): import datetime - import pytz + from google.cloud._helpers import UTC bucket = self._make_bucket() - now = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC) + now = datetime.datetime.utcnow().replace(tzinfo=UTC) config = pytest.deprecated_call( self._make_one, @@ -433,10 +433,10 @@ def test_ctor_ubla_and_bpo_enabled(self): def test_ctor_ubla_and_bpo_time(self): import datetime - import pytz + from google.cloud._helpers import UTC bucket = self._make_bucket() - now = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC) + now = datetime.datetime.utcnow().replace(tzinfo=UTC) with self.assertRaises(ValueError): self._make_one( @@ -481,12 +481,12 @@ def test_from_api_repr_w_disabled(self): def test_from_api_repr_w_enabled(self): import datetime - import pytz + from google.cloud._helpers import UTC from google.cloud._helpers import _datetime_to_rfc3339 klass = self._get_target_class() bucket = self._make_bucket() - now = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC) + now = datetime.datetime.utcnow().replace(tzinfo=UTC) resource = { "uniformBucketLevelAccess": { "enabled": True, @@ -2174,11 +2174,11 @@ def test_iam_configuration_policy_missing(self): def test_iam_configuration_policy_w_entry(self): import datetime - import pytz + from 
google.cloud._helpers import UTC from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud.storage.bucket import IAMConfiguration - now = datetime.datetime.utcnow().replace(tzinfo=pytz.UTC) + now = datetime.datetime.utcnow().replace(tzinfo=UTC) NAME = "name" properties = { "iamConfiguration": { diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 4998c54f4..85c4bc5e2 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -785,6 +785,51 @@ def test__delete_resource_hit_w_explicit(self): _target_object=target, ) + def test__bucket_arg_to_bucket_w_bucket_w_client(self): + from google.cloud.storage.bucket import Bucket + + project = "PROJECT" + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + other_client = mock.Mock(spec=[]) + bucket_name = "w_client" + + bucket = Bucket(other_client, name=bucket_name) + + found = client._bucket_arg_to_bucket(bucket) + + self.assertIs(found, bucket) + self.assertIs(found.client, other_client) + + def test__bucket_arg_to_bucket_w_bucket_wo_client(self): + from google.cloud.storage.bucket import Bucket + + project = "PROJECT" + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + bucket_name = "wo_client" + + bucket = Bucket(client=None, name=bucket_name) + + found = client._bucket_arg_to_bucket(bucket) + + self.assertIs(found, bucket) + self.assertIs(found.client, client) + + def test__bucket_arg_to_bucket_w_bucket_name(self): + from google.cloud.storage.bucket import Bucket + + project = "PROJECT" + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + bucket_name = "string-name" + + found = client._bucket_arg_to_bucket(bucket_name) + + self.assertIsInstance(found, Bucket) + self.assertEqual(found.name, bucket_name) + self.assertIs(found.client, client) + def test_get_bucket_miss_w_string_w_defaults(self): from 
google.cloud.exceptions import NotFound from google.cloud.storage.bucket import Bucket @@ -1778,7 +1823,7 @@ def _create_hmac_key_helper( self, explicit_project=None, user_project=None, timeout=None, retry=None, ): import datetime - from pytz import UTC + from google.cloud._helpers import UTC from google.cloud.storage.hmac_key import HMACKeyMetadata project = "PROJECT" diff --git a/tests/unit/test_hmac_key.py b/tests/unit/test_hmac_key.py index 60d0c135b..59a2b221f 100644 --- a/tests/unit/test_hmac_key.py +++ b/tests/unit/test_hmac_key.py @@ -173,7 +173,7 @@ def test_state_setter_active(self): def test_time_created_getter(self): import datetime - from pytz import UTC + from google.cloud._helpers import UTC metadata = self._make_one() now = datetime.datetime.utcnow() @@ -183,7 +183,7 @@ def test_time_created_getter(self): def test_updated_getter(self): import datetime - from pytz import UTC + from google.cloud._helpers import UTC metadata = self._make_one() now = datetime.datetime.utcnow()