diff --git a/.flake8 b/.flake8
index ed9316381..29227d4cf 100644
--- a/.flake8
+++ b/.flake8
@@ -26,6 +26,7 @@ exclude =
   *_pb2.py
 
   # Standard linting exemptions.
+  **/.nox/**
   __pycache__,
   .git,
   *.pyc,
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
new file mode 100644
index 000000000..9602d5405
--- /dev/null
+++ b/.github/.OwlBot.lock.yaml
@@ -0,0 +1,3 @@
+docker:
+  image: gcr.io/repo-automation-bots/owlbot-python:latest
+  digest: sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3
diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml
new file mode 100644
index 000000000..c2e0f4b92
--- /dev/null
+++ b/.github/.OwlBot.yaml
@@ -0,0 +1,19 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+docker:
+  image: gcr.io/repo-automation-bots/owlbot-python:latest
+
+begin-after-commit-hash: 6acf4a0a797f1082027985c55c4b14b60f673dd7
+
diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml
new file mode 100644
index 000000000..6fe78aa79
--- /dev/null
+++ b/.github/header-checker-lint.yml
@@ -0,0 +1,15 @@
+{"allowedCopyrightHolders": ["Google LLC"],
+ "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"],
+ "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt", "**/__init__.py", "samples/**/constraints.txt", "samples/**/constraints-test.txt"],
+ "sourceFileExtensions": [
+   "ts",
+   "js",
+   "java",
+   "sh",
+   "Dockerfile",
+   "yaml",
+   "py",
+   "html",
+   "txt"
+ ]
+}
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index b9daa52f1..b4243ced7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -50,8 +50,10 @@ docs.metadata
 
 # Virtual environment
 env/
+
+# Test logs
 coverage.xml
-sponge_log.xml
+*sponge_log.xml
 
 # System test environment variables.
 system_tests/local_test_setup
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 9e7febd82..9f144307d 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -15,7 +15,11 @@
 
 set -eo pipefail
 
-cd github/python-storage
+if [[ -z "${PROJECT_ROOT:-}" ]]; then
+    PROJECT_ROOT="github/python-storage"
+fi
+
+cd "${PROJECT_ROOT}"
 
 # Disable buffering, so that the logs stream through.
 export PYTHONUNBUFFERED=1
@@ -30,16 +34,26 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
 export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
 
 # Remove old nox
-python3.6 -m pip uninstall --yes --quiet nox-automation
+python3 -m pip uninstall --yes --quiet nox-automation
 
 # Install nox
-python3.6 -m pip install --upgrade --quiet nox
-python3.6 -m nox --version
+python3 -m pip install --upgrade --quiet nox
+python3 -m nox --version
+
+# If this is a continuous build, send the test log to the FlakyBot.
+# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
+  cleanup() {
+    chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+    $KOKORO_GFILE_DIR/linux_amd64/flakybot
+  }
+  trap cleanup EXIT HUP
+fi
 
 # If NOX_SESSION is set, it only runs the specified session,
 # otherwise run all the sessions.
 if [[ -n "${NOX_SESSION:-}" ]]; then
-    python3.6 -m nox -s "${NOX_SESSION:-}"
+    python3 -m nox -s ${NOX_SESSION:-}
 else
-    python3.6 -m nox
+    python3 -m nox
 fi
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index ea96964f1..308bf124b 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -30,7 +30,7 @@ env_vars: {
 
 env_vars: {
     key: "V2_STAGING_BUCKET"
-    value: "docs-staging-v2-staging"
+    value: "docs-staging-v2"
 }
 
 # It will upload the docker image after successful builds.
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
index 111810782..a5a723164 100644
--- a/.kokoro/docs/docs-presubmit.cfg
+++ b/.kokoro/docs/docs-presubmit.cfg
@@ -15,3 +15,14 @@ env_vars: {
     key: "TRAMPOLINE_IMAGE_UPLOAD"
     value: "false"
 }
+
+env_vars: {
+    key: "TRAMPOLINE_BUILD_FILE"
+    value: "github/python-storage/.kokoro/build.sh"
+}
+
+# Only run this nox session.
+env_vars: {
+    key: "NOX_SESSION"
+    value: "docs docfx"
+}
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
index 268407736..7970969eb 100755
--- a/.kokoro/release.sh
+++ b/.kokoro/release.sh
@@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools
 export PYTHONUNBUFFERED=1
 
 # Move into the package, build the distribution and upload.
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google_cloud_pypi_password")
+TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token")
 cd github/python-storage
 python3 setup.py sdist bdist_wheel
-twine upload --username gcloudpypi --password "${TWINE_PASSWORD}" dist/*
+twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/*
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
index b96ee4f07..58a3ff6b8 100644
--- a/.kokoro/release/common.cfg
+++ b/.kokoro/release/common.cfg
@@ -23,18 +23,8 @@ env_vars: {
     value: "github/python-storage/.kokoro/release.sh"
 }
 
-# Fetch PyPI password
-before_action {
-  fetch_keystore {
-    keystore_resource {
-      keystore_config_id: 73713
-      keyname: "google_cloud_pypi_password"
-    }
-  }
-}
-
 # Tokens needed to report release status back to GitHub
 env_vars: {
   key: "SECRET_MANAGER_KEYS"
-  value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
-}
\ No newline at end of file
+  value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token"
+}
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
index 6287c9952..04e100210 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.6"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py36"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-storage/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg
new file mode 100644
index 000000000..f9cfcd33e
--- /dev/null
+++ b/.kokoro/samples/python3.6/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index fb9bedb81..0089e9b79 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.7"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py37"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-storage/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg
new file mode 100644
index 000000000..f9cfcd33e
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index 52a03a568..2f92d6c76 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.8"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py38"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-storage/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg
new file mode 100644
index 000000000..f9cfcd33e
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
new file mode 100755
index 000000000..aa5013db2
--- /dev/null
+++ b/.kokoro/test-samples-against-head.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A customized test runner for samples.
+#
+# For periodic builds, you can specify this file for testing against head.
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command that exits with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-storage
+
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
new file mode 100755
index 000000000..cf5de74c1
--- /dev/null
+++ b/.kokoro/test-samples-impl.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command that exits with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+  echo "No tests run. './samples' not found"
+  exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+    gcloud auth activate-service-account \
+        --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+        --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+    --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+    cd "$ROOT"
+    # Navigate to the project folder.
+    file=$(dirname "$file")
+    cd "$file"
+
+    echo "------------------------------------------------------------"
+    echo "- testing $file"
+    echo "------------------------------------------------------------"
+
+    # Use nox to execute the tests for the project.
+    python3.6 -m nox -s "$RUN_TESTS_SESSION"
+    EXIT=$?
+
+    # If this is a periodic build, send the test log to the FlakyBot.
+    # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+    if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+      chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+      $KOKORO_GFILE_DIR/linux_amd64/flakybot
+    fi
+
+    if [[ $EXIT -ne 0 ]]; then
+      RTN=1
+      echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+    else
+      echo -e "\n Testing completed.\n"
+    fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index cdf5d4e32..421439bc8 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -13,6 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# The default test runner for samples.
+#
+# For periodic builds, we rewind the repo to the latest release and
+# run test-samples-impl.sh.
 
 # `-e` enables the script to automatically fail when a command fails
 # `-o pipefail` sets the exit code to that of the rightmost command that exits with a non-zero status
@@ -24,81 +28,19 @@ cd github/python-storage
 
 # Run periodic samples tests at latest release
 if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+    # Preserving the test runner implementation.
+    cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh"
+    echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+    echo "Now we rewind the repo back to the latest release..."
     LATEST_RELEASE=$(git describe --abbrev=0 --tags)
     git checkout $LATEST_RELEASE
-fi
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Debug: show build environment
-env | grep KOKORO
-
-# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-
-# Use secrets acessor service account to get secrets
-if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
-    gcloud auth activate-service-account \
-        --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
-        --project="cloud-devrel-kokoro-resources"
-fi
-
-# This script will create 3 files:
-# - testing/test-env.sh
-# - testing/service-account.json
-# - testing/client-secrets.json
-./scripts/decrypt-secrets.sh
-
-source ./testing/test-env.sh
-export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
-
-# For cloud-run session, we activate the service account for gcloud sdk.
-gcloud auth activate-service-account \
-    --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
-
-export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
-
-echo -e "\n******************** TESTING PROJECTS ********************"
-
-# Switch to 'fail at end' to allow all tests to complete before exiting.
-set +e
-# Use RTN to return a non-zero value if the test fails.
-RTN=0
-ROOT=$(pwd)
-# Find all requirements.txt in the samples directory (may break on whitespace).
-for file in samples/**/requirements.txt; do
-    cd "$ROOT"
-    # Navigate to the project folder.
-    file=$(dirname "$file")
-    cd "$file"
-
-    echo "------------------------------------------------------------"
-    echo "- testing $file"
-    echo "------------------------------------------------------------"
-
-    # Use nox to execute the tests for the project.
-    python3.6 -m nox -s "$RUN_TESTS_SESSION"
-    EXIT=$?
-
-    # If this is a periodic build, send the test log to the FlakyBot.
-    # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
-    if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
-      chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
-      $KOKORO_GFILE_DIR/linux_amd64/flakybot
+    echo "The current head is: "
+    echo $(git rev-parse --verify HEAD)
+    echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+    # Move the test runner implementation back if the checkout removed it.
+    if [ ! -f .kokoro/test-samples-impl.sh ]; then
+      cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh
     fi
+fi
 
-    if [[ $EXIT -ne 0 ]]; then
-      RTN=1
-      echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
-    else
-      echo -e "\n Testing completed.\n"
-    fi
-
-done
-cd "$ROOT"
-
-# Workaround for Kokoro permissions issue: delete secrets
-rm testing/{test-env.sh,client-secrets.json,service-account.json}
-
-exit "$RTN"
\ No newline at end of file
+exec .kokoro/test-samples-impl.sh
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2a87c6d4d..62eb5a77d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,8 +1,22 @@
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 # See https://pre-commit.com for more information
 # See https://pre-commit.com/hooks.html for more hooks
 repos:
 -   repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v3.4.0
+    rev: v4.0.1
     hooks:
     -   id: trailing-whitespace
     -   id: end-of-file-fixer
@@ -12,6 +26,6 @@ repos:
     hooks:
     -   id: black
 -   repo: https://gitlab.com/pycqa/flake8
-    rev: 3.9.1
+    rev: 3.9.2
     hooks:
     -   id: flake8
diff --git a/.repo-metadata.json b/.repo-metadata.json
index 499d6158c..315fd7657 100644
--- a/.repo-metadata.json
+++ b/.repo-metadata.json
@@ -6,6 +6,7 @@
     "issue_tracker": "https://issuetracker.google.com/savedsearches/559782",
     "release_level": "ga",
     "language": "python",
+    "library_type": "GAPIC_MANUAL",
     "repo": "googleapis/python-storage",
     "distribution_name": "google-cloud-storage",
     "api_id": "storage.googleapis.com",
diff --git a/.trampolinerc b/.trampolinerc
index 995ee2911..383b6ec89 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -24,6 +24,7 @@ required_envvars+=(
 pass_down_envvars+=(
     "STAGING_BUCKET"
     "V2_STAGING_BUCKET"
+    "NOX_SESSION"
 )
 
 # Prevent unintentional override on the default image.
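An aside on how the NOX_SESSION plumbing above fits together (an illustrative sketch, not part of the patch): the docs-presubmit config sets NOX_SESSION to "docs docfx", .trampolinerc forwards the variable into the trampoline container, and build.sh consumes it. Leaving the expansion unquoted in build.sh is what lets a single variable name several sessions:

    # Sketch: an unquoted ${NOX_SESSION:-} word-splits, so "docs docfx"
    # becomes `nox -s docs docfx` (two sessions). A quoted expansion would
    # instead ask nox for one session literally named "docs docfx".
    export NOX_SESSION="docs docfx"
    if [[ -n "${NOX_SESSION:-}" ]]; then
        python3 -m nox -s ${NOX_SESSION:-}
    else
        python3 -m nox
    fi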
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4491c5cad..066b75505 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,27 @@
 
 [1]: https://pypi.org/project/google-cloud-storage/#history
 
+## [1.39.0](https://www.github.com/googleapis/python-storage/compare/v1.38.0...v1.39.0) (2021-06-21)
+
+
+### Features
+
+* media operation retries can be configured using the same interface as with non-media operations ([#447](https://www.github.com/googleapis/python-storage/issues/447)) ([0dbbb8a](https://www.github.com/googleapis/python-storage/commit/0dbbb8ac17a4b632707485ee6c7cc15e4670efaa))
+
+
+### Bug Fixes
+
+* add ConnectionError to default retry ([#445](https://www.github.com/googleapis/python-storage/issues/445)) ([8344253](https://www.github.com/googleapis/python-storage/commit/8344253a1969b9d04b81f87a6d7bddd3ddb55006))
+* apply idempotency policies for ACLs ([#458](https://www.github.com/googleapis/python-storage/issues/458)) ([2232f38](https://www.github.com/googleapis/python-storage/commit/2232f38933dbdfeb4f6585291794d332771ffdf2))
+* replace python lifecycle action parsing ValueError with warning ([#437](https://www.github.com/googleapis/python-storage/issues/437)) ([2532d50](https://www.github.com/googleapis/python-storage/commit/2532d506b44fc1ef0fa0a996822d29e7459c465a))
+* revise blob.compose query parameters `if_generation_match` ([#454](https://www.github.com/googleapis/python-storage/issues/454)) ([70d19e7](https://www.github.com/googleapis/python-storage/commit/70d19e72831dee112bb07f38b50beef4890c1155))
+
+
+### Documentation
+
+* streamline 'timeout' / 'retry' docs in docstrings ([#461](https://www.github.com/googleapis/python-storage/issues/461)) ([78b2eba](https://www.github.com/googleapis/python-storage/commit/78b2eba81003b437cd24f2b8d269ea2455682507))
+* streamline docstrings for conditional params ([#464](https://www.github.com/googleapis/python-storage/issues/464)) ([6999370](https://www.github.com/googleapis/python-storage/commit/69993702390322df07cc2e818003186a47524c2b))
+
 ## [1.38.0](https://www.github.com/googleapis/python-storage/compare/v1.37.1...v1.38.0) (2021-04-26)
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index b3d1f6029..039f43681 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -1,44 +1,95 @@
-# Contributor Code of Conduct
+# Code of Conduct
 
-As contributors and maintainers of this project,
-and in the interest of fostering an open and welcoming community,
-we pledge to respect all people who contribute through reporting issues,
-posting feature requests, updating documentation,
-submitting pull requests or patches, and other activities.
+## Our Pledge
 
-We are committed to making participation in this project
-a harassment-free experience for everyone,
-regardless of level of experience, gender, gender identity and expression,
-sexual orientation, disability, personal appearance,
-body size, race, ethnicity, age, religion, or nationality.
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of
+experience, education, socio-economic status, nationality, personal appearance,
+race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
 
 Examples of unacceptable behavior by participants include:
 
-* The use of sexualized language or imagery
-* Personal attacks
-* Trolling or insulting/derogatory comments
-* Public or private harassment
-* Publishing other's private information,
-such as physical or electronic
-addresses, without explicit permission
-* Other unethical or unprofessional conduct.
+* The use of sexualized language or imagery and unwelcome sexual attention or
+  advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+  address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+  professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
 
 Project maintainers have the right and responsibility to remove, edit, or reject
-comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct.
-By adopting this Code of Conduct,
-project maintainers commit themselves to fairly and consistently
-applying these principles to every aspect of managing this project.
-Project maintainers who do not follow or enforce the Code of Conduct
-may be permanently removed from the project team.
-
-This code of conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community.
-
-Instances of abusive, harassing, or otherwise unacceptable behavior
-may be reported by opening an issue
-or contacting one or more of the project maintainers.
-
-This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0,
-available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, or to ban temporarily or permanently any
+contributor for other behaviors that they deem inappropriate, threatening,
+offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+This Code of Conduct also applies outside the project spaces when the Project
+Steward has a reasonable belief that an individual's behavior may have a
+negative impact on the project or its community.
+
+## Conflict Resolution
+
+We do not believe that all conflict is bad; healthy debate and disagreement
+often yield positive results. However, it is never okay to be disrespectful or
+to engage in behavior that violates the project’s code of conduct.
+
+If you see someone violating the code of conduct, you are encouraged to address
+the behavior directly with those involved. Many issues can be resolved quickly
+and easily, and this gives people more control over the outcome of their
+dispute. If you are unable to resolve the matter for any reason, or if the
+behavior is threatening or harassing, report it. We are dedicated to providing
+an environment where participants feel welcome and safe.
+
+
+Reports should be directed to *googleapis-stewards@google.com*, the
+Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to
+receive and address reported violations of the code of conduct. They will then
+work with a committee consisting of representatives from the Open Source
+Programs Office and the Google Open Source Strategy team. If for any reason you
+are uncomfortable reaching out to the Project Steward, please email
+opensource@google.com.
+
+We will investigate every complaint, but you may not receive a direct response.
+We will use our discretion in determining when and how to follow up on reported
+incidents, which may range from not taking action to permanent expulsion from
+the project and project-sponsored spaces. We will notify the accused of the
+report and provide them an opportunity to discuss it before any action is taken.
+The identity of the reporter will be omitted from the details of the report
+supplied to the accused. In potentially harmful situations, such as ongoing
+harassment or threats to anyone's safety, we may take action without notice.
+
+## Attribution
+
+This Code of Conduct is adapted from the Contributor Covenant, version 1.4,
+available at
+https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
index a8ee855de..d64569567 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,7 @@
-                            Apache License
+
+                                 Apache License
                            Version 2.0, January 2004
-                        https://www.apache.org/licenses/
+                        http://www.apache.org/licenses/
 
    TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
@@ -192,7 +193,7 @@
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at
 
-       https://www.apache.org/licenses/LICENSE-2.0
+       http://www.apache.org/licenses/LICENSE-2.0
 
    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
diff --git a/MANIFEST.in b/MANIFEST.in
index e9e29d120..e783f4c62 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -16,10 +16,10 @@
 
 # Generated by synthtool. DO NOT EDIT!
 include README.rst LICENSE
-recursive-include google *.json *.proto
+recursive-include google *.json *.proto py.typed
 recursive-include tests *
 global-exclude *.py[co]
 global-exclude __pycache__
 
 # Exclude scripts for samples readmegen
-prune scripts/readme-gen
\ No newline at end of file
+prune scripts/readme-gen
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 000000000..8b58ae9c0
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,7 @@
+# Security Policy
+
+To report a security issue, please use [g.co/vulnz](https://g.co/vulnz).
+
+The Google Security Team will respond within 5 working days of your report on g.co/vulnz.
+
+We use g.co/vulnz for our intake, and do coordination and disclosure here using GitHub Security Advisory to privately discuss and fix the issue.
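A note on the MANIFEST.in change above (an illustrative sketch, not part of the patch): adding py.typed to the recursive-include ships the PEP 561 marker with the source distribution, which is what lets type checkers consume the package's inline annotations. Assuming a py.typed marker file exists under the google/ package tree, a quick local check could be:

    # Sketch: build an sdist and confirm the PEP 561 marker is packaged.
    # The archive name depends on the version being built.
    python3 setup.py sdist
    tar -tzf dist/google-cloud-storage-*.tar.gz | grep py.typed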
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
index 0abaf229f..b0a295464 100644
--- a/docs/_static/custom.css
+++ b/docs/_static/custom.css
@@ -1,4 +1,20 @@
 div#python2-eol {
     border-color: red;
     border-width: medium;
-}
\ No newline at end of file
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+    min-width: 100px
+}
+
+/* Insert space between methods for readability */
+dl.method {
+    padding-top: 10px;
+    padding-bottom: 10px
+}
+
+/* Insert empty space between classes */
+dl.class {
+    padding-bottom: 50px
+}
diff --git a/docs/conf.py b/docs/conf.py
index 858ffec80..a25e7b866 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,4 +1,17 @@
 # -*- coding: utf-8 -*-
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 #
 # google-cloud-storage documentation build configuration file
 #
@@ -67,9 +80,9 @@
 master_doc = "index"
 
 # General information about the project.
-project = u"google-cloud-storage"
-copyright = u"2019, Google"
-author = u"Google APIs"
+project = "google-cloud-storage"
+copyright = "2019, Google"
+author = "Google APIs"
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -268,7 +281,7 @@
     (
         master_doc,
         "google-cloud-storage.tex",
-        u"google-cloud-storage Documentation",
+        "google-cloud-storage Documentation",
         author,
         "manual",
     )
@@ -303,7 +316,7 @@
     (
         master_doc,
         "google-cloud-storage",
-        u"google-cloud-storage Documentation",
+        "google-cloud-storage Documentation",
         [author],
         1,
     )
@@ -322,7 +335,7 @@
     (
         master_doc,
         "google-cloud-storage",
-        u"google-cloud-storage Documentation",
+        "google-cloud-storage Documentation",
         author,
         "google-cloud-storage",
         "google-cloud-storage Library",
@@ -345,10 +358,13 @@
 
 # Example configuration for intersphinx: refer to the Python standard library.
 intersphinx_mapping = {
-    "python": ("http://python.readthedocs.org/en/latest/", None),
-    "google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
+    "python": ("https://python.readthedocs.org/en/latest/", None),
+    "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
     "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
-    "grpc": ("https://grpc.io/grpc/python/", None),
+    "grpc": ("https://grpc.github.io/grpc/python/", None),
+    "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
+    "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None),
+    "requests": ("https://docs.python-requests.org/en/master/", None),
 }
diff --git a/docs/generation_metageneration.rst b/docs/generation_metageneration.rst
new file mode 100644
index 000000000..287e6573a
--- /dev/null
+++ b/docs/generation_metageneration.rst
@@ -0,0 +1,127 @@
+Conditional Requests Via Generation / Metageneration Preconditions
+==================================================================
+
+Preconditions tell Cloud Storage to only perform a request if the
+:ref:`generation <concept-generation>` or
+:ref:`metageneration <concept-metageneration>` number of the affected object
+meets your precondition criteria. These checks of the generation and
+metageneration numbers ensure that the object is in the expected state,
+allowing you to perform safe read-modify-write updates and conditional
+operations on objects.
+
+Concepts
+--------
+
+.. _concept-metageneration:
+
+Metageneration
+::::::::::::::
+
+When you create a :class:`~google.cloud.storage.bucket.Bucket`,
+its :attr:`~google.cloud.storage.bucket.Bucket.metageneration` is initialized
+to ``1``, representing the initial version of the bucket's metadata.
+
+When you first upload a
+:class:`~google.cloud.storage.blob.Blob` ("Object" in the GCS back-end docs),
+its :attr:`~google.cloud.storage.blob.Blob.metageneration` is likewise
+initialized to ``1``, representing the initial version of the blob's metadata.
+
+The ``metageneration`` attribute is set by the GCS back-end, and is read-only
+in the client library.
+
+Each time you patch or update the bucket's / blob's metadata, its
+``metageneration`` is incremented.
+
+
+.. _concept-generation:
+
+Generation
+::::::::::
+
+Each time you upload a new version of a file to a
+:class:`~google.cloud.storage.blob.Blob` ("Object" in the GCS back-end docs),
+the Blob's :attr:`~google.cloud.storage.blob.Blob.generation` is changed, and its
+:attr:`~google.cloud.storage.blob.Blob.metageneration` is reset to ``1`` (the first
+metadata version for that generation of the blob).
+
+The ``generation`` attribute is set by the GCS back-end, and is read-only
+in the client library.
+
+See also
+::::::::
+
+- `Storage API Generation Precondition docs`_
+
+.. _Storage API Generation Precondition docs:
+   https://cloud.google.com/storage/docs/generations-preconditions
+
+
+Conditional Parameters
+----------------------
+
+.. _using-if-generation-match:
+
+Using ``if_generation_match``
+:::::::::::::::::::::::::::::
+
+Passing the ``if_generation_match`` parameter to a method which retrieves a
+blob resource (e.g.,
+:meth:`Blob.reload <google.cloud.storage.blob.Blob.reload>`) or modifies
+the blob (e.g.,
+:meth:`Blob.update <google.cloud.storage.blob.Blob.update>`)
+makes the operation conditional on whether the blob's current ``generation``
+matches the given value.
+
+As a special case, passing ``0`` as the value for ``if_generation_match``
+makes the operation succeed only if there are no live versions of the blob.
+
+
+.. _using-if-generation-not-match:
+
+Using ``if_generation_not_match``
+:::::::::::::::::::::::::::::::::
+
+Passing the ``if_generation_not_match`` parameter to a method which retrieves
+a blob resource (e.g.,
+:meth:`Blob.reload <google.cloud.storage.blob.Blob.reload>`) or modifies
+the blob (e.g.,
+:meth:`Blob.update <google.cloud.storage.blob.Blob.update>`)
+makes the operation conditional on whether the blob's current ``generation``
+does **not** match the given value.
+
+If no live version of the blob exists, the precondition fails.
+
+As a special case, passing ``0`` as the value for ``if_generation_not_match``
+makes the operation succeed only if there **is** a live version of the blob.
+
+
+.. _using-if-metageneration-match:
+
+Using ``if_metageneration_match``
+:::::::::::::::::::::::::::::::::
+
+Passing the ``if_metageneration_match`` parameter to a method which retrieves
+a blob or bucket resource
+(e.g., :meth:`Blob.reload <google.cloud.storage.blob.Blob.reload>`,
+:meth:`Bucket.reload <google.cloud.storage.bucket.Bucket.reload>`)
+or modifies the blob or bucket (e.g.,
+:meth:`Blob.update <google.cloud.storage.blob.Blob.update>`,
+:meth:`Bucket.patch <google.cloud.storage.bucket.Bucket.patch>`)
+makes the operation conditional on whether the resource's current
+``metageneration`` matches the given value.
+
+
+.. _using-if-metageneration-not-match:
+
+Using ``if_metageneration_not_match``
+:::::::::::::::::::::::::::::::::::::
+
+Passing the ``if_metageneration_not_match`` parameter to a method which
+retrieves a blob or bucket resource
+(e.g., :meth:`Blob.reload <google.cloud.storage.blob.Blob.reload>`,
+:meth:`Bucket.reload <google.cloud.storage.bucket.Bucket.reload>`)
+or modifies the blob or bucket (e.g.,
+:meth:`Blob.update <google.cloud.storage.blob.Blob.update>`,
+:meth:`Bucket.patch <google.cloud.storage.bucket.Bucket.patch>`)
+makes the operation conditional on whether the resource's current
+``metageneration`` does **not** match the given value.
diff --git a/docs/index.rst b/docs/index.rst
index 7a74f12cd..9ece79741 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -21,6 +21,8 @@ API Reference
   constants
   hmac_key
   notification
+  retry_timeout
+  generation_metageneration
 
 Changelog
 ---------
diff --git a/docs/retry_timeout.rst b/docs/retry_timeout.rst
new file mode 100644
index 000000000..b7fc4ff41
--- /dev/null
+++ b/docs/retry_timeout.rst
@@ -0,0 +1,152 @@
+Configuring Timeouts and Retries
+================================
+
+When using object methods which invoke Google Cloud Storage API methods,
+you have several options for how the library handles timeouts and
+how it retries transient errors.
+
+
+.. _configuring_timeouts:
+
+Configuring Timeouts
+--------------------
+
+For a number of reasons, methods which invoke API methods may take
+longer than expected or desired. By default, such methods all time out
+after a default interval of 60.0 seconds. Rather than blocking your application
+code for that interval, you may choose to configure explicit timeouts
+in your code, using one of three forms:
+
+- You can pass a single integer or float which functions as the timeout for the
+  entire request. E.g.:
+
+.. code-block:: python
+
+    bucket = client.get_bucket(BUCKET_NAME, timeout=300.0)  # five minutes
+
+- The timeout can also be passed as a two-tuple, ``(connect_timeout, read_timeout)``,
+  where the ``connect_timeout`` sets the maximum time required to establish
+  the connection to the server, and the ``read_timeout`` sets the maximum
+  time to wait for a completed response. E.g.:
+
+.. code-block:: python
+
+    bucket = client.get_bucket(BUCKET_NAME, timeout=(3, 10))
+
+
+- You can also pass ``None`` as the timeout value: in this case, the library
+  will block indefinitely for a response. E.g.:
+
+.. code-block:: python
+
+    bucket = client.get_bucket(BUCKET_NAME, timeout=None)
+
+.. note::
+   Depending on the retry strategy, a request may be
+   repeated several times using the same timeout each time.
+
+See also:
+
+   :ref:`Timeouts in requests <requests:timeouts>`
+
+
+.. _configuring_retries:
+
+Configuring Retries
+--------------------
+
+.. note::
+
+   For more background on retries, see also the
+   `GCS Retry Strategies Document <https://cloud.google.com/storage/docs/retry-strategy>`_
+
+Methods which invoke API methods may fail for a number of reasons, some of
+which represent "transient" conditions, and thus can be retried
+automatically. The library tries to provide a sensible default retry policy
+for each method, based on its semantics:
+
+- For API requests which are always idempotent, the library uses its
+  :data:`~google.cloud.storage.retry.DEFAULT_RETRY` policy, which
+  retries any API request that returns a "transient" error.
+
+- For API requests which are idempotent only if the blob has
+  the same "generation", the library uses its
+  :data:`~google.cloud.storage.retry.DEFAULT_RETRY_IF_GENERATION_SPECIFIED`
+  policy, which retries API requests that return a "transient" error,
+  but only if the original request includes an ``ifGenerationMatch`` header.
+
+- For API requests which are idempotent only if the bucket or blob has
+  the same "metageneration", the library uses its
+  :data:`~google.cloud.storage.retry.DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED`
+  policy, which retries API requests that return a "transient" error,
+  but only if the original request includes an ``ifMetagenerationMatch`` header.
+
+- For API requests which are idempotent only if the bucket or blob has
+  the same "etag", the library uses its
+  :data:`~google.cloud.storage.retry.DEFAULT_RETRY_IF_ETAG_IN_JSON`
+  policy, which retries API requests that return a "transient" error,
+  but only if the original request includes an ``ETAG`` in its payload.
+
+- For those API requests which are never idempotent, the library passes
+  ``retry=None`` by default, suppressing any retries.
+
+Rather than using one of the default policies, you may choose to configure an
+explicit policy in your code.
+
+- You can pass ``None`` as a retry policy to disable retries. E.g.:
+
+.. code-block:: python
+
+    bucket = client.get_bucket(BUCKET_NAME, retry=None)
+
+- You can pass an instance of :class:`google.api_core.retry.Retry` to enable
+  retries; the passed object will define retriable response codes and errors,
+  as well as configuring backoff and retry interval options. E.g.:
+
+.. code-block:: python
+
+    from google.api_core import exceptions
+    from google.api_core.retry import Retry
+
+    _MY_RETRIABLE_TYPES = (
+        exceptions.TooManyRequests,  # 429
+        exceptions.InternalServerError,  # 500
+        exceptions.BadGateway,  # 502
+        exceptions.ServiceUnavailable,  # 503
+    )
+
+    def is_retryable(exc):
+        return isinstance(exc, _MY_RETRIABLE_TYPES)
+
+    my_retry_policy = Retry(predicate=is_retryable)
+    bucket = client.get_bucket(BUCKET_NAME, retry=my_retry_policy)
+
+- You can pass an instance of
+  :class:`google.cloud.storage.retry.ConditionalRetryPolicy`, which wraps a
+  :class:`~google.cloud.storage.retry.RetryPolicy`, activating it only if
+  certain conditions are met. This class exists to provide safe defaults
+  for RPC calls that are not technically safe to retry normally (due to
+  potential data duplication or other side-effects) but become safe to retry
+  if a condition such as ``if_metageneration_match`` is set. E.g.:
+
+.. code-block:: python
+
+    from google.api_core.retry import Retry
+    from google.cloud.storage.retry import ConditionalRetryPolicy
+    from google.cloud.storage.retry import is_etag_in_json
+
+    def is_retryable(exc):
+        ...  # as above
+
+    my_retry_policy = Retry(predicate=is_retryable)
+    my_cond_policy = ConditionalRetryPolicy(
+        my_retry_policy, conditional_predicate=is_etag_in_json)
+    bucket = client.get_bucket(BUCKET_NAME, retry=my_cond_policy)
+
+
+Retry Module API
+----------------
+
+.. automodule:: google.cloud.storage.retry
+   :members:
+   :show-inheritance:
diff --git a/google/cloud/storage/_helpers.py b/google/cloud/storage/_helpers.py
index 338b79861..ff5767de7 100644
--- a/google/cloud/storage/_helpers.py
+++ b/google/cloud/storage/_helpers.py
@@ -23,6 +23,7 @@ import os
 
 from six.moves.urllib.parse import urlsplit
 
+from google import resumable_media
 from google.cloud.storage.constants import _DEFAULT_TIMEOUT
 from google.cloud.storage.retry import DEFAULT_RETRY
 from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED
@@ -45,6 +46,12 @@
     ("if_source_metageneration_not_match", "ifSourceMetagenerationNotMatch"),
 )
 
+_NUM_RETRIES_MESSAGE = (
+    "`num_retries` has been deprecated and will be removed in a future "
+    "release. Use the `retry` argument with a Retry or ConditionalRetryPolicy "
+    "object, or None, instead."
+)
+
 
 def _get_storage_host():
     return os.environ.get(STORAGE_EMULATOR_ENV_VAR, _DEFAULT_STORAGE_HOST)
@@ -140,11 +147,11 @@ def reload(
         self,
         client=None,
         projection="noAcl",
-        timeout=_DEFAULT_TIMEOUT,
         if_generation_match=None,
         if_generation_not_match=None,
         if_metageneration_match=None,
         if_metageneration_not_match=None,
+        timeout=_DEFAULT_TIMEOUT,
         retry=DEFAULT_RETRY,
     ):
         """Reload properties from Cloud Storage.
@@ -161,47 +168,30 @@
             Defaults to ``'noAcl'``. Specifies the set of properties to return.
 
-        :type timeout: float or tuple
-        :param timeout: (Optional) The amount of time, in seconds, to wait
-                        for the server response.
-
-                        Can also be passed as a tuple (connect_timeout, read_timeout).
-                        See :meth:`requests.Session.request` documentation for details.
-
         :type if_generation_match: long
-        :param if_generation_match: (Optional) Make the operation conditional on whether
-                                    the blob's current generation matches the given value.
-                                    Setting to 0 makes the operation succeed only if there
-                                    are no live versions of the blob.
+        :param if_generation_match:
+            (Optional) See :ref:`using-if-generation-match`
 
         :type if_generation_not_match: long
-        :param if_generation_not_match: (Optional) Make the operation conditional on whether
-                                        the blob's current generation does not match the given
-                                        value. If no live blob exists, the precondition fails.
-                                        Setting to 0 makes the operation succeed only if there
-                                        is a live version of the blob.
+        :param if_generation_not_match:
+            (Optional) See :ref:`using-if-generation-not-match`
 
         :type if_metageneration_match: long
-        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
-                                        blob's current metageneration matches the given value.
+        :param if_metageneration_match:
+            (Optional) See :ref:`using-if-metageneration-match`
 
         :type if_metageneration_not_match: long
-        :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
-                                            blob's current metageneration does not match the given value.
+        :param if_metageneration_not_match:
+            (Optional) See :ref:`using-if-metageneration-not-match`
+
+        :type timeout: float or tuple
+        :param timeout:
+            (Optional) The amount of time, in seconds, to wait
+            for the server response. See: :ref:`configuring_timeouts`
 
         :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
-        :param retry: (Optional) How to retry the RPC. A None value will disable retries.
-            A google.api_core.retry.Retry value will enable retries, and the object will
-            define retriable response codes and errors and configure backoff and timeout options.
-
-            A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
-            activates it only if certain conditions are met. This class exists to provide safe defaults
-            for RPC calls that are not technically safe to retry normally (due to potential data
-            duplication or other side-effects) but become safe to retry if a condition such as
-            if_metageneration_match is set.
-
-            See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for
-            information on retry types and how to configure them.
+        :param retry:
+            (Optional) How to retry the RPC. See: :ref:`configuring_retries`
         """
         client = self._require_client(client)
         query_params = self._query_params
@@ -215,14 +205,13 @@
             if_metageneration_match=if_metageneration_match,
             if_metageneration_not_match=if_metageneration_not_match,
         )
-        api_response = client._connection.api_request(
-            method="GET",
-            path=self.path,
+        api_response = client._get_resource(
+            self.path,
             query_params=query_params,
             headers=self._encryption_headers(),
-            _target_object=self,
             timeout=timeout,
             retry=retry,
+            _target_object=self,
         )
         self._set_properties(api_response)
@@ -257,11 +246,11 @@ def _set_properties(self, value):
     def patch(
         self,
         client=None,
-        timeout=_DEFAULT_TIMEOUT,
         if_generation_match=None,
         if_generation_not_match=None,
         if_metageneration_match=None,
        if_metageneration_not_match=None,
+        timeout=_DEFAULT_TIMEOUT,
         retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
     ):
         """Sends all changed properties in a PATCH request.
@@ -275,47 +264,30 @@
         :param client: the client to use.  If not passed, falls back to the
                        ``client`` stored on the current object.
 
-        :type timeout: float or tuple
-        :param timeout: (Optional) The amount of time, in seconds, to wait
-                        for the server response.
-
-                        Can also be passed as a tuple (connect_timeout, read_timeout).
-                        See :meth:`requests.Session.request` documentation for details.
-
         :type if_generation_match: long
-        :param if_generation_match: (Optional) Make the operation conditional on whether
-                                    the blob's current generation matches the given value.
-                                    Setting to 0 makes the operation succeed only if there
-                                    are no live versions of the blob.
+        :param if_generation_match:
+            (Optional) See :ref:`using-if-generation-match`
 
         :type if_generation_not_match: long
-        :param if_generation_not_match: (Optional) Make the operation conditional on whether
-                                        the blob's current generation does not match the given
-                                        value. If no live blob exists, the precondition fails.
-                                        Setting to 0 makes the operation succeed only if there
-                                        is a live version of the blob.
+        :param if_generation_not_match:
+            (Optional) See :ref:`using-if-generation-not-match`
 
         :type if_metageneration_match: long
-        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
-                                        blob's current metageneration matches the given value.
+        :param if_metageneration_match:
+            (Optional) See :ref:`using-if-metageneration-match`
 
         :type if_metageneration_not_match: long
-        :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
-                                            blob's current metageneration does not match the given value.
+        :param if_metageneration_not_match:
+            (Optional) See :ref:`using-if-metageneration-not-match`
+
+        :type timeout: float or tuple
+        :param timeout:
+            (Optional) The amount of time, in seconds, to wait
+            for the server response. See: :ref:`configuring_timeouts`
 
         :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
-        :param retry: (Optional) How to retry the RPC. A None value will disable retries.
-            A google.api_core.retry.Retry value will enable retries, and the object will
-            define retriable response codes and errors and configure backoff and timeout options.
-
-            A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
-            activates it only if certain conditions are met. This class exists to provide safe defaults
-            for RPC calls that are not technically safe to retry normally (due to potential data
-            duplication or other side-effects) but become safe to retry if a condition such as
-            if_metageneration_match is set.
-
-            See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for
-            information on retry types and how to configure them.
+        :param retry:
+            (Optional) How to retry the RPC. See: :ref:`configuring_retries`
         """
         client = self._require_client(client)
         query_params = self._query_params
@@ -332,10 +304,9 @@
         update_properties = {key: self._properties[key] for key in self._changes}
 
         # Make the API call.
-        api_response = client._connection.api_request(
-            method="PATCH",
-            path=self.path,
-            data=update_properties,
+        api_response = client._patch_resource(
+            self.path,
+            update_properties,
             query_params=query_params,
             _target_object=self,
             timeout=timeout,
@@ -346,11 +317,11 @@
     def update(
         self,
         client=None,
-        timeout=_DEFAULT_TIMEOUT,
         if_generation_match=None,
         if_generation_not_match=None,
         if_metageneration_match=None,
         if_metageneration_not_match=None,
+        timeout=_DEFAULT_TIMEOUT,
         retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
     ):
         """Sends all properties in a PUT request.
@@ -364,47 +335,30 @@
         :param client: the client to use.  If not passed, falls back to the
                        ``client`` stored on the current object.
 
-        :type timeout: float or tuple
-        :param timeout: (Optional) The amount of time, in seconds, to wait
-                        for the server response.
-
-                        Can also be passed as a tuple (connect_timeout, read_timeout).
-                        See :meth:`requests.Session.request` documentation for details.
-
         :type if_generation_match: long
-        :param if_generation_match: (Optional) Make the operation conditional on whether
-                                    the blob's current generation matches the given value.
-                                    Setting to 0 makes the operation succeed only if there
-                                    are no live versions of the blob.
+        :param if_generation_match:
+            (Optional) See :ref:`using-if-generation-match`
 
         :type if_generation_not_match: long
-        :param if_generation_not_match: (Optional) Make the operation conditional on whether
-                                        the blob's current generation does not match the given
-                                        value. If no live blob exists, the precondition fails.
-                                        Setting to 0 makes the operation succeed only if there
-                                        is a live version of the blob.
+        :param if_generation_not_match:
+            (Optional) See :ref:`using-if-generation-not-match`
 
         :type if_metageneration_match: long
-        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
-                                        blob's current metageneration matches the given value.
+        :param if_metageneration_match:
+            (Optional) See :ref:`using-if-metageneration-match`
 
         :type if_metageneration_not_match: long
-        :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
-                                            blob's current metageneration does not match the given value.
+        :param if_metageneration_not_match:
+            (Optional) See :ref:`using-if-metageneration-not-match`
+
+        :type timeout: float or tuple
+        :param timeout:
+            (Optional) The amount of time, in seconds, to wait
+            for the server response. See: :ref:`configuring_timeouts`
 
         :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
-        :param retry: (Optional) How to retry the RPC. A None value will disable retries.
-            A google.api_core.retry.Retry value will enable retries, and the object will
-            define retriable response codes and errors and configure backoff and timeout options.
-
-            A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
-            activates it only if certain conditions are met. This class exists to provide safe defaults
-            for RPC calls that are not technically safe to retry normally (due to potential data
-            duplication or other side-effects) but become safe to retry if a condition such as
-            if_metageneration_match is set.
-
-            See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for
-            information on retry types and how to configure them.
+        :param retry:
+            (Optional) How to retry the RPC. See: :ref:`configuring_retries`
         """
         client = self._require_client(client)
@@ -418,14 +372,13 @@
             if_metageneration_match=if_metageneration_match,
             if_metageneration_not_match=if_metageneration_not_match,
         )
 
-        api_response = client._connection.api_request(
-            method="PUT",
-            path=self.path,
-            data=self._properties,
+        api_response = client._put_resource(
+            self.path,
+            self._properties,
             query_params=query_params,
-            _target_object=self,
             timeout=timeout,
             retry=retry,
+            _target_object=self,
         )
         self._set_properties(api_response)
@@ -563,3 +516,37 @@ def _bucket_bound_hostname_url(host, scheme=None):
         return host
 
     return "{scheme}://{host}/".format(scheme=scheme, host=host)
+
+
+def _api_core_retry_to_resumable_media_retry(retry, num_retries=None):
+    """Convert google.api_core.retry.Retry to google.resumable_media.RetryStrategy.
+
+    Custom predicates are not translated.
+
+    :type retry: google.api_core.retry.Retry
+    :param retry: (Optional) The google.api_core.retry.Retry object to translate.
+
+    :type num_retries: int
+    :param num_retries: (Optional) The number of retries desired. This is
+        supported for backwards compatibility and is mutually exclusive with
+        `retry`.
+
+    :rtype: google.resumable_media.RetryStrategy
+    :returns: A RetryStrategy with all applicable attributes copied from input,
+        or a RetryStrategy with max_retries set to 0 if None was input.
+ """ + + if retry is not None and num_retries is not None: + raise ValueError("num_retries and retry arguments are mutually exclusive") + + elif retry is not None: + return resumable_media.RetryStrategy( + max_sleep=retry._maximum, + max_cumulative_retry=retry._deadline, + initial_delay=retry._initial, + multiplier=retry._multiplier, + ) + elif num_retries is not None: + return resumable_media.RetryStrategy(max_retries=num_retries) + else: + return resumable_media.RetryStrategy(max_retries=0) diff --git a/google/cloud/storage/acl.py b/google/cloud/storage/acl.py index 55c12c9b8..a17e4f09e 100644 --- a/google/cloud/storage/acl.py +++ b/google/cloud/storage/acl.py @@ -85,6 +85,7 @@ """ from google.cloud.storage.constants import _DEFAULT_TIMEOUT +from google.cloud.storage.retry import DEFAULT_RETRY class _ACLEntity(object): @@ -206,6 +207,7 @@ class ACL(object): # Subclasses must override to provide these attributes (typically, # as properties). + client = None reload_path = None save_path = None user_project = None @@ -217,11 +219,9 @@ def _ensure_loaded(self, timeout=_DEFAULT_TIMEOUT): """Load if not already loaded. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` """ if not self.loaded: self.reload(timeout=timeout) @@ -430,7 +430,7 @@ def _require_client(self, client): client = self.client return client - def reload(self, client=None, timeout=_DEFAULT_TIMEOUT): + def reload(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): """Reload the ACL data from Cloud Storage. If :attr:`user_project` is set, bills the API request to that project. @@ -440,11 +440,13 @@ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT): :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the ACL's parent. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :type retry: :class:`~google.api_core.retry.Retry` + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ path = self.reload_path client = self._require_client(client) @@ -455,10 +457,11 @@ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT): self.entities.clear() - found = client._connection.api_request( - method="GET", path=path, query_params=query_params, timeout=timeout, + found = client._get_resource( + path, query_params=query_params, timeout=timeout, retry=retry, ) self.loaded = True + for entry in found.get("items", ()): self.add_entity(self.entity_from_dict(entry)) @@ -477,14 +480,19 @@ def _save(self, acl, predefined, client, timeout=_DEFAULT_TIMEOUT): ``NoneType`` :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the ACL's parent. + :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. 
+ :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :type retry: :class:`~google.api_core.retry.Retry` + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ + client = self._require_client(client) query_params = {"projection": "full"} + if predefined is not None: acl = [] query_params[self._PREDEFINED_QUERY_PARAM] = predefined @@ -493,18 +501,20 @@ def _save(self, acl, predefined, client, timeout=_DEFAULT_TIMEOUT): query_params["userProject"] = self.user_project path = self.save_path - client = self._require_client(client) - result = client._connection.api_request( - method="PATCH", - path=path, - data={self._URL_PATH_ELEM: list(acl)}, + result = client._patch_resource( + path, + {self._URL_PATH_ELEM: list(acl)}, query_params=query_params, timeout=timeout, + retry=None, ) + self.entities.clear() + for entry in result.get(self._URL_PATH_ELEM, ()): self.add_entity(self.entity_from_dict(entry)) + self.loaded = True def save(self, acl=None, client=None, timeout=_DEFAULT_TIMEOUT): @@ -520,12 +530,11 @@ def save(self, acl=None, client=None, timeout=_DEFAULT_TIMEOUT): ``NoneType`` :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the ACL's parent. - :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` """ if acl is None: acl = self @@ -552,12 +561,11 @@ def save_predefined(self, predefined, client=None, timeout=_DEFAULT_TIMEOUT): ``NoneType`` :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the ACL's parent. - :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` """ predefined = self.validate_predefined(predefined) self._save(None, predefined, client, timeout=timeout) @@ -576,12 +584,11 @@ def clear(self, client=None, timeout=_DEFAULT_TIMEOUT): ``NoneType`` :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the ACL's parent. - :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. 
See: :ref:`configuring_timeouts` """ self.save([], client=client, timeout=timeout) diff --git a/google/cloud/storage/batch.py b/google/cloud/storage/batch.py index d40fdc6f5..732439f14 100644 --- a/google/cloud/storage/batch.py +++ b/google/cloud/storage/batch.py @@ -181,11 +181,9 @@ def _do_request( initialization of the object at a later time. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :rtype: tuple of ``response`` (a dictionary of sorts) and ``content`` (a string). diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index 66cc1d153..60178aa2e 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -65,8 +65,10 @@ from google.cloud.storage._helpers import _bucket_bound_hostname_url from google.cloud.storage._helpers import _convert_to_timestamp from google.cloud.storage._helpers import _raise_if_more_than_one_set +from google.cloud.storage._helpers import _api_core_retry_to_resumable_media_retry from google.cloud.storage._signing import generate_signed_url_v2 from google.cloud.storage._signing import generate_signed_url_v4 +from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE from google.cloud.storage.acl import ACL from google.cloud.storage.acl import ObjectACL from google.cloud.storage.constants import _DEFAULT_TIMEOUT @@ -76,9 +78,11 @@ from google.cloud.storage.constants import NEARLINE_STORAGE_CLASS from google.cloud.storage.constants import REGIONAL_LEGACY_STORAGE_CLASS from google.cloud.storage.constants import STANDARD_STORAGE_CLASS +from google.cloud.storage.retry import ConditionalRetryPolicy from google.cloud.storage.retry import DEFAULT_RETRY from google.cloud.storage.retry import DEFAULT_RETRY_IF_ETAG_IN_JSON from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED +from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED from google.cloud.storage.fileio import BlobReader from google.cloud.storage.fileio import BlobWriter @@ -105,17 +109,6 @@ "name", "storageClass", ) -_NUM_RETRIES_MESSAGE = ( - "`num_retries` has been deprecated and will be removed in a future " - "release. The default behavior (when `num_retries` is not specified) when " - "a transient error (e.g. 429 Too Many Requests or 500 Internal Server " - "Error) occurs will be as follows: upload requests will be automatically " - "retried if and only if `if_metageneration_match` is specified (thus " - "making the upload idempotent). Subsequent retries will be sent after " - "waiting 1, 2, 4, 8, etc. seconds (exponential backoff) until 10 minutes " - "of wait time have elapsed. At that point, there will be no more attempts " - "to retry." -) _READ_LESS_THAN_SIZE = ( "Size {:d} was specified but the file-like object only had " "{:d} bytes remaining." ) @@ -123,6 +116,25 @@ "A checksum of type `{}` was requested, but checksumming is not available " "for downloads when chunk_size is set." ) +_COMPOSE_IF_GENERATION_LIST_DEPRECATED = ( + "'if_generation_match: type list' is deprecated and supported for " + "backwards-compatibility reasons only.
Use 'if_source_generation_match' " + "instead to match source objects' generations." +) +_COMPOSE_IF_GENERATION_LIST_AND_IF_SOURCE_GENERATION_ERROR = ( + "Use 'if_generation_match' to match the generation of the destination " + "object by passing in a generation number, instead of a list. " + "Use 'if_source_generation_match' to match source objects' generations." +) +_COMPOSE_IF_METAGENERATION_LIST_DEPRECATED = ( + "'if_metageneration_match: type list' is deprecated and supported for " + "backwards-compatibility reasons only. Note that the metageneration to " + "be matched is that of the destination blob. Please pass in a single " + "value (type long)." +) +_COMPOSE_IF_SOURCE_GENERATION_MISMATCH_ERROR = ( + "'if_source_generation_match' length must be the same as 'sources' length" +) _DEFAULT_CHUNKSIZE = 104857600 # 1024 * 1024 B * 100 = 100 MB @@ -623,11 +635,11 @@ def generate_signed_url( def exists( self, client=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, ): """Determines whether or not this blob exists. @@ -640,50 +652,30 @@ def exists( (Optional) The client to use. If not passed, falls back to the ``client`` stored on the blob's bucket. - :type timeout: float or tuple - :param timeout: - (Optional) The amount of time, in seconds, to wait for the server - response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. - :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. - - :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. + (Optional) See :ref:`using-if-metageneration-not-match` - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set.
+ :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: bool :returns: True if the blob exists in Cloud Storage. @@ -704,29 +696,28 @@ def exists( try: # We intentionally pass `_target_object=None` since fields=name # would limit the local properties. - client._connection.api_request( - method="GET", - path=self.path, + client._get_resource( + self.path, query_params=query_params, - _target_object=None, timeout=timeout, retry=retry, + _target_object=None, ) + except NotFound: # NOTE: This will not fail immediately in a batch. However, when # Batch.finish() is called, the resulting `NotFound` will be # raised. - return True - except NotFound: return False + return True def delete( self, client=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ): """Deletes a blob from Cloud Storage. @@ -739,50 +730,30 @@ def delete( (Optional) The client to use. If not passed, falls back to the ``client`` stored on the blob's bucket. - :type timeout: float or tuple - :param timeout: - (Optional) The amount of time, in seconds, to wait for the server - response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. - :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. - - :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. + (Optional) See :ref:`using-if-metageneration-not-match` - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. 
This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises: :class:`google.cloud.exceptions.NotFound` (propagated from @@ -835,25 +806,19 @@ def _get_download_url( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` + :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :rtype: str :returns: The download URL for the current blob. @@ -919,6 +884,7 @@ def _do_download( raw_download=False, timeout=_DEFAULT_TIMEOUT, checksum="md5", + retry=None, ): """Perform a download without any error handling. @@ -952,11 +918,8 @@ def _do_download( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -968,7 +931,25 @@ def _do_download( downloads where chunk_size is set) an INFO-level log will be emitted. Supported values are "md5", "crc32c" and None. The default is "md5". + + :type retry: google.api_core.retry.Retry + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will configure backoff and timeout options. Custom + predicates (customizable error codes) are not supported for media + operations such as this one. + + This private method does not accept ConditionalRetryPolicy values + because the information necessary to evaluate the policy is instead + evaluated in client.download_blob_to_file(). 
+ + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. """ + + retry_strategy = _api_core_retry_to_resumable_media_retry(retry) + if self.chunk_size is None: if raw_download: klass = RawDownload @@ -983,6 +964,7 @@ def _do_download( end=end, checksum=checksum, ) + download._retry_strategy = retry_strategy response = download.consume(transport, timeout=timeout) self._extract_headers_from_download(response) else: @@ -1005,6 +987,7 @@ def _do_download( end=end, ) + download._retry_strategy = retry_strategy while not download.finished: download.consume_next_chunk(transport, timeout=timeout) @@ -1021,6 +1004,7 @@ def download_to_file( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, checksum="md5", + retry=DEFAULT_RETRY, ): """DEPRECATED. Download the contents of this blob into a file-like object. @@ -1071,33 +1055,24 @@ def download_to_file( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` + :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -1110,6 +1085,28 @@ def download_to_file( emitted. Supported values are "md5", "crc32c" and None. The default is "md5". + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. 
+ + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. + :raises: :class:`google.cloud.exceptions.NotFound` """ client = self._require_client(client) @@ -1126,6 +1123,7 @@ def download_to_file( if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) def download_to_filename( @@ -1141,6 +1139,7 @@ def download_to_filename( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, checksum="md5", + retry=DEFAULT_RETRY, ): """Download the contents of this blob into a named file. @@ -1167,33 +1166,24 @@ def download_to_filename( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` + :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -1206,6 +1196,28 @@ def download_to_filename( emitted. Supported values are "md5", "crc32c" and None. The default is "md5". + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. 
+ + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. + :raises: :class:`google.cloud.exceptions.NotFound` """ client = self._require_client(client) @@ -1223,6 +1235,7 @@ def download_to_filename( if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) except resumable_media.DataCorruption: # Delete the corrupt downloaded file. @@ -1249,6 +1262,7 @@ def download_as_bytes( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, checksum="md5", + retry=DEFAULT_RETRY, ): """Download the contents of this blob as a bytes object. @@ -1272,33 +1286,24 @@ def download_as_bytes( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` + :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -1311,6 +1316,28 @@ def download_as_bytes( emitted. Supported values are "md5", "crc32c" and None. The default is "md5". + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. 
+ + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. + :rtype: bytes :returns: The data stored in this blob. @@ -1330,6 +1357,7 @@ def download_as_bytes( if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) return string_buffer.getvalue() @@ -1344,6 +1372,7 @@ def download_as_string( if_metageneration_match=None, if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY, ): """(Deprecated) Download the contents of this blob as a bytes object. @@ -1370,33 +1399,46 @@ def download_as_string( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` + :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` + + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. 
+ + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. :rtype: bytes :returns: The data stored in this blob. @@ -1419,6 +1461,7 @@ def download_as_string( if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, + retry=retry, ) def download_as_text( @@ -1433,6 +1476,7 @@ def download_as_text( if_metageneration_match=None, if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY, ): """Download the contents of this blob as text (*not* bytes). @@ -1461,34 +1505,46 @@ def download_as_text( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` + + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. 
Other + configuration changes for Retry objects such as delays and deadlines + are respected. :rtype: text :returns: The data stored in this blob, decoded to text. @@ -1503,6 +1559,7 @@ def download_as_text( if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, + retry=retry, ) if encoding is not None: @@ -1615,6 +1672,7 @@ def _do_multipart_upload( if_metageneration_not_match, timeout=_DEFAULT_TIMEOUT, checksum=None, + retry=None, ): """Perform a multipart upload. @@ -1656,34 +1714,24 @@ def _do_multipart_upload( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -1693,6 +1741,21 @@ def _do_multipart_upload( manually-set checksum value. Supported values are "md5", "crc32c" and None. The default is None. + :type retry: google.api_core.retry.Retry + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will configure backoff and timeout options. Custom + predicates (customizable error codes) are not supported for media + operations such as this one. + + This private method does not accept ConditionalRetryPolicy values + because the information necessary to evaluate the policy is instead + evaluated in blob._do_upload(). + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + :rtype: :class:`~requests.Response` :returns: The "200 OK" response object returned after the multipart upload request.
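Taken together, the download-path changes give every public download method a ``retry`` argument defaulting to ``DEFAULT_RETRY``. A minimal caller-side sketch (the bucket and object names here are hypothetical, not part of this diff):

from google.api_core.retry import Retry
from google.cloud import storage

client = storage.Client()
blob = client.bucket("my-bucket").blob("data.csv")  # hypothetical names

# Default: transient errors are retried under DEFAULT_RETRY.
data = blob.download_as_bytes()

# Custom backoff and deadline; note that custom *predicates* are ignored
# for media operations, per the docstrings above.
data = blob.download_as_bytes(retry=Retry(initial=1.0, maximum=30.0, deadline=120.0))

# Disable retries entirely.
data = blob.download_as_bytes(retry=None)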
@@ -1754,10 +1817,9 @@ def _do_multipart_upload( upload_url = _add_query_parameters(base_url, name_value_pairs) upload = MultipartUpload(upload_url, headers=headers, checksum=checksum) - if num_retries is not None: - upload._retry_strategy = resumable_media.RetryStrategy( - max_retries=num_retries - ) + upload._retry_strategy = _api_core_retry_to_resumable_media_retry( + retry, num_retries + ) response = upload.transmit( transport, data, object_metadata, content_type, timeout=timeout @@ -1781,6 +1843,7 @@ def _initiate_resumable_upload( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, checksum=None, + retry=None, ): """Initiate a resumable upload. @@ -1835,34 +1898,24 @@ def _initiate_resumable_upload( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -1874,6 +1927,21 @@ def _initiate_resumable_upload( delete the uploaded object automatically. Supported values are "md5", "crc32c" and None. The default is None. + :type retry: google.api_core.retry.Retry + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will configure backoff and timeout options. Custom + predicates (customizable error codes) are not supported for media + operations such as this one. + + This private method does not accept ConditionalRetryPolicy values + because the information necessary to evaluate the policy is instead + evaluated in blob._do_upload(). + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them.
+ :rtype: tuple + :returns: Pair of @@ -1938,10 +2006,9 @@ def _initiate_resumable_upload( upload_url, chunk_size, headers=headers, checksum=checksum ) - if num_retries is not None: - upload._retry_strategy = resumable_media.RetryStrategy( - max_retries=num_retries - ) + upload._retry_strategy = _api_core_retry_to_resumable_media_retry( + retry, num_retries + ) upload.initiate( transport, @@ -1969,6 +2036,7 @@ def _do_resumable_upload( if_metageneration_not_match, timeout=_DEFAULT_TIMEOUT, checksum=None, + retry=None, ): """Perform a resumable upload. @@ -2013,34 +2081,24 @@ def _do_resumable_upload( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -2052,6 +2110,21 @@ def _do_resumable_upload( delete the uploaded object automatically. Supported values are "md5", "crc32c" and None. The default is None. + :type retry: google.api_core.retry.Retry + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will configure backoff and timeout options. Custom + predicates (customizable error codes) are not supported for media + operations such as this one. + + This private method does not accept ConditionalRetryPolicy values + because the information necessary to evaluate the policy is instead + evaluated in blob._do_upload(). + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + :rtype: :class:`~requests.Response` :returns: The "200 OK" response object returned after the final chunk is uploaded.
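Both upload helpers above now route ``retry`` (or the deprecated ``num_retries``) through ``_api_core_retry_to_resumable_media_retry``. A short sketch of the translation it performs, following the attribute mapping in the helper itself:

from google.api_core.retry import Retry
from google.cloud.storage._helpers import _api_core_retry_to_resumable_media_retry

custom = Retry(initial=1.5, multiplier=2.0, maximum=32.0, deadline=300.0)
strategy = _api_core_retry_to_resumable_media_retry(custom)
# A google.resumable_media.RetryStrategy with initial_delay=1.5,
# multiplier=2.0, max_sleep=32.0, max_cumulative_retry=300.0; any custom
# predicate on `custom` is dropped (predicates are not translated).

legacy = _api_core_retry_to_resumable_media_retry(None, num_retries=3)
# A RetryStrategy with max_retries=3; passing both arguments raises
# ValueError, as the helper enforces.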
@@ -2069,6 +2142,7 @@ def _do_resumable_upload( if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) while not upload.finished: @@ -2095,6 +2169,7 @@ def _do_upload( if_metageneration_not_match, timeout=_DEFAULT_TIMEOUT, checksum=None, + retry=None, ): """Determine an upload strategy and then perform the upload. @@ -2140,34 +2215,24 @@ def _do_upload( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -2182,19 +2247,45 @@ def _do_upload( attempting to delete the corrupted file. Supported values are "md5", "crc32c" and None. The default is None. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. + :rtype: dict :returns: The parsed JSON from the "200 OK" response. 
This will be the **only** response in the multipart case and it will be the **final** response in the resumable case. """ - if if_metageneration_match is None and num_retries is None: - # Uploads are only idempotent (safe to retry) if - # if_metageneration_match is set. If it is not set, the default - # num_retries should be 0. Note: Because retry logic for uploads is - # provided by the google-resumable-media-python package, it doesn't - # use the ConditionalRetryStrategy class used in other API calls in - # this library to solve this problem. - num_retries = 0 + + # Handle ConditionalRetryPolicy. + if isinstance(retry, ConditionalRetryPolicy): + # Conditional retries are designed for non-media calls, which change + # arguments into query_params dictionaries. Media operations work + # differently, so here we make a "fake" query_params to feed to the + # ConditionalRetryPolicy. + query_params = { + "ifGenerationMatch": if_generation_match, + "ifMetagenerationMatch": if_metageneration_match, + } + retry = retry.get_retry_policy_if_conditions_met(query_params=query_params) if size is not None and size <= _MAX_MULTIPART_SIZE: response = self._do_multipart_upload( @@ -2210,6 +2301,7 @@ def _do_upload( if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) else: response = self._do_resumable_upload( @@ -2225,6 +2317,7 @@ def _do_upload( if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) return response.json() @@ -2244,6 +2337,7 @@ def upload_from_file( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, checksum=None, + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, ): """Upload the contents of this blob from a file-like object. @@ -2319,34 +2413,24 @@ def upload_from_file( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. 
See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -2361,6 +2445,28 @@ def upload_from_file( attempting to delete the corrupted file. Supported values are "md5", "crc32c" and None. The default is None. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. + :raises: :class:`~google.cloud.exceptions.GoogleCloudError` if the upload response returns an error status. @@ -2370,6 +2476,11 @@ def upload_from_file( """ if num_retries is not None: warnings.warn(_NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2) + # num_retries and retry are mutually exclusive. If num_retries is + # set and retry is exactly the default, then nullify retry for + # backwards compatibility. + if retry is DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED: + retry = None _maybe_rewind(file_obj, rewind=rewind) predefined_acl = ACL.validate_predefined(predefined_acl) @@ -2388,6 +2499,7 @@ def upload_from_file( if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) self._set_properties(created_json) except resumable_media.InvalidResponse as exc: @@ -2406,6 +2518,7 @@ def upload_from_filename( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, checksum=None, + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, ): """Upload this blob's contents from the content of a named file. @@ -2456,34 +2569,24 @@ def upload_from_filename( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. 
+ (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -2497,6 +2600,28 @@ def upload_from_filename( google.resumable_media.common.DataCorruption on a mismatch and attempting to delete the corrupted file. Supported values are "md5", "crc32c" and None. The default is None. + + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. """ content_type = self._get_content_type(content_type, filename=filename) @@ -2515,6 +2640,7 @@ def upload_from_filename( if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) def upload_from_string( @@ -2530,6 +2656,7 @@ def upload_from_string( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, checksum=None, + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, ): """Upload contents of this blob from the provided string. @@ -2576,34 +2703,24 @@ def upload_from_string( :type if_generation_match: long :param if_generation_match: - (Optional) Make the operation conditional on whether the blob's - current generation matches the given value. Setting to 0 makes the - operation succeed only if there are no live versions of the blob. + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Make the operation conditional on whether the blob's - current generation does not match the given value. If no live blob - exists, the precondition fails. Setting to 0 makes the operation - succeed only if there is a live version of the blob. 
+ (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration matches the given value. + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Make the operation conditional on whether the blob's - current metageneration does not match the given value. + (Optional) See :ref:`using-if-metageneration-not-match` :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -2617,6 +2734,28 @@ def upload_from_string( google.resumable_media.common.DataCorruption on a mismatch and attempting to delete the corrupted file. Supported values are "md5", "crc32c" and None. The default is None. + + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. """ data = _to_bytes(data, encoding="utf-8") string_buffer = BytesIO(data) @@ -2633,6 +2772,7 @@ def upload_from_string( if_metageneration_not_match=if_metageneration_not_match, timeout=timeout, checksum=checksum, + retry=retry, ) def create_resumable_upload_session( @@ -2705,11 +2845,8 @@ def create_resumable_upload_session( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type checksum: str :param checksum: @@ -2795,25 +2932,12 @@ def get_iam_policy( :type timeout: float or tuple :param timeout: - (Optional) The amount of time, in seconds, to wait for the server - response. - - Can also be passed as a tuple (connect_timeout, read_timeout). 
- See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: :class:`google.api_core.iam.Policy` :returns: the policy instance, based on the resource returned from @@ -2829,13 +2953,12 @@ def get_iam_policy( if requested_policy_version is not None: query_params["optionsRequestedPolicyVersion"] = requested_policy_version - info = client._connection.api_request( - method="GET", - path="%s/iam" % (self.path,), + info = client._get_resource( + "%s/iam" % (self.path,), query_params=query_params, - _target_object=None, timeout=timeout, retry=retry, + _target_object=None, ) return Policy.from_api_repr(info) @@ -2869,25 +2992,12 @@ def set_iam_policy( :type timeout: float or tuple :param timeout: - (Optional) The amount of time, in seconds, to wait for the server - response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. 
See: :ref:`configuring_retries` :rtype: :class:`google.api_core.iam.Policy` :returns: the policy instance, based on the resource returned from @@ -2900,16 +3010,16 @@ def set_iam_policy( if self.user_project is not None: query_params["userProject"] = self.user_project + path = "{}/iam".format(self.path) resource = policy.to_api_repr() resource["resourceId"] = self.path - info = client._connection.api_request( - method="PUT", - path="%s/iam" % (self.path,), + info = client._put_resource( + path, + resource, query_params=query_params, - data=resource, - _target_object=None, timeout=timeout, retry=retry, + _target_object=None, ) return Policy.from_api_repr(info) @@ -2939,25 +3049,12 @@ def test_iam_permissions( :type timeout: float or tuple :param timeout: - (Optional) The amount of time, in seconds, to wait for the server - response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: list of string :returns: the permissions returned by the ``testIamPermissions`` API @@ -2970,37 +3067,48 @@ def test_iam_permissions( query_params["userProject"] = self.user_project path = "%s/iam/testPermissions" % (self.path,) - resp = client._connection.api_request( - method="GET", - path=path, + resp = client._get_resource( + path, query_params=query_params, timeout=timeout, retry=retry, + _target_object=None, ) return resp.get("permissions", []) - def make_public(self, client=None): + def make_public(self, client=None, timeout=_DEFAULT_TIMEOUT): """Update blob's ACL, granting read access to anonymous users. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the blob's bucket. + + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` + """ self.acl.all().grant_read() - self.acl.save(client=client) + self.acl.save(client=client, timeout=timeout) - def make_private(self, client=None): + def make_private(self, client=None, timeout=_DEFAULT_TIMEOUT): """Update blob's ACL, revoking read access for anonymous users. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the blob's bucket. 
+
+        :type timeout: float or tuple
+        :param timeout:
+            (Optional) The amount of time, in seconds, to wait
+            for the server response. See: :ref:`configuring_timeouts`
         """
         self.acl.all().revoke_read()
-        self.acl.save(client=client)
+        self.acl.save(client=client, timeout=timeout)
 
     def compose(
         self,
@@ -3009,6 +3117,7 @@
         timeout=_DEFAULT_TIMEOUT,
         if_generation_match=None,
         if_metageneration_match=None,
+        if_source_generation_match=None,
         retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
     ):
         """Concatenate source blobs into this one.
@@ -3025,89 +3134,99 @@
             ``client`` stored on the blob's bucket.
 
         :type timeout: float or tuple
-        :param timeout: (Optional) The amount of time, in seconds, to wait
-            for the server response.
-
-            Can also be passed as a tuple (connect_timeout, read_timeout).
-            See :meth:`requests.Session.request` documentation for details.
+        :param timeout:
+            (Optional) The amount of time, in seconds, to wait
+            for the server response. See: :ref:`configuring_timeouts`
 
-        :type if_generation_match: list of long
+        :type if_generation_match: long
         :param if_generation_match:
-            (Optional) Make the operation conditional on whether the blob's
-            current generation matches the given value. Setting to 0 makes the
-            operation succeed only if there are no live versions of the blob.
-            The list must match ``sources`` item-to-item.
+            (Optional) Makes the operation conditional on whether the
+            destination object's current generation matches the given value.
+            Setting to 0 makes the operation succeed only if there are no live
+            versions of the object.
+
+            .. note::
 
-        :type if_metageneration_match: list of long
+                In a previous version, this argument worked identically to the
+                ``if_source_generation_match`` argument. For
+                backwards-compatibility reasons, if a list is passed in,
+                this argument will behave like ``if_source_generation_match``
+                and also issue a DeprecationWarning.
+
+        :type if_metageneration_match: long
         :param if_metageneration_match:
-            (Optional) Make the operation conditional on whether the blob's
-            current metageneration matches the given value. The list must match
-            ``sources`` item-to-item.
+            (Optional) Makes the operation conditional on whether the
+            destination object's current metageneration matches the given
+            value.
 
-        :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
-        :param retry: (Optional) How to retry the RPC. A None value will disable retries.
-            A google.api_core.retry.Retry value will enable retries, and the object will
-            define retriable response codes and errors and configure backoff and timeout options.
+            If a list of long is passed in, no match operation will be
+            performed. (Deprecated: passing a list of long is supported for
+            backwards-compatibility reasons only.)
 
-            A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
-            activates it only if certain conditions are met. This class exists to provide safe defaults
-            for RPC calls that are not technically safe to retry normally (due to potential data
-            duplication or other side-effects) but become safe to retry if a condition such as
-            if_metageneration_match is set.
+        :type if_source_generation_match: list of long
+        :param if_source_generation_match:
+            (Optional) Makes the operation conditional on whether the current
+            generation of each source blob matches the corresponding generation.
+            The list must match ``sources`` item-to-item.
- See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` Example: - Compose blobs using generation match preconditions. + Compose blobs using source generation match preconditions. >>> from google.cloud import storage >>> client = storage.Client() >>> bucket = client.bucket("bucket-name") >>> blobs = [bucket.blob("blob-name-1"), bucket.blob("blob-name-2")] - >>> if_generation_match = [None] * len(blobs) - >>> if_generation_match[0] = "123" # precondition for "blob-name-1" + >>> if_source_generation_match = [None] * len(blobs) + >>> if_source_generation_match[0] = "123" # precondition for "blob-name-1" >>> composed_blob = bucket.blob("composed-name") - >>> composed_blob.compose(blobs, if_generation_match) + >>> composed_blob.compose(blobs, if_source_generation_match=if_source_generation_match) """ sources_len = len(sources) - if if_generation_match is not None and len(if_generation_match) != sources_len: - raise ValueError( - "'if_generation_match' length must be the same as 'sources' length" + client = self._require_client(client) + query_params = {} + + if isinstance(if_generation_match, list): + warnings.warn( + _COMPOSE_IF_GENERATION_LIST_DEPRECATED, + DeprecationWarning, + stacklevel=2, ) - if ( - if_metageneration_match is not None - and len(if_metageneration_match) != sources_len - ): - raise ValueError( - "'if_metageneration_match' length must be the same as 'sources' length" + if if_source_generation_match is not None: + raise ValueError( + _COMPOSE_IF_GENERATION_LIST_AND_IF_SOURCE_GENERATION_ERROR + ) + + if_source_generation_match = if_generation_match + if_generation_match = None + + if isinstance(if_metageneration_match, list): + warnings.warn( + _COMPOSE_IF_METAGENERATION_LIST_DEPRECATED, + DeprecationWarning, + stacklevel=2, ) - client = self._require_client(client) - query_params = {} + if_metageneration_match = None - if self.user_project is not None: - query_params["userProject"] = self.user_project + if if_source_generation_match is None: + if_source_generation_match = [None] * sources_len + if len(if_source_generation_match) != sources_len: + raise ValueError(_COMPOSE_IF_SOURCE_GENERATION_MISMATCH_ERROR) source_objects = [] - for index, source in enumerate(sources): - source_object = {"name": source.name} + for source, source_generation in zip(sources, if_source_generation_match): + source_object = {"name": source.name, "generation": source.generation} preconditions = {} - if ( - if_generation_match is not None - and if_generation_match[index] is not None - ): - preconditions["ifGenerationMatch"] = if_generation_match[index] - - if ( - if_metageneration_match is not None - and if_metageneration_match[index] is not None - ): - preconditions["ifMetagenerationMatch"] = if_metageneration_match[index] + if source_generation is not None: + preconditions["ifGenerationMatch"] = source_generation if preconditions: source_object["objectPreconditions"] = preconditions @@ -3118,14 +3237,23 @@ def compose( "sourceObjects": source_objects, "destination": self._properties.copy(), } - api_response = client._connection.api_request( - method="POST", - path=self.path + "/compose", + + if self.user_project is not None: + query_params["userProject"] = self.user_project + + _add_generation_match_parameters( + query_params, + 
if_generation_match=if_generation_match, + if_metageneration_match=if_metageneration_match, + ) + + api_response = client._post_resource( + "{}/compose".format(self.path), + request, query_params=query_params, - data=request, - _target_object=self, timeout=timeout, retry=retry, + _target_object=self, ) self._set_properties(api_response) @@ -3134,7 +3262,6 @@ def rewrite( source, token=None, client=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, @@ -3143,6 +3270,7 @@ def rewrite( if_source_generation_not_match=None, if_source_metageneration_match=None, if_source_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ): """Rewrite source blob into this one. @@ -3164,40 +3292,29 @@ def rewrite( (Optional) The client to use. If not passed, falls back to the ``client`` stored on the blob's bucket. - :type timeout: float or tuple - :param timeout: - (Optional) The amount of time, in seconds, to wait for the server - response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. - :type if_generation_match: long :param if_generation_match: - (Optional) Makes the operation conditional on whether the - destination object's current generation matches the given value. - Setting to 0 makes the operation succeed only if there are no live - versions of the object. + (Optional) See :ref:`using-if-generation-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Makes the operation conditional on whether the - destination object's current generation does not match the given - value. If no live object exists, the precondition fails. Setting to - 0 makes the operation succeed only if there is a live version of - the object. + (Optional) See :ref:`using-if-generation-not-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Makes the operation conditional on whether the - destination object's current metageneration matches the given - value. + (Optional) See :ref:`using-if-metageneration-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Makes the operation conditional on whether the - destination object's current metageneration does not match the - given value. + (Optional) See :ref:`using-if-metageneration-not-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. :type if_source_generation_match: long :param if_source_generation_match: @@ -3219,19 +3336,14 @@ def rewrite( (Optional) Makes the operation conditional on whether the source object's current metageneration does not match the given value. - :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. 
This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: tuple :returns: ``(token, bytes_rewritten, total_bytes)``, where ``token`` @@ -3269,15 +3381,15 @@ def rewrite( if_source_metageneration_not_match=if_source_metageneration_not_match, ) - api_response = client._connection.api_request( - method="POST", - path=source.path + "/rewriteTo" + self.path, + path = "{}/rewriteTo{}".format(source.path, self.path) + api_response = client._post_resource( + path, + self._properties, query_params=query_params, - data=self._properties, headers=headers, - _target_object=self, timeout=timeout, retry=retry, + _target_object=self, ) rewritten = int(api_response["totalBytesRewritten"]) size = int(api_response["objectSize"]) @@ -3334,30 +3446,27 @@ def update_storage_class( :type if_generation_match: long :param if_generation_match: - (Optional) Makes the operation conditional on whether the - destination object's current generation matches the given value. - Setting to 0 makes the operation succeed only if there are no live - versions of the object. + (Optional) See :ref:`using-if-generation-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_generation_not_match: long :param if_generation_not_match: - (Optional) Makes the operation conditional on whether the - destination object's current generation does not match the given - value. If no live object exists, the precondition fails. Setting to - 0 makes the operation succeed only if there is a live version of - the object. + (Optional) See :ref:`using-if-generation-not-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_metageneration_match: long :param if_metageneration_match: - (Optional) Makes the operation conditional on whether the - destination object's current metageneration matches the given - value. + (Optional) See :ref:`using-if-metageneration-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. :type if_metageneration_not_match: long :param if_metageneration_not_match: - (Optional) Makes the operation conditional on whether the - destination object's current metageneration does not match the - given value. + (Optional) See :ref:`using-if-metageneration-not-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. :type if_source_generation_match: long :param if_source_generation_match: @@ -3381,25 +3490,12 @@ def update_storage_class( :type timeout: float or tuple :param timeout: - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. 
+ (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ if new_class not in self.STORAGE_CLASSES: raise ValueError("Invalid storage class: %s" % (new_class,)) @@ -3505,12 +3601,30 @@ def open( newline mode" and writes use the system default. See the Python 'io' module documentation for 'io.TextIOWrapper' for details. - :param kwargs: Keyword arguments to pass to the underlying API calls. + :param kwargs: + Keyword arguments to pass to the underlying API calls. For both uploads and downloads, the following arguments are - supported: "if_generation_match", "if_generation_not_match", - "if_metageneration_match", "if_metageneration_not_match", "timeout". + supported: + + - ``if_generation_match`` + - ``if_generation_not_match`` + - ``if_metageneration_match`` + - ``if_metageneration_not_match`` + - ``timeout`` + - ``retry`` + For uploads only, the following additional arguments are supported: - "content_type", "num_retries", "predefined_acl", "checksum". + + - ``content_type`` + - ``num_retries`` + - ``predefined_acl`` + - ``checksum`` + + .. note:: + + ``num_retries`` is supported for backwards-compatibility + reasons only; please use ``retry`` with a Retry object or + ConditionalRetryPolicy instead. :returns: A 'BlobReader' or 'BlobWriter' from 'google.cloud.storage.fileio', or an 'io.TextIOWrapper' around one diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py index 889a65888..0dc4ef76d 100644 --- a/google/cloud/storage/bucket.py +++ b/google/cloud/storage/bucket.py @@ -17,14 +17,12 @@ import base64 import copy import datetime -import functools import json import warnings import six from six.moves.urllib.parse import urlsplit -from google.api_core import page_iterator from google.api_core import datetime_helpers from google.cloud._helpers import _datetime_to_rfc3339 from google.cloud._helpers import _NOW @@ -739,11 +737,9 @@ def exists( to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. 
See: :ref:`configuring_timeouts` :type if_metageneration_match: long :param if_metageneration_match: (Optional) Make the operation conditional on whether the @@ -754,18 +750,8 @@ def exists( blob's current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: bool :returns: True if the bucket exists in Cloud Storage. @@ -786,20 +772,19 @@ def exists( try: # We intentionally pass `_target_object=None` since fields=name # would limit the local properties. - client._connection.api_request( - method="GET", - path=self.path, + client._get_resource( + self.path, query_params=query_params, - _target_object=None, timeout=timeout, retry=retry, + _target_object=None, ) + except NotFound: # NOTE: This will not fail immediately in a batch. However, when # Batch.finish() is called, the resulting `NotFound` will be # raised. - return True - except NotFound: return False + return True def create( self, @@ -851,25 +836,13 @@ def create( https://cloud.google.com/storage/docs/access-control/lists#predefined-acl :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ warnings.warn( "Bucket.create() is deprecated and will be removed in future." @@ -910,11 +883,9 @@ def update( ``client`` stored on the current object. 
:type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_metageneration_match: long :param if_metageneration_match: (Optional) Make the operation conditional on whether the @@ -925,18 +896,8 @@ def update( blob's current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ super(Bucket, self).update( client=client, @@ -970,11 +931,9 @@ def reload( properties to return. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_metageneration_match: long :param if_metageneration_match: (Optional) Make the operation conditional on whether the @@ -985,18 +944,8 @@ def reload( blob's current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ super(Bucket, self).reload( client=client, @@ -1027,11 +976,9 @@ def patch( ``client`` stored on the current object. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. 
- - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_metageneration_match: long :param if_metageneration_match: (Optional) Make the operation conditional on whether the @@ -1042,18 +989,8 @@ def patch( blob's current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ # Special case: For buckets, it is possible that labels are being # removed; this requires special handling. @@ -1066,9 +1003,9 @@ def patch( # Call the superclass method. super(Bucket, self).patch( client=client, - timeout=timeout, if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, + timeout=timeout, retry=retry, ) @@ -1108,11 +1045,11 @@ def get_blob( client=None, encryption_key=None, generation=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, **kwargs ): @@ -1142,50 +1079,33 @@ def get_blob( https://cloud.google.com/storage/docs/encryption#customer-supplied. :type generation: long - :param generation: (Optional) If present, selects a specific revision of - this object. - - :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param generation: + (Optional) If present, selects a specific revision of this object. :type if_generation_match: long - :param if_generation_match: (Optional) Make the operation conditional on whether - the blob's current generation matches the given value. - Setting to 0 makes the operation succeed only if there - are no live versions of the blob. + :param if_generation_match: + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long - :param if_generation_not_match: (Optional) Make the operation conditional on whether - the blob's current generation does not match the given - value. If no live blob exists, the precondition fails. - Setting to 0 makes the operation succeed only if there - is a live version of the blob. 
+ :param if_generation_not_match: + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long - :param if_metageneration_match: (Optional) Make the operation conditional on whether the - blob's current metageneration matches the given value. + :param if_metageneration_match: + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long - :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the - blob's current metageneration does not match the given value. - - :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. + :param if_metageneration_not_match: + (Optional) See :ref:`using-if-metageneration-not-match` - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :param kwargs: Keyword arguments to pass to the :class:`~google.cloud.storage.blob.Blob` constructor. @@ -1303,25 +1223,13 @@ def list_blobs( to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. 
See: :ref:`configuring_retries` :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: Iterator of all :class:`~google.cloud.storage.blob.Blob` @@ -1368,39 +1276,21 @@ def list_notifications( :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: list of :class:`.BucketNotification` :returns: notification instances """ client = self._require_client(client) path = self.path + "/notificationConfigs" - api_request = functools.partial( - client._connection.api_request, timeout=timeout, retry=retry - ) - iterator = page_iterator.HTTPIterator( - client=client, - api_request=api_request, - path=path, - item_to_value=_item_to_notification, + iterator = client._list_resource( + path, _item_to_notification, timeout=timeout, retry=retry, ) iterator.bucket = self return iterator @@ -1427,25 +1317,13 @@ def get_notification( :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. 
- - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: :class:`.BucketNotification` :returns: notification instance. @@ -1467,9 +1345,9 @@ def delete( self, force=False, client=None, - timeout=_DEFAULT_TIMEOUT, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY, ): """Delete this bucket. @@ -1497,13 +1375,6 @@ def delete( :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. - :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response on each request. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. - :type if_metageneration_match: long :param if_metageneration_match: (Optional) Make the operation conditional on whether the blob's current metageneration matches the given value. @@ -1512,19 +1383,14 @@ def delete( :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the blob's current metageneration does not match the given value. - :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises: :class:`ValueError` if ``force`` is ``True`` and the bucket contains more than 256 objects / blobs. @@ -1546,6 +1412,7 @@ def delete( max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, client=client, timeout=timeout, + retry=retry, ) ) if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION: @@ -1559,19 +1426,22 @@ def delete( # Ignore 404 errors on delete. self.delete_blobs( - blobs, on_error=lambda blob: None, client=client, timeout=timeout + blobs, + on_error=lambda blob: None, + client=client, + timeout=timeout, + retry=retry, ) # We intentionally pass `_target_object=None` since a DELETE # request has no response value (whether in a standard request or # in a batch request). 
- client._connection.api_request( - method="DELETE", - path=self.path, + client._delete_resource( + self.path, query_params=query_params, - _target_object=None, timeout=timeout, retry=retry, + _target_object=None, ) def delete_blob( @@ -1579,11 +1449,11 @@ def delete_blob( blob_name, client=None, generation=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ): """Deletes a blob from the current bucket. @@ -1612,47 +1482,30 @@ def delete_blob( :param generation: (Optional) If present, permanently deletes a specific revision of this object. - :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. - :type if_generation_match: long - :param if_generation_match: (Optional) Make the operation conditional on whether - the blob's current generation matches the given value. - Setting to 0 makes the operation succeed only if there - are no live versions of the blob. + :param if_generation_match: + (Optional) See :ref:`using-if-generation-match` :type if_generation_not_match: long - :param if_generation_not_match: (Optional) Make the operation conditional on whether - the blob's current generation does not match the given - value. If no live blob exists, the precondition fails. - Setting to 0 makes the operation succeed only if there - is a live version of the blob. + :param if_generation_not_match: + (Optional) See :ref:`using-if-generation-not-match` :type if_metageneration_match: long - :param if_metageneration_match: (Optional) Make the operation conditional on whether the - blob's current metageneration matches the given value. + :param if_metageneration_match: + (Optional) See :ref:`using-if-metageneration-match` :type if_metageneration_not_match: long - :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the - blob's current metageneration does not match the given value. + :param if_metageneration_not_match: + (Optional) See :ref:`using-if-metageneration-not-match` - :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: + (Optional) How to retry the RPC. 
See: :ref:`configuring_retries`
 
        :raises: :class:`google.cloud.exceptions.NotFound`
            (to suppress the exception, call ``delete_blobs``, passing a no-op
@@ -1678,13 +1531,12 @@
        # We intentionally pass `_target_object=None` since a DELETE
        # request has no response value (whether in a standard request or
        # in a batch request).
-        client._connection.api_request(
-            method="DELETE",
-            path=blob.path,
+        client._delete_resource(
+            blob.path,
            query_params=query_params,
-            _target_object=None,
            timeout=timeout,
            retry=retry,
+            _target_object=None,
        )
 
    def delete_blobs(
@@ -1719,52 +1571,35 @@ def delete_blobs(
        :param client: (Optional) The client to use. If not passed, falls back
            to the ``client`` stored on the current bucket.
 
-        :type timeout: float or tuple
-        :param timeout: (Optional) The amount of time, in seconds, to wait
-            for the server response. The timeout applies to each individual
-            blob delete request.
-
-            Can also be passed as a tuple (connect_timeout, read_timeout).
-            See :meth:`requests.Session.request` documentation for details.
-
        :type if_generation_match: list of long
-        :param if_generation_match: (Optional) Make the operation conditional on whether
-                                    the blob's current generation matches the given value.
-                                    Setting to 0 makes the operation succeed only if there
-                                    are no live versions of the blob. The list must match
-                                    ``blobs`` item-to-item.
+        :param if_generation_match:
+            (Optional) See :ref:`using-if-generation-match`
+            The list must match ``blobs`` item-to-item.
 
        :type if_generation_not_match: list of long
-        :param if_generation_not_match: (Optional) Make the operation conditional on whether
-                                        the blob's current generation does not match the given
-                                        value. If no live blob exists, the precondition fails.
-                                        Setting to 0 makes the operation succeed only if there
-                                        is a live version of the blob. The list must match
-                                        ``blobs`` item-to-item.
+        :param if_generation_not_match:
+            (Optional) See :ref:`using-if-generation-not-match`
+            The list must match ``blobs`` item-to-item.
 
        :type if_metageneration_match: list of long
-        :param if_metageneration_match: (Optional) Make the operation conditional on whether the
-                                        blob's current metageneration matches the given value.
-                                        The list must match ``blobs`` item-to-item.
+        :param if_metageneration_match:
+            (Optional) See :ref:`using-if-metageneration-match`
+            The list must match ``blobs`` item-to-item.
 
        :type if_metageneration_not_match: list of long
-        :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
-                                            blob's current metageneration does not match the given value.
-                                            The list must match ``blobs`` item-to-item.
-
-        :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy
-        :param retry: (Optional) How to retry the RPC. A None value will disable retries.
-            A google.api_core.retry.Retry value will enable retries, and the object will
-            define retriable response codes and errors and configure backoff and timeout options.
+        :param if_metageneration_not_match:
+            (Optional) See :ref:`using-if-metageneration-not-match`
+            The list must match ``blobs`` item-to-item.
 
-            A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and
-            activates it only if certain conditions are met.
This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises: :class:`~google.cloud.exceptions.NotFound` (if `on_error` is not passed). @@ -1803,11 +1638,11 @@ def delete_blobs( self.delete_blob( blob_name, client=client, - timeout=timeout, if_generation_match=next(if_generation_match, None), if_generation_not_match=next(if_generation_not_match, None), if_metageneration_match=next(if_metageneration_match, None), if_metageneration_not_match=next(if_metageneration_not_match, None), + timeout=timeout, retry=retry, ) except NotFound: @@ -1824,7 +1659,6 @@ def copy_blob( client=None, preserve_acl=True, source_generation=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, @@ -1833,6 +1667,7 @@ def copy_blob( if_source_generation_not_match=None, if_source_metageneration_match=None, if_source_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ): """Copy the given blob to the given bucket, optionally with a new name. @@ -1863,83 +1698,58 @@ def copy_blob( :param source_generation: (Optional) The generation of the blob to be copied. - :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. - :type if_generation_match: long - :param if_generation_match: (Optional) Makes the operation - conditional on whether the destination - object's current generation matches the - given value. Setting to 0 makes the - operation succeed only if there are no - live versions of the object. + :param if_generation_match: + (Optional) See :ref:`using-if-generation-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_generation_not_match: long - :param if_generation_not_match: (Optional) Makes the operation - conditional on whether the - destination object's current - generation does not match the given - value. If no live object exists, - the precondition fails. Setting to - 0 makes the operation succeed only - if there is a live version - of the object. + :param if_generation_not_match: + (Optional) See :ref:`using-if-generation-not-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_metageneration_match: long - :param if_metageneration_match: (Optional) Makes the operation - conditional on whether the - destination object's current - metageneration matches the given - value. + :param if_metageneration_match: + (Optional) See :ref:`using-if-metageneration-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. 
:type if_metageneration_not_match: long - :param if_metageneration_not_match: (Optional) Makes the operation - conditional on whether the - destination object's current - metageneration does not match - the given value. + :param if_metageneration_not_match: + (Optional) See :ref:`using-if-metageneration-not-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. :type if_source_generation_match: long - :param if_source_generation_match: (Optional) Makes the operation - conditional on whether the source - object's generation matches the - given value. + :param if_source_generation_match: + (Optional) Makes the operation conditional on whether the source + object's generation matches the given value. :type if_source_generation_not_match: long - :param if_source_generation_not_match: (Optional) Makes the operation - conditional on whether the source - object's generation does not match - the given value. + :param if_source_generation_not_match: + (Optional) Makes the operation conditional on whether the source + object's generation does not match the given value. :type if_source_metageneration_match: long - :param if_source_metageneration_match: (Optional) Makes the operation - conditional on whether the source - object's current metageneration - matches the given value. + :param if_source_metageneration_match: + (Optional) Makes the operation conditional on whether the source + object's current metageneration matches the given value. :type if_source_metageneration_not_match: long - :param if_source_metageneration_not_match: (Optional) Makes the operation - conditional on whether the source - object's current metageneration - does not match the given value. + :param if_source_metageneration_not_match: + (Optional) Makes the operation conditional on whether the source + object's current metageneration does not match the given value. - :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: :class:`google.cloud.storage.blob.Blob` :returns: The new Blob. 
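Since the rewritten ``copy_blob`` docstring stresses that the plain ``if_*`` preconditions apply to the *destination* while the ``if_source_*`` family applies to the source, a short usage sketch may help; the bucket names, object names, and generation value are hypothetical. Note that supplying ``if_generation_match`` is also what lets the new ``DEFAULT_RETRY_IF_GENERATION_SPECIFIED`` default actually retry the copy.

```python
from google.cloud import storage

client = storage.Client()
source_bucket = client.bucket("src-bucket")       # hypothetical
destination_bucket = client.bucket("dst-bucket")  # hypothetical
blob = source_bucket.blob("reports/2021.csv")     # hypothetical

new_blob = source_bucket.copy_blob(
    blob,
    destination_bucket,
    new_name="reports/2021-copy.csv",
    if_source_generation_match=123456789,  # source must be unchanged
    if_generation_match=0,                 # destination must not yet exist
)
print(new_blob.name)
```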
@@ -1984,13 +1794,13 @@ def copy_blob( new_blob = Blob(bucket=destination_bucket, name=new_name) api_path = blob.path + "/copyTo" + new_blob.path - copy_result = client._connection.api_request( - method="POST", - path=api_path, + copy_result = client._post_resource( + api_path, + None, query_params=query_params, - _target_object=new_blob, timeout=timeout, retry=retry, + _target_object=new_blob, ) if not preserve_acl: @@ -2004,7 +1814,6 @@ def rename_blob( blob, new_name, client=None, - timeout=_DEFAULT_TIMEOUT, if_generation_match=None, if_generation_not_match=None, if_metageneration_match=None, @@ -2013,6 +1822,7 @@ def rename_blob( if_source_generation_not_match=None, if_source_metageneration_match=None, if_source_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ): """Rename the given blob using copy and delete operations. @@ -2042,88 +1852,62 @@ def rename_blob( :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. - :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. The timeout applies to each individual - request. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. - :type if_generation_match: long - :param if_generation_match: (Optional) Makes the operation - conditional on whether the destination - object's current generation matches the - given value. Setting to 0 makes the - operation succeed only if there are no - live versions of the object. + :param if_generation_match: + (Optional) See :ref:`using-if-generation-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_generation_not_match: long - :param if_generation_not_match: (Optional) Makes the operation - conditional on whether the - destination object's current - generation does not match the given - value. If no live object exists, - the precondition fails. Setting to - 0 makes the operation succeed only - if there is a live version - of the object. + :param if_generation_not_match: + (Optional) See :ref:`using-if-generation-not-match` + Note that the generation to be matched is that of the + ``destination`` blob. :type if_metageneration_match: long - :param if_metageneration_match: (Optional) Makes the operation - conditional on whether the - destination object's current - metageneration matches the given - value. + :param if_metageneration_match: + (Optional) See :ref:`using-if-metageneration-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. :type if_metageneration_not_match: long - :param if_metageneration_not_match: (Optional) Makes the operation - conditional on whether the - destination object's current - metageneration does not match - the given value. + :param if_metageneration_not_match: + (Optional) See :ref:`using-if-metageneration-not-match` + Note that the metageneration to be matched is that of the + ``destination`` blob. :type if_source_generation_match: long - :param if_source_generation_match: (Optional) Makes the operation - conditional on whether the source - object's generation matches the - given value. Also used in the - delete request. + :param if_source_generation_match: + (Optional) Makes the operation conditional on whether the source + object's generation matches the given value. Also used in the + (implied) delete request. 
:type if_source_generation_not_match: long - :param if_source_generation_not_match: (Optional) Makes the operation - conditional on whether the source - object's generation does not match - the given value. Also used in the - delete request. + :param if_source_generation_not_match: + (Optional) Makes the operation conditional on whether the source + object's generation does not match the given value. Also used in + the (implied) delete request. :type if_source_metageneration_match: long - :param if_source_metageneration_match: (Optional) Makes the operation - conditional on whether the source - object's current metageneration - matches the given value.Also used in the - delete request. + :param if_source_metageneration_match: + (Optional) Makes the operation conditional on whether the source + object's current metageneration matches the given value. Also used + in the (implied) delete request. :type if_source_metageneration_not_match: long - :param if_source_metageneration_not_match: (Optional) Makes the operation - conditional on whether the source - object's current metageneration - does not match the given value. - Also used in the delete request. - - :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. + :param if_source_metageneration_not_match: + (Optional) Makes the operation conditional on whether the source + object's current metageneration does not match the given value. + Also used in the (implied) delete request. - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. + :type timeout: float or tuple + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: :class:`Blob` :returns: The newly-renamed blob. @@ -2361,7 +2145,13 @@ def lifecycle_rules(self): elif action_type == "SetStorageClass": yield LifecycleRuleSetStorageClass.from_api_repr(rule) else: - raise ValueError("Unknown lifecycle rule: {}".format(rule)) + warnings.warn( + "Unknown lifecycle rule type received: {}. Please upgrade to the latest version of google-cloud-storage.".format( + rule + ), + UserWarning, + stacklevel=1, + ) @lifecycle_rules.setter def lifecycle_rules(self, rules): @@ -2819,25 +2609,13 @@ def get_iam_policy( feature syntax in the policy fetched. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. 
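One behavioral change buried in this stretch of the diff: ``lifecycle_rules`` now emits a ``UserWarning`` for unrecognized rule types instead of raising ``ValueError``, so iteration survives server-side rule types the installed client predates. A hedged sketch of observing that (the bucket name is hypothetical):

```python
import warnings

from google.cloud import storage

client = storage.Client()
bucket = client.get_bucket("my-bucket")  # hypothetical bucket name

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    rules = list(bucket.lifecycle_rules)  # unknown rule types are skipped

for warning in caught:
    print(warning.category.__name__, warning.message)
```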
+ :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: :class:`google.api_core.iam.Policy` :returns: the policy instance, based on the resource returned from @@ -2876,13 +2654,12 @@ def get_iam_policy( if requested_policy_version is not None: query_params["optionsRequestedPolicyVersion"] = requested_policy_version - info = client._connection.api_request( - method="GET", - path="%s/iam" % (self.path,), + info = client._get_resource( + "%s/iam" % (self.path,), query_params=query_params, - _target_object=None, timeout=timeout, retry=retry, + _target_object=None, ) return Policy.from_api_repr(info) @@ -2909,25 +2686,13 @@ def set_iam_policy( to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. 
See: :ref:`configuring_retries` :rtype: :class:`google.api_core.iam.Policy` :returns: the policy instance, based on the resource returned from @@ -2939,17 +2704,19 @@ def set_iam_policy( if self.user_project is not None: query_params["userProject"] = self.user_project + path = "{}/iam".format(self.path) resource = policy.to_api_repr() resource["resourceId"] = self.path - info = client._connection.api_request( - method="PUT", - path="%s/iam" % (self.path,), + + info = client._put_resource( + path, + resource, query_params=query_params, - data=resource, - _target_object=None, timeout=timeout, retry=retry, + _target_object=None, ) + return Policy.from_api_repr(info) def test_iam_permissions( @@ -2971,25 +2738,13 @@ def test_iam_permissions( to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: list of string :returns: the permissions returned by the ``testIamPermissions`` API @@ -3002,22 +2757,17 @@ def test_iam_permissions( query_params["userProject"] = self.user_project path = "%s/iam/testPermissions" % (self.path,) - resp = client._connection.api_request( - method="GET", - path=path, + resp = client._get_resource( + path, query_params=query_params, timeout=timeout, retry=retry, + _target_object=None, ) return resp.get("permissions", []) def make_public( - self, - recursive=False, - future=False, - client=None, - timeout=_DEFAULT_TIMEOUT, - retry=DEFAULT_RETRY, + self, recursive=False, future=False, client=None, timeout=_DEFAULT_TIMEOUT, ): """Update bucket's ACL, granting read access to anonymous users. @@ -3034,26 +2784,9 @@ def make_public( :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. The timeout applies to each underlying - request. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. - - :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. 
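The IAM methods in this region now delegate to ``_get_resource`` and ``_put_resource``, but their public surface is unchanged. A hedged round-trip sketch, with the bucket name and role binding purely illustrative:

```python
from google.cloud import storage

client = storage.Client()
bucket = client.bucket("my-bucket")  # hypothetical bucket name

policy = bucket.get_iam_policy(requested_policy_version=3)
policy.bindings.append(
    {"role": "roles/storage.objectViewer", "members": {"allUsers"}}
)
bucket.set_iam_policy(policy)

print(bucket.test_iam_permissions(["storage.buckets.get"]))
```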
- A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :raises ValueError: If ``recursive`` is True, and the bucket contains more than 256 @@ -3080,7 +2813,6 @@ def make_public( max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, client=client, timeout=timeout, - retry=retry, ) ) if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION: @@ -3098,12 +2830,7 @@ def make_public( blob.acl.save(client=client, timeout=timeout) def make_private( - self, - recursive=False, - future=False, - client=None, - timeout=_DEFAULT_TIMEOUT, - retry=DEFAULT_RETRY, + self, recursive=False, future=False, client=None, timeout=_DEFAULT_TIMEOUT, ): """Update bucket's ACL, revoking read access for anonymous users. @@ -3121,26 +2848,9 @@ def make_private( to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. The timeout applies to each underlying - request. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. - - :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :raises ValueError: If ``recursive`` is True, and the bucket contains more than 256 @@ -3167,7 +2877,6 @@ def make_private( max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, client=client, timeout=timeout, - retry=retry, ) ) if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION: @@ -3220,7 +2929,7 @@ def generate_upload_policy(self, conditions, expiration=None, client=None): to attach the signature. 
""" client = self._require_client(client) - credentials = client._base_connection.credentials + credentials = client._credentials _signing.ensure_signed_credentials(credentials) if expiration is None: @@ -3258,25 +2967,13 @@ def lock_retention_policy( to the ``client`` stored on the blob's bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises ValueError: if the bucket has no metageneration (i.e., new or never reloaded); @@ -3302,13 +2999,13 @@ def lock_retention_policy( query_params["userProject"] = self.user_project path = "/b/{}/lockRetentionPolicy".format(self.name) - api_response = client._connection.api_request( - method="POST", - path=path, + api_response = client._post_resource( + path, + None, query_params=query_params, - _target_object=self, timeout=timeout, retry=retry, + _target_object=self, ) self._set_properties(api_response) diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index 858fecdce..d6f688d92 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -53,6 +53,7 @@ from google.cloud.storage.acl import DefaultObjectACL from google.cloud.storage.constants import _DEFAULT_TIMEOUT from google.cloud.storage.retry import DEFAULT_RETRY +from google.cloud.storage.retry import ConditionalRetryPolicy _marker = object() @@ -259,35 +260,22 @@ def get_service_account_email( (Optional) Project ID to use for retreiving GCS service account email address. Defaults to the client's project. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. 
- - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: str :returns: service account email address """ if project is None: project = self.project + path = "/projects/%s/serviceAccount" % (project,) - api_response = self._base_connection.api_request( - method="GET", path=path, timeout=timeout, retry=retry, - ) + api_response = self._get_resource(path, timeout=timeout, retry=retry) return api_response["email_address"] def bucket(self, bucket_name, user_project=None): @@ -321,6 +309,376 @@ def batch(self): """ return Batch(client=self) + def _get_resource( + self, + path, + query_params=None, + headers=None, + timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY, + _target_object=None, + ): + """Helper for bucket / blob methods making API 'GET' calls. + + Args: + path str: + The path of the resource to fetch. + + query_params Optional[dict]: + HTTP query parameters to be passed + + headers Optional[dict]: + HTTP headers to be passed + + timeout (Optional[Union[float, Tuple[float, float]]]): + The amount of time, in seconds, to wait for the server response. + + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + + retry (Optional[Union[google.api_core.retry.Retry, google.cloud.storage.retry.ConditionalRetryPolicy]]): + How to retry the RPC. A None value will disable retries. + A google.api_core.retry.Retry value will enable retries, and the object will + define retriable response codes and errors and configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and + activates it only if certain conditions are met. This class exists to provide safe defaults + for RPC calls that are not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a condition such as + if_metageneration_match is set. + + See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for + information on retry types and how to configure them. + + _target_object (Union[ \ + :class:`~google.cloud.storage.bucket.Bucket`, \ + :class:`~google.cloud.storage.bucket.blob`, \ + ]): + Object to which future data is to be applied -- only relevant + in the context of a batch. + + Returns: + dict + The JSON resource fetched + + Raises: + google.cloud.exceptions.NotFound + If the bucket is not found. 
+ """ + return self._connection.api_request( + method="GET", + path=path, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=_target_object, + ) + + def _list_resource( + self, + path, + item_to_value, + page_token=None, + max_results=None, + extra_params=None, + page_start=page_iterator._do_nothing_page_start, + timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY, + ): + api_request = functools.partial( + self._connection.api_request, timeout=timeout, retry=retry + ) + return page_iterator.HTTPIterator( + client=self, + api_request=api_request, + path=path, + item_to_value=item_to_value, + page_token=page_token, + max_results=max_results, + extra_params=extra_params, + page_start=page_start, + ) + + def _patch_resource( + self, + path, + data, + query_params=None, + headers=None, + timeout=_DEFAULT_TIMEOUT, + retry=None, + _target_object=None, + ): + """Helper for bucket / blob methods making API 'PATCH' calls. + + Args: + path str: + The path of the resource to fetch. + + data dict: + The data to be patched. + + query_params Optional[dict]: + HTTP query parameters to be passed + + headers Optional[dict]: + HTTP headers to be passed + + timeout (Optional[Union[float, Tuple[float, float]]]): + The amount of time, in seconds, to wait for the server response. + + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + + retry (Optional[Union[google.api_core.retry.Retry, google.cloud.storage.retry.ConditionalRetryPolicy]]): + How to retry the RPC. A None value will disable retries. + A google.api_core.retry.Retry value will enable retries, and the object will + define retriable response codes and errors and configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and + activates it only if certain conditions are met. This class exists to provide safe defaults + for RPC calls that are not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a condition such as + if_metageneration_match is set. + + See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for + information on retry types and how to configure them. + + _target_object (Union[ \ + :class:`~google.cloud.storage.bucket.Bucket`, \ + :class:`~google.cloud.storage.bucket.blob`, \ + ]): + Object to which future data is to be applied -- only relevant + in the context of a batch. + + Returns: + dict + The JSON resource fetched + + Raises: + google.cloud.exceptions.NotFound + If the bucket is not found. + """ + return self._connection.api_request( + method="PATCH", + path=path, + data=data, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=_target_object, + ) + + def _put_resource( + self, + path, + data, + query_params=None, + headers=None, + timeout=_DEFAULT_TIMEOUT, + retry=None, + _target_object=None, + ): + """Helper for bucket / blob methods making API 'PUT' calls. + + Args: + path str: + The path of the resource to fetch. + + data dict: + The data to be patched. + + query_params Optional[dict]: + HTTP query parameters to be passed + + headers Optional[dict]: + HTTP headers to be passed + + timeout (Optional[Union[float, Tuple[float, float]]]): + The amount of time, in seconds, to wait for the server response. + + Can also be passed as a tuple (connect_timeout, read_timeout). 
+ See :meth:`requests.Session.request` documentation for details. + + retry (Optional[Union[google.api_core.retry.Retry, google.cloud.storage.retry.ConditionalRetryPolicy]]): + How to retry the RPC. A None value will disable retries. + A google.api_core.retry.Retry value will enable retries, and the object will + define retriable response codes and errors and configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and + activates it only if certain conditions are met. This class exists to provide safe defaults + for RPC calls that are not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a condition such as + if_metageneration_match is set. + + See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for + information on retry types and how to configure them. + + _target_object (Union[ \ + :class:`~google.cloud.storage.bucket.Bucket`, \ + :class:`~google.cloud.storage.bucket.blob`, \ + ]): + Object to which future data is to be applied -- only relevant + in the context of a batch. + + Returns: + dict + The JSON resource fetched + + Raises: + google.cloud.exceptions.NotFound + If the bucket is not found. + """ + return self._connection.api_request( + method="PUT", + path=path, + data=data, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=_target_object, + ) + + def _post_resource( + self, + path, + data, + query_params=None, + headers=None, + timeout=_DEFAULT_TIMEOUT, + retry=None, + _target_object=None, + ): + """Helper for bucket / blob methods making API 'POST' calls. + + Args: + path str: + The path of the resource to which to post. + + data dict: + The data to be posted. + + query_params Optional[dict]: + HTTP query parameters to be passed + + headers Optional[dict]: + HTTP headers to be passed + + timeout (Optional[Union[float, Tuple[float, float]]]): + The amount of time, in seconds, to wait for the server response. + + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + + retry (Optional[Union[google.api_core.retry.Retry, google.cloud.storage.retry.ConditionalRetryPolicy]]): + How to retry the RPC. A None value will disable retries. + A google.api_core.retry.Retry value will enable retries, and the object will + define retriable response codes and errors and configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and + activates it only if certain conditions are met. This class exists to provide safe defaults + for RPC calls that are not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a condition such as + if_metageneration_match is set. + + See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for + information on retry types and how to configure them. + + _target_object (Union[ \ + :class:`~google.cloud.storage.bucket.Bucket`, \ + :class:`~google.cloud.storage.bucket.blob`, \ + ]): + Object to which future data is to be applied -- only relevant + in the context of a batch. + + Returns: + dict + The JSON resource returned from the post. + + Raises: + google.cloud.exceptions.NotFound + If the bucket is not found. 
+ """ + return self._connection.api_request( + method="POST", + path=path, + data=data, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=_target_object, + ) + + def _delete_resource( + self, + path, + query_params=None, + headers=None, + timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY, + _target_object=None, + ): + """Helper for bucket / blob methods making API 'DELETE' calls. + + Args: + path str: + The path of the resource to delete. + + query_params Optional[dict]: + HTTP query parameters to be passed + + headers Optional[dict]: + HTTP headers to be passed + + timeout (Optional[Union[float, Tuple[float, float]]]): + The amount of time, in seconds, to wait for the server response. + + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + + retry (Optional[Union[google.api_core.retry.Retry, google.cloud.storage.retry.ConditionalRetryPolicy]]): + How to retry the RPC. A None value will disable retries. + A google.api_core.retry.Retry value will enable retries, and the object will + define retriable response codes and errors and configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and + activates it only if certain conditions are met. This class exists to provide safe defaults + for RPC calls that are not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a condition such as + if_metageneration_match is set. + + See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for + information on retry types and how to configure them. + + _target_object (Union[ \ + :class:`~google.cloud.storage.bucket.Bucket`, \ + :class:`~google.cloud.storage.bucket.blob`, \ + ]): + Object to which future data is to be applied -- only relevant + in the context of a batch. + + Returns: + dict + The JSON resource fetched + + Raises: + google.cloud.exceptions.NotFound + If the bucket is not found. + """ + return self._connection.api_request( + method="DELETE", + path=path, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=_target_object, + ) + def get_bucket( self, bucket_or_name, @@ -430,11 +788,9 @@ def lookup_bucket( :param bucket_name: The name of the bucket to get. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type if_metageneration_match: long :param if_metageneration_match: (Optional) Make the operation conditional on whether the @@ -445,18 +801,8 @@ def lookup_bucket( blob's current metageneration does not match the given value. :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. 
This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: :class:`google.cloud.storage.bucket.Bucket` :returns: The bucket matching the name provided or None if not found. @@ -602,14 +948,13 @@ def create_bucket( if location is not None: properties["location"] = location - api_response = self._connection.api_request( - method="POST", - path="/b", + api_response = self._post_resource( + "/b", + properties, query_params=query_params, - data=properties, - _target_object=bucket, timeout=timeout, retry=retry, + _target_object=bucket, ) bucket._set_properties(api_response) @@ -628,6 +973,7 @@ def download_blob_to_file( if_metageneration_not_match=None, timeout=_DEFAULT_TIMEOUT, checksum="md5", + retry=DEFAULT_RETRY, ): """Download the contents of a blob object or blob URI into a file-like object. @@ -637,37 +983,35 @@ def download_blob_to_file( str, \ ]): The blob resource to pass or URI to download. + file_obj (file): A file handle to which to write the blob's data. + start (int): (Optional) The first byte in a range to be downloaded. + end (int): (Optional) The last byte in a range to be downloaded. + raw_download (bool): (Optional) If true, download the object without any expansion. - if_generation_match (long): - (Optional) Make the operation conditional on whether - the blob's current generation matches the given value. - Setting to 0 makes the operation succeed only if there - are no live versions of the blob. - if_generation_not_match (long): - (Optional) Make the operation conditional on whether - the blob's current generation does not match the given - value. If no live blob exists, the precondition fails. - Setting to 0 makes the operation succeed only if there - is a live version of the blob. - if_metageneration_match (long): - (Optional) Make the operation conditional on whether the - blob's current metageneration matches the given value. - if_metageneration_not_match (long): - (Optional) Make the operation conditional on whether the - blob's current metageneration does not match the given value. + + if_generation_match: long + (Optional) See :ref:`using-if-generation-match` + + if_generation_not_match: long + (Optional) See :ref:`using-if-generation-not-match` + + if_metageneration_match: long + (Optional) See :ref:`using-if-metageneration-match` + + if_metageneration_not_match: long + (Optional) See :ref:`using-if-metageneration-not-match` + timeout ([Union[float, Tuple[float, float]]]): - (Optional) The number of seconds the transport should wait for the - server response. Depending on the retry strategy, a request may be - repeated several times using the same timeout each time. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` + checksum (str): (Optional) The type of checksum to compute to verify the integrity of the object. 
The response headers must contain a checksum of the @@ -677,6 +1021,27 @@ def download_blob_to_file( downloads where chunk_size is set) an INFO-level log will be emitted. Supported values are "md5", "crc32c" and None. The default is "md5". + retry (google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy) + (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. Examples: Download a blob using a blob resource. @@ -702,6 +1067,19 @@ def download_blob_to_file( """ + + # Handle ConditionalRetryPolicy. + if isinstance(retry, ConditionalRetryPolicy): + # Conditional retries are designed for non-media calls, which change + # arguments into query_params dictionaries. Media operations work + # differently, so here we make a "fake" query_params to feed to the + # ConditionalRetryPolicy. + query_params = { + "ifGenerationMatch": if_generation_match, + "ifMetagenerationMatch": if_metageneration_match, + } + retry = retry.get_retry_policy_if_conditions_met(query_params=query_params) + if not isinstance(blob_or_uri, Blob): blob_or_uri = Blob.from_string(blob_or_uri) download_url = blob_or_uri._get_download_url( @@ -726,6 +1104,7 @@ def download_blob_to_file( raw_download, timeout=timeout, checksum=checksum, + retry=retry, ) except resumable_media.InvalidResponse as exc: _raise_from_invalid_response(exc) @@ -871,18 +1250,15 @@ def list_blobs( extra_params["userProject"] = bucket.user_project path = bucket.path + "/o" - api_request = functools.partial( - self._connection.api_request, timeout=timeout, retry=retry - ) - iterator = page_iterator.HTTPIterator( - client=self, - api_request=api_request, - path=path, - item_to_value=_item_to_blob, + iterator = self._list_resource( + path, + _item_to_blob, page_token=page_token, max_results=max_results, extra_params=extra_params, page_start=_blobs_page_start, + timeout=timeout, + retry=retry, ) iterator.bucket = bucket iterator.prefixes = set() @@ -943,25 +1319,13 @@ def list_buckets( If not passed, uses the project set on the client. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. 
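The ``download_blob_to_file`` change above deserves a usage note: because media operations never build real ``query_params``, the method fabricates one so a ``ConditionalRetryPolicy`` can decide whether to activate. A hedged sketch, with the URI and generation number hypothetical:

```python
import io

from google.cloud import storage
from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED

client = storage.Client()
buf = io.BytesIO()

# The conditional policy activates only because if_generation_match is set;
# internally it is checked against the "fake" query_params shown in the diff.
client.download_blob_to_file(
    "gs://my-bucket/data.bin",         # hypothetical URI
    buf,
    if_generation_match=123456789,     # hypothetical generation
    retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
)
print("%d bytes downloaded" % buf.tell())
```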
- A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: :class:`~google.api_core.page_iterator.Iterator` :raises ValueError: if both ``project`` is ``None`` and the client's @@ -985,18 +1349,14 @@ def list_buckets( if fields is not None: extra_params["fields"] = fields - api_request = functools.partial( - self._connection.api_request, retry=retry, timeout=timeout - ) - - return page_iterator.HTTPIterator( - client=self, - api_request=api_request, - path="/b", - item_to_value=_item_to_bucket, + return self._list_resource( + "/b", + _item_to_bucket, page_token=page_token, max_results=max_results, extra_params=extra_params, + timeout=timeout, + retry=retry, ) def create_hmac_key( @@ -1005,6 +1365,7 @@ def create_hmac_key( project_id=None, user_project=None, timeout=_DEFAULT_TIMEOUT, + retry=None, ): """Create an HMAC key for a service account. @@ -1019,11 +1380,23 @@ def create_hmac_key( :param user_project: (Optional) This parameter is currently ignored. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` + + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: (Optional) How to retry the RPC. A None value will disable retries. + A google.api_core.retry.Retry value will enable retries, and the object will + define retriable response codes and errors and configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and + activates it only if certain conditions are met. This class exists to provide safe defaults + for RPC calls that are not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a condition such as + if_metageneration_match is set. - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for + information on retry types and how to configure them. 
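The listing methods are consolidated the same way: ``list_blobs``, ``list_buckets``, and ``list_hmac_keys`` all build their page iterator through the new ``_list_resource`` helper, so ``timeout`` and ``retry`` thread through identically. A small sketch (the bucket name and prefix are hypothetical):

```python
from google.cloud import storage

client = storage.Client()

for blob in client.list_blobs("my-bucket", prefix="logs/", max_results=50):
    print(blob.name)

for metadata in client.list_hmac_keys():
    print(metadata.access_id, metadata.state)
```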
:rtype: Tuple[:class:`~google.cloud.storage.hmac_key.HMACKeyMetadata`, str] @@ -1038,12 +1411,8 @@ def create_hmac_key( if user_project is not None: qs_params["userProject"] = user_project - api_response = self._connection.api_request( - method="POST", - path=path, - query_params=qs_params, - timeout=timeout, - retry=None, + api_response = self._post_resource( + path, None, query_params=qs_params, timeout=timeout, retry=retry, ) metadata = HMACKeyMetadata(self) metadata._properties = api_response["metadata"] @@ -1083,25 +1452,13 @@ def list_hmac_keys( :param user_project: (Optional) This parameter is currently ignored. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: Tuple[:class:`~google.cloud.storage.hmac_key.HMACKeyMetadata`, str] @@ -1122,17 +1479,13 @@ def list_hmac_keys( if user_project is not None: extra_params["userProject"] = user_project - api_request = functools.partial( - self._connection.api_request, timeout=timeout, retry=retry - ) - - return page_iterator.HTTPIterator( - client=self, - api_request=api_request, - path=path, - item_to_value=_item_to_hmac_key_metadata, + return self._list_resource( + path, + _item_to_hmac_key_metadata, max_results=max_results, extra_params=extra_params, + timeout=timeout, + retry=retry, ) def get_hmac_key_metadata( @@ -1148,11 +1501,9 @@ def get_hmac_key_metadata( Defaults to client's project. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type user_project: str :param user_project: (Optional) This parameter is currently ignored. diff --git a/google/cloud/storage/fileio.py b/google/cloud/storage/fileio.py index 53d3d14ab..6ac8e057f 100644 --- a/google/cloud/storage/fileio.py +++ b/google/cloud/storage/fileio.py @@ -13,8 +13,14 @@ # limitations under the License. 
import io +import warnings from google.api_core.exceptions import RequestRangeNotSatisfiable +from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE +from google.cloud.storage.retry import DEFAULT_RETRY +from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED +from google.cloud.storage.retry import ConditionalRetryPolicy + # Resumable uploads require a chunk size of precisely a multiple of 256 KiB. CHUNK_SIZE_MULTIPLE = 256 * 1024 # 256 KiB @@ -28,20 +34,22 @@ "if_metageneration_match", "if_metageneration_not_match", "timeout", + "retry", } # Valid keyword arguments for upload methods. # Note: Changes here need to be reflected in the blob.open() docstring. VALID_UPLOAD_KWARGS = { "content_type", - "num_retries", "predefined_acl", + "num_retries", "if_generation_match", "if_generation_not_match", "if_metageneration_match", "if_metageneration_not_match", "timeout", "checksum", + "retry", } @@ -58,13 +66,41 @@ class BlobReader(io.BufferedIOBase): bytes than the chunk_size are requested, the remainder is buffered. The default is the chunk_size of the blob, or 40MiB. - :param download_kwargs: Keyword arguments to pass to the underlying API - calls. The following arguments are supported: "if_generation_match", - "if_generation_not_match", "if_metageneration_match", - "if_metageneration_not_match", "timeout". + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: + (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. + + :param download_kwargs: + Keyword arguments to pass to the underlying API calls. 
+ The following arguments are supported: + + - ``if_generation_match`` + - ``if_generation_not_match`` + - ``if_metageneration_match`` + - ``if_metageneration_not_match`` + - ``timeout`` """ - def __init__(self, blob, chunk_size=None, **download_kwargs): + def __init__(self, blob, chunk_size=None, retry=DEFAULT_RETRY, **download_kwargs): """docstring note that download_kwargs also used for reload()""" for kwarg in download_kwargs: if kwarg not in VALID_DOWNLOAD_KWARGS: @@ -76,6 +112,7 @@ def __init__(self, blob, chunk_size=None, **download_kwargs): self._pos = 0 self._buffer = io.BytesIO() self._chunk_size = chunk_size or blob.chunk_size or DEFAULT_CHUNK_SIZE + self._retry = retry self._download_kwargs = download_kwargs def read(self, size=-1): @@ -102,6 +139,7 @@ def read(self, size=-1): start=fetch_start, end=fetch_end, checksum=None, + retry=self._retry, **self._download_kwargs ) except RequestRangeNotSatisfiable: @@ -197,14 +235,52 @@ class BlobWriter(io.BufferedIOBase): changes the behavior of flush() to conform to TextIOWrapper's expectations. - :param upload_kwargs: Keyword arguments to pass to the underlying API - calls. The following arguments are supported: "if_generation_match", - "if_generation_not_match", "if_metageneration_match", - "if_metageneration_not_match", "timeout", "content_type", - "num_retries", "predefined_acl", "checksum". + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: + (Optional) How to retry the RPC. A None value will disable + retries. A google.api_core.retry.Retry value will enable retries, + and the object will define retriable response codes and errors and + configure backoff and timeout options. + + A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a + Retry object and activates it only if certain conditions are met. + This class exists to provide safe defaults for RPC calls that are + not technically safe to retry normally (due to potential data + duplication or other side-effects) but become safe to retry if a + condition such as if_metageneration_match is set. + + See the retry.py source code and docstrings in this package + (google.cloud.storage.retry) for information on retry types and how + to configure them. + + Media operations (downloads and uploads) do not support non-default + predicates in a Retry object. The default will always be used. Other + configuration changes for Retry objects such as delays and deadlines + are respected. + + :param upload_kwargs: + Keyword arguments to pass to the underlying API + calls. The following arguments are supported: + + - ``if_generation_match`` + - ``if_generation_not_match`` + - ``if_metageneration_match`` + - ``if_metageneration_not_match`` + - ``timeout`` + - ``content_type`` + - ``num_retries`` + - ``predefined_acl`` + - ``checksum`` """ - def __init__(self, blob, chunk_size=None, text_mode=False, **upload_kwargs): + def __init__( + self, + blob, + chunk_size=None, + text_mode=False, + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + **upload_kwargs + ): for kwarg in upload_kwargs: if kwarg not in VALID_UPLOAD_KWARGS: raise ValueError( @@ -219,6 +295,7 @@ def __init__(self, blob, chunk_size=None, text_mode=False, **upload_kwargs): # In text mode this class will be wrapped and TextIOWrapper requires a # different behavior of flush(). 
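Below, ``BlobWriter`` gains a ``retry`` parameter whose default only retries uploads when a precondition makes them idempotent. A hedged sketch of opting into unconditional retries through ``blob.open()``, which forwards keyword arguments to the writer (bucket and object names are hypothetical):

```python
from google.cloud import storage
from google.cloud.storage.retry import DEFAULT_RETRY

client = storage.Client()
blob = client.bucket("my-bucket").blob("notes.txt")  # hypothetical names

# The writer's default policy retries only when if_metageneration_match is
# supplied; passing DEFAULT_RETRY retries transient failures unconditionally.
with blob.open("w", retry=DEFAULT_RETRY) as f:
    f.write("retried on transient errors")
```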
self._text_mode = text_mode + self._retry = retry self._upload_kwargs = upload_kwargs @property @@ -259,20 +336,32 @@ def write(self, b): return pos def _initiate_upload(self): + # num_retries is only supported for backwards-compatibility reasons. num_retries = self._upload_kwargs.pop("num_retries", None) + retry = self._retry content_type = self._upload_kwargs.pop("content_type", None) - if ( - self._upload_kwargs.get("if_metageneration_match") is None - and num_retries is None - ): - # Uploads are only idempotent (safe to retry) if - # if_metageneration_match is set. If it is not set, the default - # num_retries should be 0. Note: Because retry logic for uploads is - # provided by the google-resumable-media-python package, it doesn't - # use the ConditionalRetryStrategy class used in other API calls in - # this library to solve this problem. - num_retries = 0 + if num_retries is not None: + warnings.warn(_NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2) + # num_retries and retry are mutually exclusive. If num_retries is + # set and retry is exactly the default, then nullify retry for + # backwards compatibility. + if retry is DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED: + retry = None + + # Handle ConditionalRetryPolicy. + if isinstance(retry, ConditionalRetryPolicy): + # Conditional retries are designed for non-media calls, which change + # arguments into query_params dictionaries. Media operations work + # differently, so here we make a "fake" query_params to feed to the + # ConditionalRetryPolicy. + query_params = { + "ifGenerationMatch": self._upload_kwargs.get("if_generation_match"), + "ifMetagenerationMatch": self._upload_kwargs.get( + "if_metageneration_match" + ), + } + retry = retry.get_retry_policy_if_conditions_met(query_params=query_params) self._upload_and_transport = self._blob._initiate_resumable_upload( self._blob.bucket.client, @@ -281,6 +370,7 @@ def _initiate_upload(self): None, num_retries, chunk_size=self._chunk_size, + retry=retry, **self._upload_kwargs ) diff --git a/google/cloud/storage/hmac_key.py b/google/cloud/storage/hmac_key.py index 3fd49079e..5cec51fa7 100644 --- a/google/cloud/storage/hmac_key.py +++ b/google/cloud/storage/hmac_key.py @@ -193,25 +193,13 @@ def exists(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): """Determine whether or not the key for this metadata exists. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. 
- - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: bool :returns: True if the key exists in Cloud Storage. @@ -222,12 +210,8 @@ def exists(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): if self.user_project is not None: qs_params["userProject"] = self.user_project - self._client._connection.api_request( - method="GET", - path=self.path, - query_params=qs_params, - timeout=timeout, - retry=retry, + self._client._get_resource( + self.path, query_params=qs_params, timeout=timeout, retry=retry, ) except NotFound: return False @@ -238,25 +222,13 @@ def reload(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): """Reload properties from Cloud Storage. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises :class:`~google.api_core.exceptions.NotFound`: if the key does not exist on the back-end. @@ -266,37 +238,21 @@ def reload(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): if self.user_project is not None: qs_params["userProject"] = self.user_project - self._properties = self._client._connection.api_request( - method="GET", - path=self.path, - query_params=qs_params, - timeout=timeout, - retry=retry, + self._properties = self._client._get_resource( + self.path, query_params=qs_params, timeout=timeout, retry=retry, ) def update(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_ETAG_IN_JSON): """Save writable properties to Cloud Storage. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. 
- A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises :class:`~google.api_core.exceptions.NotFound`: if the key does not exist on the back-end. @@ -306,38 +262,21 @@ def update(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY_IF_ETAG_IN_JSON): qs_params["userProject"] = self.user_project payload = {"state": self.state} - self._properties = self._client._connection.api_request( - method="PUT", - path=self.path, - data=payload, - query_params=qs_params, - timeout=timeout, - retry=retry, + self._properties = self._client._put_resource( + self.path, payload, query_params=qs_params, timeout=timeout, retry=retry, ) def delete(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): """Delete the key from Cloud Storage. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises :class:`~google.api_core.exceptions.NotFound`: if the key does not exist on the back-end. 
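The hunks above wire ConditionalRetryPolicy through both the blob writer and the HMAC-key helpers. To make the gating behavior concrete, here is a minimal sketch of how a conditional policy resolves to a concrete retry (or to None) based on the request's query parameters. It uses only names from google.cloud.storage.retry that appear in this diff; the metageneration value 3 is arbitrary:

    from google.cloud.storage.retry import (
        DEFAULT_RETRY,
        DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
    )

    policy = DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED

    # The precondition is present, so the wrapped Retry object is returned:
    # the call cannot duplicate side effects and is safe to retry.
    retry = policy.get_retry_policy_if_conditions_met(
        query_params={"ifMetagenerationMatch": 3}
    )
    assert retry is DEFAULT_RETRY

    # Without the precondition, the policy resolves to None and retries are
    # disabled. This is the same check _initiate_upload() performs with the
    # "fake" query_params dict in the hunk above.
    retry = policy.get_retry_policy_if_conditions_met(query_params={})
    assert retry is None
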
@@ -349,10 +288,6 @@ def delete(self, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): if self.user_project is not None: qs_params["userProject"] = self.user_project - self._client._connection.api_request( - method="DELETE", - path=self.path, - query_params=qs_params, - timeout=timeout, - retry=retry, + self._client._delete_resource( + self.path, query_params=qs_params, timeout=timeout, retry=retry, ) diff --git a/google/cloud/storage/notification.py b/google/cloud/storage/notification.py index fde5e4559..d23343100 100644 --- a/google/cloud/storage/notification.py +++ b/google/cloud/storage/notification.py @@ -233,7 +233,7 @@ def _set_properties(self, response): self._properties.clear() self._properties.update(response) - def create(self, client=None, timeout=_DEFAULT_TIMEOUT): + def create(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=None): """API wrapper: create the notification. See: @@ -246,11 +246,13 @@ def create(self, client=None, timeout=_DEFAULT_TIMEOUT): :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the notification's bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` """ if self.notification_id is not None: raise ValueError( @@ -266,13 +268,8 @@ def create(self, client=None, timeout=_DEFAULT_TIMEOUT): path = "/b/{}/notificationConfigs".format(self.bucket.name) properties = self._properties.copy() properties["topic"] = _TOPIC_REF_FMT.format(self.topic_project, self.topic_name) - self._properties = client._connection.api_request( - method="POST", - path=path, - query_params=query_params, - data=properties, - timeout=timeout, - retry=None, + self._properties = client._post_resource( + path, properties, query_params=query_params, timeout=timeout, retry=retry, ) def exists(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): @@ -289,25 +286,13 @@ def exists(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. 
This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :rtype: bool :returns: True, if the notification exists, else False. @@ -323,12 +308,8 @@ def exists(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): query_params["userProject"] = self.bucket.user_project try: - client._connection.api_request( - method="GET", - path=self.path, - query_params=query_params, - timeout=timeout, - retry=retry, + client._get_resource( + self.path, query_params=query_params, timeout=timeout, retry=retry, ) except NotFound: return False @@ -349,25 +330,13 @@ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. + :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises ValueError: if the notification has no ID. @@ -381,12 +350,8 @@ def reload(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): if self.bucket.user_project is not None: query_params["userProject"] = self.bucket.user_project - response = client._connection.api_request( - method="GET", - path=self.path, - query_params=query_params, - timeout=timeout, - retry=retry, + response = client._get_resource( + self.path, query_params=query_params, timeout=timeout, retry=retry, ) self._set_properties(response) @@ -404,25 +369,13 @@ def delete(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the current bucket. :type timeout: float or tuple - :param timeout: (Optional) The amount of time, in seconds, to wait - for the server response. - - Can also be passed as a tuple (connect_timeout, read_timeout). - See :meth:`requests.Session.request` documentation for details. 
+ :param timeout: + (Optional) The amount of time, in seconds, to wait + for the server response. See: :ref:`configuring_timeouts` :type retry: google.api_core.retry.Retry or google.cloud.storage.retry.ConditionalRetryPolicy - :param retry: (Optional) How to retry the RPC. A None value will disable retries. - A google.api_core.retry.Retry value will enable retries, and the object will - define retriable response codes and errors and configure backoff and timeout options. - - A google.cloud.storage.retry.ConditionalRetryPolicy value wraps a Retry object and - activates it only if certain conditions are met. This class exists to provide safe defaults - for RPC calls that are not technically safe to retry normally (due to potential data - duplication or other side-effects) but become safe to retry if a condition such as - if_metageneration_match is set. - - See the retry.py source code and docstrings in this package (google.cloud.storage.retry) for - information on retry types and how to configure them. + :param retry: + (Optional) How to retry the RPC. See: :ref:`configuring_retries` :raises: :class:`google.api_core.exceptions.NotFound`: if the notification does not exist. @@ -437,12 +390,8 @@ def delete(self, client=None, timeout=_DEFAULT_TIMEOUT, retry=DEFAULT_RETRY): if self.bucket.user_project is not None: query_params["userProject"] = self.bucket.user_project - client._connection.api_request( - method="DELETE", - path=self.path, - query_params=query_params, - timeout=timeout, - retry=retry, + client._delete_resource( + self.path, query_params=query_params, timeout=timeout, retry=retry, ) diff --git a/google/cloud/storage/retry.py b/google/cloud/storage/retry.py index e17f3d5a0..a9cdc9c0d 100644 --- a/google/cloud/storage/retry.py +++ b/google/cloud/storage/retry.py @@ -21,7 +21,14 @@ import json -_RETRYABLE_TYPES = ( +# ConnectionError is a built-in exception only in Python3 and not in Python2. +try: + _RETRYABLE_STDLIB_TYPES = (ConnectionError,) +except NameError: + _RETRYABLE_STDLIB_TYPES = () + + +_RETRYABLE_TYPES = _RETRYABLE_STDLIB_TYPES + ( api_exceptions.TooManyRequests, # 429 api_exceptions.InternalServerError, # 500 api_exceptions.BadGateway, # 502 @@ -30,6 +37,7 @@ requests.ConnectionError, ) + # Some retriable errors don't have their own custom exception in api_core. _ADDITIONAL_RETRYABLE_STATUS_CODES = (408,) @@ -131,9 +139,29 @@ def is_etag_in_json(data): DEFAULT_RETRY_IF_GENERATION_SPECIFIED = ConditionalRetryPolicy( DEFAULT_RETRY, is_generation_specified, ["query_params"] ) +"""Conditional wrapper for the default retry object. + +This retry setting will retry all _RETRYABLE_TYPES and any status codes from +_ADDITIONAL_RETRYABLE_STATUS_CODES, but only if the request included an +``ifGenerationMatch`` header. +""" + DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED = ConditionalRetryPolicy( DEFAULT_RETRY, is_metageneration_specified, ["query_params"] ) +"""Conditional wrapper for the default retry object. + +This retry setting will retry all _RETRYABLE_TYPES and any status codes from +_ADDITIONAL_RETRYABLE_STATUS_CODES, but only if the request included an +``ifMetagenerationMatch`` header. +""" + DEFAULT_RETRY_IF_ETAG_IN_JSON = ConditionalRetryPolicy( DEFAULT_RETRY, is_etag_in_json, ["data"] ) +"""Conditional wrapper for the default retry object. + +This retry setting will retry all _RETRYABLE_TYPES and any status codes from +_ADDITIONAL_RETRYABLE_STATUS_CODES, but only if the request included an +``ETAG`` entry in its payload. 
+""" diff --git a/google/cloud/storage/version.py b/google/cloud/storage/version.py index 31e5a5cad..05c5a222e 100644 --- a/google/cloud/storage/version.py +++ b/google/cloud/storage/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "1.38.0" +__version__ = "1.39.0" diff --git a/noxfile.py b/noxfile.py index 5f3f73ebb..c34e8b981 100644 --- a/noxfile.py +++ b/noxfile.py @@ -45,15 +45,11 @@ def lint(session): session.run("flake8", "google", "tests") -@nox.session(python="3.6") +@nox.session(python=DEFAULT_PYTHON_VERSION) def blacken(session): """Run black. Format code to uniform standard. - - This currently uses Python 3.6 due to the automated Kokoro run of synthtool. - That run uses an image that doesn't have 3.6 installed. Before updating this - check the state of the `gcp_ubuntu_config` we use for that Kokoro run. """ session.install(BLACK_VERSION) session.run( @@ -122,15 +118,19 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. + # 2021-05-06: defer installing 'google-cloud-*' to after this package, + # in order to work around Python 2.7 googolapis-common-protos + # issue. + session.install( + "mock", "pytest", + ) + session.install("-e", ".") session.install( - "mock", - "pytest", "google-cloud-testutils", "google-cloud-iam", "google-cloud-pubsub < 2.0.0", "google-cloud-kms < 2.0dev", ) - session.install("-e", ".") # Run py.test against the system tests. if system_test_exists: @@ -179,9 +179,8 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. - # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("grpcio") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/synth.py b/owlbot.py similarity index 86% rename from synth.py rename to owlbot.py index 9774b1a01..0e23239ec 100644 --- a/synth.py +++ b/owlbot.py @@ -32,9 +32,13 @@ # See: https://github.com/googleapis/python-storage/issues/226 "google-cloud-kms < 2.0dev", ], + intersphinx_dependencies = { + "requests": "https://docs.python-requests.org/en/master/" + }, ) + s.move( - templated_files, excludes=["docs/multiprocessing.rst"], + templated_files, excludes=["docs/multiprocessing.rst", "noxfile.py", "CONTRIBUTING.rst"], ) s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/renovate.json b/renovate.json index 4fa949311..c04895563 100644 --- a/renovate.json +++ b/renovate.json @@ -1,5 +1,9 @@ { "extends": [ "config:base", ":preserveSemverRanges" - ] + ], + "ignorePaths": [".pre-commit-config.yaml"], + "pip_requirements": { + "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] + } } diff --git a/setup.py b/setup.py index aff482545..6f6fa1f3d 100644 --- a/setup.py +++ b/setup.py @@ -17,20 +17,6 @@ import setuptools -# Disable version normalization performed by setuptools.setup() -try: - # Try the approach of using sic(), added in setuptools 46.1.0 - from setuptools import sic -except ImportError: - # Try the approach of replacing packaging.version.Version - sic = lambda v: v - try: - # setuptools >=39.0.0 uses packaging from setuptools.extern - from setuptools.extern 
import packaging - except ImportError: - # setuptools <39.0.0 uses packaging from pkg_resources.extern - from pkg_resources.extern import packaging - packaging.version.Version = packaging.version.LegacyVersion # Package metadata. @@ -44,8 +30,9 @@ dependencies = [ "google-auth >= 1.11.0, < 2.0dev", "google-cloud-core >= 1.4.1, < 2.0dev", - "google-resumable-media >= 1.2.0, < 2.0dev", + "google-resumable-media >= 1.3.0, < 2.0dev", "requests >= 2.18.0, < 3.0.0dev", + "googleapis-common-protos < 1.53.0; python_version<'3.0'", ] extras = {} @@ -77,7 +64,7 @@ setuptools.setup( name=name, - version=sic(version), + version=version, description=description, long_description=readme, author="Google LLC", diff --git a/synth.metadata b/synth.metadata deleted file mode 100644 index 7fdf91454..000000000 --- a/synth.metadata +++ /dev/null @@ -1,83 +0,0 @@ -{ - "sources": [ - { - "git": { - "name": ".", - "remote": "https://github.com/googleapis/python-storage.git", - "sha": "dc3671963f25dde7ba393c8e3939225b5c61d158" - } - }, - { - "git": { - "name": "synthtool", - "remote": "https://github.com/googleapis/synthtool.git", - "sha": "f3c04883d6c43261ff13db1f52d03a283be06871" - } - } - ], - "generatedFiles": [ - ".coveragerc", - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "noxfile.py", - "renovate.json", - "scripts/decrypt-secrets.sh", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore" - ] -} \ No newline at end of file diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 6ec1c2a68..6fbaa02c2 100644 --- a/tests/system/test_system.py +++ 
b/tests/system/test_system.py @@ -27,6 +27,7 @@ import mock import requests +import pytest import six from google.cloud import exceptions @@ -35,6 +36,7 @@ from google.cloud.storage._helpers import _base64_md5hash from google.cloud.storage.bucket import LifecycleRuleDelete from google.cloud.storage.bucket import LifecycleRuleSetStorageClass +from google.cloud import _helpers from google.cloud import kms from google import resumable_media import google.auth @@ -42,6 +44,7 @@ from google.api_core import path_template import google.oauth2 from test_utils.retry import RetryErrors +from test_utils.retry import RetryInstanceState from test_utils.system import unique_resource_id from test_utils.vpcsc_config import vpcsc_config @@ -57,12 +60,17 @@ def _bad_copy(bad_request): return err_msg.startswith("No file found in request. (POST") and "copyTo" in err_msg +def _no_event_based_hold(blob): + return not blob.event_based_hold + + retry_429 = RetryErrors(exceptions.TooManyRequests, max_tries=6) retry_429_harder = RetryErrors(exceptions.TooManyRequests, max_tries=10) retry_429_503 = RetryErrors( [exceptions.TooManyRequests, exceptions.ServiceUnavailable], max_tries=10 ) retry_bad_copy = RetryErrors(exceptions.BadRequest, error_predicate=_bad_copy) +retry_no_event_based_hold = RetryInstanceState(_no_event_based_hold) def _empty_bucket(client, bucket): @@ -141,13 +149,33 @@ def test_get_service_account_email(self): self.assertTrue(any(match for match in matches if match is not None)) + @staticmethod + def _get_before_hmac_keys(client): + from google.cloud.storage.hmac_key import HMACKeyMetadata + + before_hmac_keys = set(client.list_hmac_keys()) + + now = datetime.datetime.utcnow().replace(tzinfo=_helpers.UTC) + yesterday = now - datetime.timedelta(days=1) + + # Delete any HMAC keys older than a day. 
+ for hmac_key in list(before_hmac_keys): + if hmac_key.time_created < yesterday: + if hmac_key.state != HMACKeyMetadata.INACTIVE_STATE: + hmac_key.state = HMACKeyMetadata.INACTIVE_STATE + hmac_key.update() + hmac_key.delete() + before_hmac_keys.remove(hmac_key) + + return before_hmac_keys + def test_hmac_key_crud(self): from google.cloud.storage.hmac_key import HMACKeyMetadata credentials = Config.CLIENT._credentials email = credentials.service_account_email - before_keys = set(Config.CLIENT.list_hmac_keys()) + before_hmac_keys = self._get_before_hmac_keys(Config.CLIENT) metadata, secret = Config.CLIENT.create_hmac_key(email) self.case_hmac_keys_to_delete.append(metadata) @@ -155,9 +183,9 @@ def test_hmac_key_crud(self): self.assertIsInstance(secret, six.text_type) self.assertEqual(len(secret), 40) - after_keys = set(Config.CLIENT.list_hmac_keys()) - self.assertFalse(metadata in before_keys) - self.assertTrue(metadata in after_keys) + after_hmac_keys = set(Config.CLIENT.list_hmac_keys()) + self.assertFalse(metadata in before_hmac_keys) + self.assertTrue(metadata in after_hmac_keys) another = HMACKeyMetadata(Config.CLIENT) @@ -303,7 +331,6 @@ def test_bucket_update_labels(self): self.assertEqual(bucket.labels, {}) def test_get_set_iam_policy(self): - import pytest from google.cloud.storage.iam import STORAGE_OBJECT_VIEWER_ROLE from google.api_core.exceptions import BadRequest, PreconditionFailed @@ -1723,7 +1750,7 @@ def test_compose_replace_existing_blob(self): composed = original.download_as_bytes() self.assertEqual(composed, BEFORE + TO_APPEND) - def test_compose_with_generation_match(self): + def test_compose_with_generation_match_list(self): BEFORE = b"AAA\n" original = self.bucket.blob("original") original.content_type = "text/plain" @@ -1751,6 +1778,49 @@ def test_compose_with_generation_match(self): composed = original.download_as_bytes() self.assertEqual(composed, BEFORE + TO_APPEND) + def test_compose_with_generation_match_long(self): + BEFORE = b"AAA\n" + original = self.bucket.blob("original") + original.content_type = "text/plain" + original.upload_from_string(BEFORE) + self.case_blobs_to_delete.append(original) + + TO_APPEND = b"BBB\n" + to_append = self.bucket.blob("to_append") + to_append.upload_from_string(TO_APPEND) + self.case_blobs_to_delete.append(to_append) + + with self.assertRaises(google.api_core.exceptions.PreconditionFailed): + original.compose([original, to_append], if_generation_match=0) + + original.compose([original, to_append], if_generation_match=original.generation) + + composed = original.download_as_bytes() + self.assertEqual(composed, BEFORE + TO_APPEND) + + def test_compose_with_source_generation_match(self): + BEFORE = b"AAA\n" + original = self.bucket.blob("original") + original.content_type = "text/plain" + original.upload_from_string(BEFORE) + self.case_blobs_to_delete.append(original) + + TO_APPEND = b"BBB\n" + to_append = self.bucket.blob("to_append") + to_append.upload_from_string(TO_APPEND) + self.case_blobs_to_delete.append(to_append) + + with self.assertRaises(google.api_core.exceptions.PreconditionFailed): + original.compose([original, to_append], if_source_generation_match=[6, 7]) + + original.compose( + [original, to_append], + if_source_generation_match=[original.generation, to_append.generation], + ) + + composed = original.download_as_bytes() + self.assertEqual(composed, BEFORE + TO_APPEND) + @unittest.skipUnless(USER_PROJECT, "USER_PROJECT not set in environment.") def test_compose_with_user_project(self): new_bucket_name = 
"compose-user-project" + unique_resource_id("-") @@ -2439,6 +2509,11 @@ def test_bucket_w_default_event_based_hold(self): self.assertFalse(bucket.retention_policy_locked) blob.upload_from_string(payload) + + # https://github.com/googleapis/python-storage/issues/435 + if blob.event_based_hold: + retry_no_event_based_hold(blob.reload)() + self.assertFalse(blob.event_based_hold) self.assertFalse(blob.temporary_hold) self.assertIsNone(blob.retention_expiration_time) diff --git a/tests/unit/test__helpers.py b/tests/unit/test__helpers.py index fa989f96e..75a439cf1 100644 --- a/tests/unit/test__helpers.py +++ b/tests/unit/test__helpers.py @@ -66,6 +66,7 @@ def _derivedClass(self, path=None, user_project=None): class Derived(self._get_target_class()): client = None + _actual_encryption_headers = None @property def path(self): @@ -75,6 +76,9 @@ def path(self): def user_project(self): return user_project + def _encryption_headers(self): + return self._actual_encryption_headers or {} + return Derived def test_path_is_abstract(self): @@ -105,119 +109,130 @@ def test__query_params_w_user_project(self): derived = self._derivedClass("/path", user_project)() self.assertEqual(derived._query_params, {"userProject": user_project}) - def test_reload(self): - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path")() + def test_reload_w_defaults(self): + path = "/path" + response = {"foo": "Foo"} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = response + derived = self._derivedClass(path)() # Make sure changes is not a set instance before calling reload # (which will clear / replace it with an empty set), checked below. derived._changes = object() - derived.reload(client=client, timeout=42) - self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "/path", - "query_params": {"projection": "noAcl"}, - "headers": {}, - "_target_object": derived, - "timeout": 42, - "retry": DEFAULT_RETRY, - }, - ) + derived.client = client + + derived.reload() + + self.assertEqual(derived._properties, response) self.assertEqual(derived._changes, set()) - def test_reload_with_generation_match(self): - GENERATION_NUMBER = 9 - METAGENERATION_NUMBER = 6 + expected_query_params = {"projection": "noAcl"} + expected_headers = {} # no encryption headers by default + client._get_resource.assert_called_once_with( + path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=derived, + ) - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path")() + def test_reload_w_generation_match_w_timeout(self): + generation_number = 9 + metageneration_number = 6 + path = "/path" + timeout = 42 + response = {"foo": "Foo"} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = response + derived = self._derivedClass(path)() # Make sure changes is not a set instance before calling reload # (which will clear / replace it with an empty set), checked below. 
derived._changes = object() + derived.client = client + derived.reload( - client=client, - timeout=42, - if_generation_match=GENERATION_NUMBER, - if_metageneration_match=METAGENERATION_NUMBER, - ) - self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "/path", - "query_params": { - "projection": "noAcl", - "ifGenerationMatch": GENERATION_NUMBER, - "ifMetagenerationMatch": METAGENERATION_NUMBER, - }, - "headers": {}, - "_target_object": derived, - "timeout": 42, - "retry": DEFAULT_RETRY, - }, + if_generation_match=generation_number, + if_metageneration_match=metageneration_number, + timeout=timeout, ) + + self.assertEqual(derived._properties, response) self.assertEqual(derived._changes, set()) - def test_reload_w_user_project(self): + expected_query_params = { + "projection": "noAcl", + "ifGenerationMatch": generation_number, + "ifMetagenerationMatch": metageneration_number, + } + expected_headers = {} # no encryption headers by default + client._get_resource.assert_called_once_with( + path, + query_params=expected_query_params, + headers=expected_headers, + timeout=timeout, + retry=DEFAULT_RETRY, + _target_object=derived, + ) + + def test_reload_w_user_project_w_retry(self): user_project = "user-project-123" - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path", user_project)() + path = "/path" + retry = mock.Mock(spec=[]) + response = {"foo": "Foo"} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = response + derived = self._derivedClass(path, user_project)() # Make sure changes is not a set instance before calling reload # (which will clear / replace it with an empty set), checked below. derived._changes = object() - derived.reload(client=client) - self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "/path", - "query_params": {"projection": "noAcl", "userProject": user_project}, - "headers": {}, - "_target_object": derived, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - }, - ) + derived.client = client + + derived.reload(retry=retry) + + self.assertEqual(derived._properties, response) self.assertEqual(derived._changes, set()) - def test_reload_w_projection(self): - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path")() + expected_query_params = { + "projection": "noAcl", + "userProject": user_project, + } + expected_headers = {} # no encryption headers by default + client._get_resource.assert_called_once_with( + path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=derived, + ) + + def test_reload_w_projection_w_explicit_client_w_enc_header(self): + path = "/path" + response = {"foo": "Foo"} + encryption_headers = {"bar": "Bar"} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = response + derived = self._derivedClass(path)() # Make sure changes is not a set instance before calling reload # (which will clear / replace it with an empty set), checked below. 
derived._changes = object() - derived.reload(projection="full", client=client, timeout=42) - self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "/path", - "query_params": {"projection": "full"}, - "headers": {}, - "_target_object": derived, - "timeout": 42, - "retry": DEFAULT_RETRY, - }, - ) + derived._actual_encryption_headers = encryption_headers + + derived.reload(projection="full", client=client) + + self.assertEqual(derived._properties, response) self.assertEqual(derived._changes, set()) + expected_query_params = {"projection": "full"} + client._get_resource.assert_called_once_with( + path, + query_params=expected_query_params, + headers=encryption_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=derived, + ) + def test__set_properties(self): mixin = self._make_one() self.assertEqual(mixin._properties, {}) @@ -230,182 +245,204 @@ def test__patch_property(self): derived._patch_property("foo", "Foo") self.assertEqual(derived._properties, {"foo": "Foo"}) - def test_patch(self): - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path")() + def test_patch_w_defaults(self): + path = "/path" + api_response = {"foo": "Foo"} + derived = self._derivedClass(path)() # Make sure changes is non-empty, so we can observe a change. - BAR = object() - BAZ = object() - derived._properties = {"bar": BAR, "baz": BAZ} + bar = object() + baz = object() + derived._properties = {"bar": bar, "baz": baz} derived._changes = set(["bar"]) # Ignore baz. - derived.patch(client=client, timeout=42) - self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/path", - "query_params": {"projection": "full"}, - # Since changes does not include `baz`, we don't see it sent. - "data": {"bar": BAR}, - "_target_object": derived, - "timeout": 42, - "retry": DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, - }, - ) + client = derived.client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + + derived.patch() + + self.assertEqual(derived._properties, api_response) # Make sure changes get reset by patch(). self.assertEqual(derived._changes, set()) - def test_patch_with_metageneration_match(self): - GENERATION_NUMBER = 9 - METAGENERATION_NUMBER = 6 + expected_data = {"bar": bar} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + _target_object=derived, + ) - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path")() + def test_patch_w_metageneration_match_w_timeout_w_retry(self): + path = "/path" + api_response = {"foo": "Foo"} + derived = self._derivedClass(path)() # Make sure changes is non-empty, so we can observe a change. - BAR = object() - BAZ = object() - derived._properties = {"bar": BAR, "baz": BAZ} + bar = object() + baz = object() + derived._properties = {"bar": bar, "baz": baz} derived._changes = set(["bar"]) # Ignore baz. 
+ client = derived.client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + timeout = 42 + retry = mock.Mock(spec=[]) + generation_number = 9 + metageneration_number = 6 + derived.patch( - client=client, - timeout=42, - if_generation_match=GENERATION_NUMBER, - if_metageneration_match=METAGENERATION_NUMBER, + if_generation_match=generation_number, + if_metageneration_match=metageneration_number, + timeout=timeout, + retry=retry, ) + self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/path", - "query_params": { - "projection": "full", - "ifGenerationMatch": GENERATION_NUMBER, - "ifMetagenerationMatch": METAGENERATION_NUMBER, - }, - # Since changes does not include `baz`, we don't see it sent. - "data": {"bar": BAR}, - "_target_object": derived, - "timeout": 42, - "retry": DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, - }, - ) # Make sure changes get reset by patch(). self.assertEqual(derived._changes, set()) - def test_patch_w_user_project(self): + expected_data = {"bar": bar} + expected_query_params = { + "projection": "full", + "ifGenerationMatch": generation_number, + "ifMetagenerationMatch": metageneration_number, + } + client._patch_resource.assert_called_once_with( + path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + _target_object=derived, + ) + + def test_patch_w_user_project_w_explicit_client(self): + path = "/path" user_project = "user-project-123" - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path", user_project)() + api_response = {"foo": "Foo"} + derived = self._derivedClass(path, user_project)() # Make sure changes is non-empty, so we can observe a change. - BAR = object() - BAZ = object() - derived._properties = {"bar": BAR, "baz": BAZ} + bar = object() + baz = object() + derived._properties = {"bar": bar, "baz": baz} derived._changes = set(["bar"]) # Ignore baz. + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + derived.patch(client=client) + self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/path", - "query_params": {"projection": "full", "userProject": user_project}, - # Since changes does not include `baz`, we don't see it sent. - "data": {"bar": BAR}, - "_target_object": derived, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, - }, - ) # Make sure changes get reset by patch(). self.assertEqual(derived._changes, set()) - def test_update(self): - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path")() + expected_data = {"bar": bar} + expected_query_params = { + "projection": "full", + "userProject": user_project, + } + client._patch_resource.assert_called_once_with( + path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + _target_object=derived, + ) + + def test_update_w_defaults(self): + path = "/path" + api_response = {"foo": "Foo"} + derived = self._derivedClass(path)() # Make sure changes is non-empty, so we can observe a change. 
- BAR = object() - BAZ = object() - derived._properties = {"bar": BAR, "baz": BAZ} + bar = object() + baz = object() + expected_data = derived._properties = {"bar": bar, "baz": baz} derived._changes = set(["bar"]) # Update sends 'baz' anyway. - derived.update(client=client, timeout=42) - self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PUT") - self.assertEqual(kw[0]["path"], "/path") - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) - self.assertEqual(kw[0]["data"], {"bar": BAR, "baz": BAZ}) - self.assertEqual(kw[0]["timeout"], 42) - self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED) - # Make sure changes get reset by patch(). + client = derived.client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = api_response + + derived.update() + + self.assertEqual(derived._properties, api_response) + # Make sure changes get reset by update(). self.assertEqual(derived._changes, set()) - def test_update_with_metageneration_not_match(self): - GENERATION_NUMBER = 6 + expected_query_params = {"projection": "full"} + client._put_resource.assert_called_once_with( + path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + _target_object=derived, + ) - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path")() + def test_update_with_metageneration_not_match_w_timeout_w_retry(self): + path = "/path" + generation_number = 6 + api_response = {"foo": "Foo"} + derived = self._derivedClass(path)() # Make sure changes is non-empty, so we can observe a change. - BAR = object() - BAZ = object() - derived._properties = {"bar": BAR, "baz": BAZ} + bar = object() + baz = object() + expected_data = derived._properties = {"bar": bar, "baz": baz} derived._changes = set(["bar"]) # Update sends 'baz' anyway. + client = derived.client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = api_response + timeout = 42 + derived.update( - client=client, timeout=42, if_metageneration_not_match=GENERATION_NUMBER + if_metageneration_not_match=generation_number, timeout=timeout, ) + self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PUT") - self.assertEqual(kw[0]["path"], "/path") - self.assertEqual( - kw[0]["query_params"], - {"projection": "full", "ifMetagenerationNotMatch": GENERATION_NUMBER}, - ) - self.assertEqual(kw[0]["data"], {"bar": BAR, "baz": BAZ}) - self.assertEqual(kw[0]["timeout"], 42) - self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED) # Make sure changes get reset by patch(). 
self.assertEqual(derived._changes, set()) - def test_update_w_user_project(self): + expected_query_params = { + "projection": "full", + "ifMetagenerationNotMatch": generation_number, + } + client._put_resource.assert_called_once_with( + path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + _target_object=derived, + ) + + def test_update_w_user_project_w_retry_w_explicit_client(self): user_project = "user-project-123" - connection = _Connection({"foo": "Foo"}) - client = _Client(connection) - derived = self._derivedClass("/path", user_project)() + path = "/path" + api_response = {"foo": "Foo"} + derived = self._derivedClass(path, user_project)() # Make sure changes is non-empty, so we can observe a change. - BAR = object() - BAZ = object() - derived._properties = {"bar": BAR, "baz": BAZ} + bar = object() + baz = object() + expected_data = derived._properties = {"bar": bar, "baz": baz} derived._changes = set(["bar"]) # Update sends 'baz' anyway. - derived.update(client=client) - self.assertEqual(derived._properties, {"foo": "Foo"}) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PUT") - self.assertEqual(kw[0]["path"], "/path") - self.assertEqual( - kw[0]["query_params"], {"projection": "full", "userProject": user_project} - ) - self.assertEqual(kw[0]["data"], {"bar": BAR, "baz": BAZ}) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) - self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED) + client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = api_response + retry = mock.Mock(spec=[]) + + derived.update(client=client, retry=retry) # Make sure changes get reset by patch(). 
self.assertEqual(derived._changes, set()) + expected_query_params = { + "projection": "full", + "userProject": user_project, + } + client._put_resource.assert_called_once_with( + path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=derived, + ) + class Test__scalar_property(unittest.TestCase): def _call_fut(self, fieldName): @@ -556,15 +593,43 @@ def test_hostname_and_scheme(self): self.assertEqual(self._call_fut(host=HOST, scheme=SCHEME), EXPECTED_URL) -class _Connection(object): - def __init__(self, *responses): - self._responses = responses - self._requested = [] +class Test__api_core_retry_to_resumable_media_retry(unittest.TestCase): + def test_conflict(self): + from google.cloud.storage._helpers import ( + _api_core_retry_to_resumable_media_retry, + ) - def api_request(self, **kw): - self._requested.append(kw) - response, self._responses = self._responses[0], self._responses[1:] - return response + with self.assertRaises(ValueError): + _api_core_retry_to_resumable_media_retry(retry=DEFAULT_RETRY, num_retries=2) + + def test_retry(self): + from google.cloud.storage._helpers import ( + _api_core_retry_to_resumable_media_retry, + ) + + retry_strategy = _api_core_retry_to_resumable_media_retry(retry=DEFAULT_RETRY) + self.assertEqual(retry_strategy.max_sleep, DEFAULT_RETRY._maximum) + self.assertEqual(retry_strategy.max_cumulative_retry, DEFAULT_RETRY._deadline) + self.assertEqual(retry_strategy.initial_delay, DEFAULT_RETRY._initial) + self.assertEqual(retry_strategy.multiplier, DEFAULT_RETRY._multiplier) + + def test_num_retries(self): + from google.cloud.storage._helpers import ( + _api_core_retry_to_resumable_media_retry, + ) + + retry_strategy = _api_core_retry_to_resumable_media_retry( + retry=None, num_retries=2 + ) + self.assertEqual(retry_strategy.max_retries, 2) + + def test_none(self): + from google.cloud.storage._helpers import ( + _api_core_retry_to_resumable_media_retry, + ) + + retry_strategy = _api_core_retry_to_resumable_media_retry(retry=None) + self.assertEqual(retry_strategy.max_retries, 0) class _MD5Hash(object): @@ -598,8 +663,3 @@ def __init__(self): def b64encode(self, value): self._called_b64encode.append(value) return value - - -class _Client(object): - def __init__(self, connection): - self._connection = connection diff --git a/tests/unit/test_acl.py b/tests/unit/test_acl.py index 47400f1ef..aad44809e 100644 --- a/tests/unit/test_acl.py +++ b/tests/unit/test_acl.py @@ -14,6 +14,10 @@ import unittest +import mock + +from google.cloud.storage.retry import DEFAULT_RETRY + class Test_ACLEntity(unittest.TestCase): @staticmethod @@ -530,270 +534,345 @@ def test_get_entities_nonempty(self): entity = acl.entity(TYPE, ID) self.assertEqual(acl.get_entities(), [entity]) - def test_reload_missing(self): + def test_reload_missing_w_defaults(self): # https://github.com/GoogleCloudPlatform/google-cloud-python/issues/652 - ROLE = "role" - connection = _Connection({}) - client = _Client(connection) - acl = self._make_one() - acl.reload_path = "/testing/acl" + class Derived(self._get_target_class()): + client = None + + role = "role" + reload_path = "/testing/acl" + api_response = {} + acl = Derived() + acl.reload_path = reload_path acl.loaded = True - acl.entity("allUsers", ROLE) - acl.reload(client=client, timeout=42) + acl.entity("allUsers", role) + client = acl.client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + + acl.reload() + self.assertEqual(list(acl), 
[]) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "/testing/acl", - "query_params": {}, - "timeout": 42, - }, + + expected_query_params = {} + client._get_resource.assert_called_once_with( + reload_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, ) - def test_reload_empty_result_clears_local(self): - ROLE = "role" - connection = _Connection({"items": []}) - client = _Client(connection) + def test_reload_w_empty_result_w_timeout_w_retry_w_explicit_client(self): + role = "role" + reload_path = "/testing/acl" + timeout = 42 + retry = mock.Mock(spec=[]) + api_response = {"items": []} acl = self._make_one() - acl.reload_path = "/testing/acl" + acl.reload_path = reload_path acl.loaded = True - acl.entity("allUsers", ROLE) + acl.entity("allUsers", role) + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response - acl.reload(client=client) + acl.reload(client=client, timeout=timeout, retry=retry) self.assertTrue(acl.loaded) self.assertEqual(list(acl), []) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "/testing/acl", - "query_params": {}, - "timeout": self._get_default_timeout(), - }, + + expected_query_params = {} + client._get_resource.assert_called_once_with( + reload_path, + query_params=expected_query_params, + timeout=timeout, + retry=retry, ) - def test_reload_nonempty_result_w_user_project(self): - ROLE = "role" - USER_PROJECT = "user-project-123" - connection = _Connection({"items": [{"entity": "allUsers", "role": ROLE}]}) - client = _Client(connection) + def test_reload_w_nonempty_result_w_user_project(self): + role = "role" + reload_path = "/testing/acl" + user_project = "user-project-123" + api_response = {"items": [{"entity": "allUsers", "role": role}]} acl = self._make_one() - acl.reload_path = "/testing/acl" + acl.reload_path = reload_path acl.loaded = True - acl.user_project = USER_PROJECT + acl.user_project = user_project + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response acl.reload(client=client) self.assertTrue(acl.loaded) - self.assertEqual(list(acl), [{"entity": "allUsers", "role": ROLE}]) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "/testing/acl", - "query_params": {"userProject": USER_PROJECT}, - "timeout": self._get_default_timeout(), - }, + self.assertEqual(list(acl), [{"entity": "allUsers", "role": role}]) + + expected_query_params = {"userProject": user_project} + client._get_resource.assert_called_once_with( + reload_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, ) def test_save_none_set_none_passed(self): - connection = _Connection() - client = _Client(connection) + save_path = "/testing" + client = mock.Mock(spec=["_patch_resource"]) acl = self._make_one() - acl.save_path = "/testing" + acl.save_path = save_path + acl.save(client=client) - kw = connection._requested - self.assertEqual(len(kw), 0) - def test_save_existing_missing_none_passed(self): - connection = _Connection({}) - client = _Client(connection) - acl = self._make_one() - acl.save_path = "/testing" + client._patch_resource.assert_not_called() + + def test_save_w_empty_response_w_defaults(self): + class Derived(self._get_target_class()): + client = None + + save_path = "/testing" + 
api_response = {} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + acl = Derived() + acl.client = client + acl.save_path = save_path acl.loaded = True - acl.save(client=client, timeout=42) + + acl.save() + self.assertEqual(list(acl), []) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/testing") - self.assertEqual(kw[0]["data"], {"acl": []}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) - self.assertEqual(kw[0]["timeout"], 42) - - def test_save_no_acl(self): - ROLE = "role" - AFTER = [{"entity": "allUsers", "role": ROLE}] - connection = _Connection({"acl": AFTER}) - client = _Client(connection) + + expected_data = {"acl": []} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=None, + ) + + def test_save_no_acl_w_timeout(self): + save_path = "/testing" + role = "role" + expected_acl = [{"entity": "allUsers", "role": role}] + api_response = {"acl": expected_acl} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response acl = self._make_one() - acl.save_path = "/testing" + acl.save_path = save_path acl.loaded = True - acl.entity("allUsers").grant(ROLE) - acl.save(client=client) - self.assertEqual(list(acl), AFTER) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/testing") - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/testing", - "query_params": {"projection": "full"}, - "data": {"acl": AFTER}, - "timeout": self._get_default_timeout(), - }, + acl.entity("allUsers").grant(role) + timeout = 42 + + acl.save(client=client, timeout=timeout) + + self.assertEqual(list(acl), expected_acl) + + expected_data = api_response + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=None, ) def test_save_w_acl_w_user_project(self): - ROLE1 = "role1" - ROLE2 = "role2" - STICKY = {"entity": "allUsers", "role": ROLE2} - USER_PROJECT = "user-project-123" - new_acl = [{"entity": "allUsers", "role": ROLE1}] - connection = _Connection({"acl": [STICKY] + new_acl}) - client = _Client(connection) + save_path = "/testing" + user_project = "user-project-123" + role1 = "role1" + role2 = "role2" + sticky = {"entity": "allUsers", "role": role2} + new_acl = [{"entity": "allUsers", "role": role1}] + api_response = {"acl": [sticky] + new_acl} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response acl = self._make_one() - acl.save_path = "/testing" + acl.save_path = save_path acl.loaded = True - acl.user_project = USER_PROJECT + acl.user_project = user_project acl.save(new_acl, client=client) entries = list(acl) self.assertEqual(len(entries), 2) - self.assertTrue(STICKY in entries) + self.assertTrue(sticky in entries) self.assertTrue(new_acl[0] in entries) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/testing", - "query_params": {"projection": "full", "userProject": USER_PROJECT}, - "data": {"acl": new_acl}, - "timeout": self._get_default_timeout(), - }, + + expected_data = {"acl": new_acl} + 
expected_query_params = {"projection": "full", "userProject": user_project} + client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=None, ) def test_save_prefefined_invalid(self): - connection = _Connection() - client = _Client(connection) + save_path = "/testing" + client = mock.Mock(spec=["_patch_resource"]) acl = self._make_one() - acl.save_path = "/testing" + acl.save_path = save_path acl.loaded = True + with self.assertRaises(ValueError): acl.save_predefined("bogus", client=client) - def test_save_predefined_valid(self): - PREDEFINED = "private" - connection = _Connection({"acl": []}) - client = _Client(connection) - acl = self._make_one() - acl.save_path = "/testing" + client._patch_resource.assert_not_called() + + def test_save_predefined_w_defaults(self): + class Derived(self._get_target_class()): + client = None + + save_path = "/testing" + predefined = "private" + api_response = {"acl": []} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + acl = Derived() + acl.save_path = save_path acl.loaded = True - acl.save_predefined(PREDEFINED, client=client, timeout=42) + acl.client = client + + acl.save_predefined(predefined) + entries = list(acl) self.assertEqual(len(entries), 0) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/testing", - "query_params": {"projection": "full", "predefinedAcl": PREDEFINED}, - "data": {"acl": []}, - "timeout": 42, - }, + + expected_data = {"acl": []} + expected_query_params = { + "projection": "full", + "predefinedAcl": predefined, + } + client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=None, ) - def test_save_predefined_w_XML_alias(self): - PREDEFINED_XML = "project-private" - PREDEFINED_JSON = "projectPrivate" - connection = _Connection({"acl": []}) - client = _Client(connection) + def test_save_predefined_w_XML_alias_w_timeout(self): + save_path = "/testing" + predefined_xml = "project-private" + predefined_json = "projectPrivate" + api_response = {"acl": []} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response acl = self._make_one() - acl.save_path = "/testing" + acl.save_path = save_path acl.loaded = True - acl.save_predefined(PREDEFINED_XML, client=client) + timeout = 42 + + acl.save_predefined(predefined_xml, client=client, timeout=timeout) + entries = list(acl) self.assertEqual(len(entries), 0) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/testing", - "query_params": { - "projection": "full", - "predefinedAcl": PREDEFINED_JSON, - }, - "data": {"acl": []}, - "timeout": self._get_default_timeout(), - }, + + expected_data = {"acl": []} + expected_query_params = { + "projection": "full", + "predefinedAcl": predefined_json, + } + client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=None, ) - def test_save_predefined_valid_w_alternate_query_param(self): + def test_save_predefined_w_alternate_query_param(self): # Cover case where subclass overrides _PREDEFINED_QUERY_PARAM - PREDEFINED = "publicRead" - connection = _Connection({"acl": []}) - client = _Client(connection) + save_path = "/testing" + 
predefined = "publicRead" + api_response = {"acl": []} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response acl = self._make_one() - acl.save_path = "/testing" + acl.save_path = save_path acl.loaded = True acl._PREDEFINED_QUERY_PARAM = "alternate" - acl.save_predefined(PREDEFINED, client=client) + + acl.save_predefined(predefined, client=client) + entries = list(acl) self.assertEqual(len(entries), 0) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/testing", - "query_params": {"projection": "full", "alternate": PREDEFINED}, - "data": {"acl": []}, - "timeout": self._get_default_timeout(), - }, + + expected_data = {"acl": []} + expected_query_params = { + "projection": "full", + "alternate": predefined, + } + client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=None, ) - def test_clear(self): - ROLE1 = "role1" - ROLE2 = "role2" - STICKY = {"entity": "allUsers", "role": ROLE2} - connection = _Connection({"acl": [STICKY]}) - client = _Client(connection) - acl = self._make_one() - acl.save_path = "/testing" - acl.loaded = True - acl.entity("allUsers", ROLE1) - acl.clear(client=client, timeout=42) - self.assertEqual(list(acl), [STICKY]) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "PATCH", - "path": "/testing", - "query_params": {"projection": "full"}, - "data": {"acl": []}, - "timeout": 42, - }, + def test_clear_w_defaults(self): + class Derived(self._get_target_class()): + client = None + + save_path = "/testing" + role1 = "role1" + role2 = "role2" + sticky = {"entity": "allUsers", "role": role2} + api_response = {"acl": [sticky]} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + acl = Derived() + acl.client = client + acl.save_path = save_path + acl.loaded = True + acl.entity("allUsers", role1) + + acl.clear() + + self.assertEqual(list(acl), [sticky]) + + expected_data = {"acl": []} + expected_query_params = { + "projection": "full", + } + client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=None, + ) + + def test_clear_w_explicit_client_w_timeout(self): + save_path = "/testing" + role1 = "role1" + role2 = "role2" + sticky = {"entity": "allUsers", "role": role2} + api_response = {"acl": [sticky]} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + acl = self._make_one() + acl.save_path = save_path + acl.loaded = True + acl.entity("allUsers", role1) + timeout = 42 + + acl.clear(client=client, timeout=timeout) + + self.assertEqual(list(acl), [sticky]) + + expected_data = {"acl": []} + expected_query_params = { + "projection": "full", + } + client._patch_resource.assert_called_once_with( + save_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=None, ) @@ -905,22 +984,3 @@ def __init__(self, name): @property def path(self): return "/b/%s" % self.name - - -class _Connection(object): - _delete_ok = False - - def __init__(self, *responses): - self._responses = responses - self._requested = [] - self._deleted = [] - - def api_request(self, **kw): - self._requested.append(kw) - response, self._responses = self._responses[0], self._responses[1:] - return response - - 
-class _Client(object): - def __init__(self, connection): - self._connection = connection diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 50732a7f0..a21385821 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -27,7 +27,9 @@ from six.moves import http_client from google.cloud.storage.retry import DEFAULT_RETRY +from google.cloud.storage.retry import DEFAULT_RETRY_IF_ETAG_IN_JSON from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED +from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED def _make_credentials(): @@ -56,7 +58,7 @@ def _get_default_timeout(): def _make_client(*args, **kw): from google.cloud.storage.client import Client - return Client(*args, **kw) + return mock.create_autospec(Client, instance=True, **kw) def test_ctor_wo_encryption_key(self): BLOB_NAME = "blob-name" @@ -421,10 +423,10 @@ def test_public_url_with_non_ascii(self): def test_generate_signed_url_w_invalid_version(self): BLOB_NAME = "blob-name" EXPIRATION = "2014-10-16T20:34:37.000Z" - connection = _Connection() - client = _Client(connection) + client = self._make_client() bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket) + with self.assertRaises(ValueError): blob.generate_signed_url(EXPIRATION, version="nonesuch") @@ -463,8 +465,13 @@ def _generate_signed_url_helper( if expiration is None: expiration = datetime.datetime.utcnow().replace(tzinfo=UTC) + delta - connection = _Connection() - client = _Client(connection) + if credentials is None: + expected_creds = _make_credentials() + client = self._make_client(_credentials=expected_creds) + else: + expected_creds = credentials + client = self._make_client(_credentials=object()) + bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket, encryption_key=encryption_key) @@ -499,11 +506,6 @@ def _generate_signed_url_helper( self.assertEqual(signed_uri, signer.return_value) - if credentials is None: - expected_creds = _Connection.credentials - else: - expected_creds = credentials - encoded_name = blob_name.encode("utf-8") quoted_name = parse.quote(encoded_name, safe=b"/~") @@ -688,117 +690,108 @@ def test_generate_signed_url_v4_w_credentials(self): credentials = object() self._generate_signed_url_v4_helper(credentials=credentials) - def test_exists_miss(self): - NONESUCH = "nonesuch" - not_found_response = ({"status": http_client.NOT_FOUND}, b"") - connection = _Connection(not_found_response) - client = _Client(connection) + def test_exists_miss_w_defaults(self): + from google.cloud.exceptions import NotFound + + blob_name = "nonesuch" + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.side_effect = NotFound("testing") bucket = _Bucket(client) - blob = self._make_one(NONESUCH, bucket=bucket) - self.assertFalse(blob.exists(timeout=42)) - self.assertEqual(len(connection._requested), 1) - self.assertEqual( - connection._requested[0], - { - "method": "GET", - "path": "/b/name/o/{}".format(NONESUCH), - "query_params": {"fields": "name"}, - "_target_object": None, - "timeout": 42, - "retry": DEFAULT_RETRY, - }, + blob = self._make_one(blob_name, bucket=bucket) + + self.assertFalse(blob.exists()) + + expected_query_params = {"fields": "name"} + client._get_resource.assert_called_once_with( + blob.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, ) - def test_exists_hit_w_user_project(self): - BLOB_NAME = "blob-name" - USER_PROJECT = "user-project-123" - 
found_response = ({"status": http_client.OK}, b"") - connection = _Connection(found_response) - client = _Client(connection) - bucket = _Bucket(client, user_project=USER_PROJECT) - blob = self._make_one(BLOB_NAME, bucket=bucket) - bucket._blobs[BLOB_NAME] = 1 - self.assertTrue(blob.exists()) - self.assertEqual(len(connection._requested), 1) - self.assertEqual( - connection._requested[0], - { - "method": "GET", - "path": "/b/name/o/{}".format(BLOB_NAME), - "query_params": {"fields": "name", "userProject": USER_PROJECT}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - }, + def test_exists_hit_w_user_project_w_timeout(self): + blob_name = "blob-name" + user_project = "user-project-123" + timeout = 42 + api_response = {"name": blob_name} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = _Bucket(client, user_project=user_project) + blob = self._make_one(blob_name, bucket=bucket) + + self.assertTrue(blob.exists(timeout=timeout)) + + expected_query_params = {"fields": "name", "userProject": user_project} + client._get_resource.assert_called_once_with( + blob.path, + query_params=expected_query_params, + timeout=timeout, + retry=DEFAULT_RETRY, + _target_object=None, ) - def test_exists_hit_w_generation(self): - BLOB_NAME = "blob-name" - GENERATION = 123456 - found_response = ({"status": http_client.OK}, b"") - connection = _Connection(found_response) - client = _Client(connection) + def test_exists_hit_w_generation_w_retry(self): + blob_name = "blob-name" + generation = 123456 + api_response = {"name": blob_name} + retry = mock.Mock(spec=[]) + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response bucket = _Bucket(client) - blob = self._make_one(BLOB_NAME, bucket=bucket, generation=GENERATION) - bucket._blobs[BLOB_NAME] = 1 - self.assertTrue(blob.exists()) - self.assertEqual(len(connection._requested), 1) - self.assertEqual( - connection._requested[0], - { - "method": "GET", - "path": "/b/name/o/{}".format(BLOB_NAME), - "query_params": {"fields": "name", "generation": GENERATION}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - }, + blob = self._make_one(blob_name, bucket=bucket, generation=generation) + + self.assertTrue(blob.exists(retry=retry)) + + expected_query_params = {"fields": "name", "generation": generation} + client._get_resource.assert_called_once_with( + blob.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=None, ) def test_exists_w_generation_match(self): - BLOB_NAME = "blob-name" - GENERATION_NUMBER = 123456 - METAGENERATION_NUMBER = 6 - - found_response = ({"status": http_client.OK}, b"") - connection = _Connection(found_response) - client = _Client(connection) + blob_name = "blob-name" + generation_number = 123456 + metageneration_number = 6 + api_response = {"name": blob_name} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response bucket = _Bucket(client) - blob = self._make_one(BLOB_NAME, bucket=bucket) - bucket._blobs[BLOB_NAME] = 1 + blob = self._make_one(blob_name, bucket=bucket) + self.assertTrue( blob.exists( - if_generation_match=GENERATION_NUMBER, - if_metageneration_match=METAGENERATION_NUMBER, + if_generation_match=generation_number, + if_metageneration_match=metageneration_number, + retry=None, ) ) - self.assertEqual(len(connection._requested), 1) - self.assertEqual( - 
connection._requested[0], - { - "method": "GET", - "path": "/b/name/o/{}".format(BLOB_NAME), - "query_params": { - "fields": "name", - "ifGenerationMatch": GENERATION_NUMBER, - "ifMetagenerationMatch": METAGENERATION_NUMBER, - }, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - }, + + expected_query_params = { + "fields": "name", + "ifGenerationMatch": generation_number, + "ifMetagenerationMatch": metageneration_number, + } + client._get_resource.assert_called_once_with( + blob.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=None, + _target_object=None, ) def test_delete_wo_generation(self): BLOB_NAME = "blob-name" - not_found_response = ({"status": http_client.NOT_FOUND}, b"") - connection = _Connection(not_found_response) - client = _Client(connection) + client = self._make_client() bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket) bucket._blobs[BLOB_NAME] = 1 + blob.delete() - self.assertFalse(blob.exists()) + self.assertEqual( bucket._deleted, [ @@ -819,14 +812,13 @@ def test_delete_wo_generation(self): def test_delete_w_generation(self): BLOB_NAME = "blob-name" GENERATION = 123456 - not_found_response = ({"status": http_client.NOT_FOUND}, b"") - connection = _Connection(not_found_response) - client = _Client(connection) + client = self._make_client() bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket, generation=GENERATION) bucket._blobs[BLOB_NAME] = 1 + blob.delete(timeout=42) - self.assertFalse(blob.exists()) + self.assertEqual( bucket._deleted, [ @@ -847,14 +839,13 @@ def test_delete_w_generation(self): def test_delete_w_generation_match(self): BLOB_NAME = "blob-name" GENERATION = 123456 - not_found_response = ({"status": http_client.NOT_FOUND}, b"") - connection = _Connection(not_found_response) - client = _Client(connection) + client = self._make_client() bucket = _Bucket(client) blob = self._make_one(BLOB_NAME, bucket=bucket, generation=GENERATION) bucket._blobs[BLOB_NAME] = 1 + blob.delete(timeout=42, if_generation_match=GENERATION) - self.assertFalse(blob.exists()) + self.assertEqual( bucket._deleted, [ @@ -1031,7 +1022,102 @@ def _mock_requests_response(status_code, headers, content=b""): response.request = requests.Request("POST", "http://example.com").prepare() return response - def _do_download_helper_wo_chunks(self, w_range, raw_download, timeout=None): + def test__extract_headers_from_download_gzipped(self): + blob_name = "blob-name" + client = mock.Mock(spec=["_http"]) + bucket = _Bucket(client) + blob = self._make_one(blob_name, bucket=bucket) + + response = self._mock_requests_response( + http_client.OK, + headers={ + "Content-Type": "application/json", + "Content-Language": "ko-kr", + "Cache-Control": "max-age=1337;public", + "Content-Encoding": "gzip", + "X-Goog-Storage-Class": "STANDARD", + "X-Goog-Hash": "crc32c=4gcgLQ==,md5=CS9tHYTtyFntzj7B9nkkJQ==", + }, + # { "x": 5 } gzipped + content=b"\x1f\x8b\x08\x00\xcfo\x17_\x02\xff\xabVP\xaaP\xb2R0U\xa8\x05\x00\xa1\xcaQ\x93\n\x00\x00\x00", + ) + blob._extract_headers_from_download(response) + + self.assertEqual(blob.content_type, "application/json") + self.assertEqual(blob.content_language, "ko-kr") + self.assertEqual(blob.content_encoding, "gzip") + self.assertEqual(blob.cache_control, "max-age=1337;public") + self.assertEqual(blob.storage_class, "STANDARD") + self.assertEqual(blob.md5_hash, "CS9tHYTtyFntzj7B9nkkJQ==") + self.assertEqual(blob.crc32c, "4gcgLQ==") + + def 
test__extract_headers_from_download_empty(self): + blob_name = "blob-name" + client = mock.Mock(spec=["_http"]) + bucket = _Bucket(client) + blob = self._make_one(blob_name, bucket=bucket) + + response = self._mock_requests_response( + http_client.OK, + headers={ + "Content-Type": "application/octet-stream", + "Content-Language": "en-US", + "Cache-Control": "max-age=1337;public", + "Content-Encoding": "gzip", + "X-Goog-Storage-Class": "STANDARD", + "X-Goog-Hash": "crc32c=4/c+LQ==,md5=CS9tHYTt/+ntzj7B9nkkJQ==", + }, + content=b"", + ) + blob._extract_headers_from_download(response) + self.assertEqual(blob.content_type, "application/octet-stream") + self.assertEqual(blob.content_language, "en-US") + self.assertEqual(blob.md5_hash, "CS9tHYTt/+ntzj7B9nkkJQ==") + self.assertEqual(blob.crc32c, "4/c+LQ==") + + def test__extract_headers_from_download_w_hash_response_header_none(self): + blob_name = "blob-name" + md5_hash = "CS9tHYTtyFntzj7B9nkkJQ==" + crc32c = "4gcgLQ==" + client = mock.Mock(spec=["_http"]) + bucket = _Bucket(client) + properties = { + "md5Hash": md5_hash, + "crc32c": crc32c, + } + blob = self._make_one(blob_name, bucket=bucket, properties=properties) + + response = self._mock_requests_response( + http_client.OK, + headers={"X-Goog-Hash": ""}, + # { "x": 5 } gzipped + content=b"\x1f\x8b\x08\x00\xcfo\x17_\x02\xff\xabVP\xaaP\xb2R0U\xa8\x05\x00\xa1\xcaQ\x93\n\x00\x00\x00", + ) + blob._extract_headers_from_download(response) + + self.assertEqual(blob.md5_hash, md5_hash) + self.assertEqual(blob.crc32c, crc32c) + + def test__extract_headers_from_download_w_response_headers_not_match(self): + blob_name = "blob-name" + client = mock.Mock(spec=["_http"]) + bucket = _Bucket(client) + blob = self._make_one(blob_name, bucket=bucket) + + response = self._mock_requests_response( + http_client.OK, + headers={"X-Goog-Hash": "bogus=4gcgLQ==,"}, + # { "x": 5 } gzipped + content=b"", + ) + blob._extract_headers_from_download(response) + + self.assertIsNone(blob.md5_hash) + self.assertIsNone(blob.crc32c) + + def _do_download_helper_wo_chunks( + self, w_range, raw_download, timeout=None, **extra_kwargs + ): blob_name = "blob-name" client = mock.Mock() bucket = _Bucket(client) @@ -1055,6 +1141,8 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download, timeout=None): expected_timeout = timeout timeout_kwarg = {"timeout": timeout} + extra_kwargs.update(timeout_kwarg) + with patch as patched: if w_range: blob._do_download( @@ -1065,7 +1153,7 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download, timeout=None): start=1, end=3, raw_download=raw_download, - **timeout_kwarg + **extra_kwargs ) else: blob._do_download( @@ -1074,7 +1162,7 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download, timeout=None): download_url, headers, raw_download=raw_download, - **timeout_kwarg + **extra_kwargs ) if w_range: @@ -1100,9 +1188,21 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download, timeout=None): transport, timeout=expected_timeout ) + retry_strategy = patched.return_value._retry_strategy + retry = extra_kwargs.get("retry", None) + if retry is None: + self.assertEqual(retry_strategy.max_retries, 0) + else: + self.assertEqual(retry_strategy.max_sleep, retry._maximum) + def test__do_download_wo_chunks_wo_range_wo_raw(self): self._do_download_helper_wo_chunks(w_range=False, raw_download=False) + def test__do_download_wo_chunks_wo_range_wo_raw_w_retry(self): + self._do_download_helper_wo_chunks( + w_range=False, raw_download=False, retry=DEFAULT_RETRY + ) + def 
test__do_download_wo_chunks_w_range_wo_raw(self): self._do_download_helper_wo_chunks(w_range=True, raw_download=False) @@ -1121,7 +1221,7 @@ def _do_download_helper_w_chunks( self, w_range, raw_download, timeout=None, checksum="md5" ): blob_name = "blob-name" - client = mock.Mock(_credentials=_make_credentials(), spec=["_credentials"]) + client = self._make_client() bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) blob._CHUNK_SIZE_MULTIPLE = 1 @@ -1224,42 +1324,34 @@ def test__do_download_w_chunks_wo_checksum(self): patch.assert_not_called() def test_download_to_file_with_failure(self): - import requests - from google.resumable_media import InvalidResponse - from google.cloud import exceptions - - raw_response = requests.Response() - raw_response.status_code = http_client.NOT_FOUND - raw_request = requests.Request("GET", "http://example.com") - raw_response.request = raw_request.prepare() - grmp_response = InvalidResponse(raw_response) + from google.cloud.exceptions import NotFound blob_name = "blob-name" - media_link = "http://test.invalid" client = self._make_client() + client.download_blob_to_file.side_effect = NotFound("testing") bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) - blob._properties["mediaLink"] = media_link - blob._do_download = mock.Mock() - blob._do_download.side_effect = grmp_response - file_obj = io.BytesIO() - with self.assertRaises(exceptions.NotFound): + + with self.assertRaises(NotFound): blob.download_to_file(file_obj) self.assertEqual(file_obj.tell(), 0) - headers = {"accept-encoding": "gzip"} - blob._do_download.assert_called_once_with( - client._http, + expected_timeout = self._get_default_timeout() + client.download_blob_to_file.assert_called_once_with( + blob, file_obj, - media_link, - headers, - None, - None, - False, - timeout=self._get_default_timeout(), + start=None, + end=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=False, + timeout=expected_timeout, checksum="md5", + retry=DEFAULT_RETRY, ) def test_download_to_file_wo_media_link(self): @@ -1267,7 +1359,6 @@ def test_download_to_file_wo_media_link(self): client = self._make_client() bucket = _Bucket(client) blob = self._make_one(blob_name, bucket=bucket) - blob._do_download = mock.Mock() file_obj = io.BytesIO() blob.download_to_file(file_obj) @@ -1275,53 +1366,49 @@ def test_download_to_file_wo_media_link(self): # Make sure the media link is still unknown. 
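Note: throughout this file the download tests stop mocking `blob._do_download` and instead assert that each public download method forwards everything, including the new `retry` argument, to `client.download_blob_to_file`. A minimal sketch of that delegation check, with the conditional-fetch kwargs (`if_generation_match` and friends) omitted for brevity (illustrative only, not part of the patch):

import io
import mock

client = mock.Mock(spec=["download_blob_to_file"])
blob = mock.sentinel.blob  # stand-in for the Blob under test
stream = io.BytesIO()

# A download method is expected to forward its options verbatim...
client.download_blob_to_file(
    blob, stream, start=None, end=None, raw_download=False,
    timeout=60, checksum="md5", retry=None,
)

# ...so a single assertion covers the whole delegation contract.
client.download_blob_to_file.assert_called_once_with(
    blob, stream, start=None, end=None, raw_download=False,
    timeout=60, checksum="md5", retry=None,
)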
self.assertIsNone(blob.media_link) - expected_url = ( - "https://storage.googleapis.com/download/storage/v1/b/" - "name/o/blob-name?alt=media" - ) - headers = {"accept-encoding": "gzip"} - blob._do_download.assert_called_once_with( - client._http, + expected_timeout = self._get_default_timeout() + client.download_blob_to_file.assert_called_once_with( + blob, file_obj, - expected_url, - headers, - None, - None, - False, - timeout=self._get_default_timeout(), + start=None, + end=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=False, + timeout=expected_timeout, checksum="md5", + retry=DEFAULT_RETRY, ) def test_download_to_file_w_generation_match(self): - GENERATION_NUMBER = 6 - HEADERS = {"accept-encoding": "gzip"} - EXPECTED_URL = ( - "https://storage.googleapis.com/download/storage/v1/b/" - "name/o/blob-name?alt=media&ifGenerationNotMatch={}".format( - GENERATION_NUMBER - ) - ) - + generation_number = 6 client = self._make_client() blob = self._make_one("blob-name", bucket=_Bucket(client)) - blob._do_download = mock.Mock() file_obj = io.BytesIO() - blob.download_to_file(file_obj, if_generation_not_match=GENERATION_NUMBER) + blob.download_to_file(file_obj, if_generation_not_match=generation_number) - blob._do_download.assert_called_once_with( - client._http, + expected_timeout = self._get_default_timeout() + client.download_blob_to_file.assert_called_once_with( + blob, file_obj, - EXPECTED_URL, - HEADERS, - None, - None, - False, - timeout=self._get_default_timeout(), + start=None, + end=None, + if_generation_match=None, + if_generation_not_match=generation_number, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=False, + timeout=expected_timeout, checksum="md5", + retry=DEFAULT_RETRY, ) - def _download_to_file_helper(self, use_chunks, raw_download, timeout=None): + def _download_to_file_helper( + self, use_chunks, raw_download, timeout=None, **extra_kwargs + ): blob_name = "blob-name" client = self._make_client() bucket = _Bucket(client) @@ -1331,7 +1418,6 @@ def _download_to_file_helper(self, use_chunks, raw_download, timeout=None): if use_chunks: blob._CHUNK_SIZE_MULTIPLE = 1 blob.chunk_size = 3 - blob._do_download = mock.Mock() if timeout is None: expected_timeout = self._get_default_timeout() @@ -1340,28 +1426,36 @@ def _download_to_file_helper(self, use_chunks, raw_download, timeout=None): expected_timeout = timeout timeout_kwarg = {"timeout": timeout} + extra_kwargs.update(timeout_kwarg) + file_obj = io.BytesIO() if raw_download: - blob.download_to_file(file_obj, raw_download=True, **timeout_kwarg) + blob.download_to_file(file_obj, raw_download=True, **extra_kwargs) else: - blob.download_to_file(file_obj, **timeout_kwarg) + blob.download_to_file(file_obj, **extra_kwargs) - headers = {"accept-encoding": "gzip"} - blob._do_download.assert_called_once_with( - client._http, + expected_retry = extra_kwargs.get("retry", DEFAULT_RETRY) + client.download_blob_to_file.assert_called_once_with( + blob, file_obj, - media_link, - headers, - None, - None, - raw_download, + start=None, + end=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=raw_download, timeout=expected_timeout, checksum="md5", + retry=expected_retry, ) def test_download_to_file_wo_chunks_wo_raw(self): self._download_to_file_helper(use_chunks=False, raw_download=False) + def 
test_download_to_file_wo_chunks_no_retry(self): + self._download_to_file_helper(use_chunks=False, raw_download=False, retry=None) + def test_download_to_file_w_chunks_wo_raw(self): self._download_to_file_helper(use_chunks=True, raw_download=False) @@ -1376,7 +1470,9 @@ def test_download_to_file_w_custom_timeout(self): use_chunks=False, raw_download=False, timeout=9.58 ) - def _download_to_filename_helper(self, updated, raw_download, timeout=None): + def _download_to_filename_helper( + self, updated, raw_download, timeout=None, **extra_kwargs + ): import os from google.cloud.storage._helpers import _convert_to_timestamp from google.cloud._testing import _NamedTemporaryFile @@ -1384,20 +1480,23 @@ def _download_to_filename_helper(self, updated, raw_download, timeout=None): blob_name = "blob-name" client = self._make_client() bucket = _Bucket(client) - media_link = "http://example.com/media/" - properties = {"mediaLink": media_link} + properties = {} if updated is not None: properties["updated"] = updated blob = self._make_one(blob_name, bucket=bucket, properties=properties) - blob._do_download = mock.Mock() with _NamedTemporaryFile() as temp: if timeout is None: - blob.download_to_filename(temp.name, raw_download=raw_download) + blob.download_to_filename( + temp.name, raw_download=raw_download, **extra_kwargs + ) else: blob.download_to_filename( - temp.name, raw_download=raw_download, timeout=timeout, + temp.name, + raw_download=raw_download, + timeout=timeout, + **extra_kwargs ) if updated is None: @@ -1412,55 +1511,35 @@ def _download_to_filename_helper(self, updated, raw_download, timeout=None): expected_timeout = self._get_default_timeout() if timeout is None else timeout - headers = {"accept-encoding": "gzip"} - blob._do_download.assert_called_once_with( - client._http, + expected_retry = extra_kwargs.get("retry", DEFAULT_RETRY) + + client.download_blob_to_file.assert_called_once_with( + blob, mock.ANY, - media_link, - headers, - None, - None, - raw_download, + start=None, + end=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=raw_download, timeout=expected_timeout, checksum="md5", + retry=expected_retry, ) - stream = blob._do_download.mock_calls[0].args[1] + stream = client.download_blob_to_file.mock_calls[0].args[1] self.assertEqual(stream.name, temp.name) - def test_download_to_filename_w_generation_match(self): - from google.cloud._testing import _NamedTemporaryFile - - GENERATION_NUMBER = 6 - MEDIA_LINK = "http://example.com/media/" - EXPECTED_LINK = MEDIA_LINK + "?ifGenerationMatch={}".format(GENERATION_NUMBER) - HEADERS = {"accept-encoding": "gzip"} - - client = self._make_client() - - blob = self._make_one( - "blob-name", bucket=_Bucket(client), properties={"mediaLink": MEDIA_LINK} - ) - blob._do_download = mock.Mock() - - with _NamedTemporaryFile() as temp: - blob.download_to_filename(temp.name, if_generation_match=GENERATION_NUMBER) - - blob._do_download.assert_called_once_with( - client._http, - mock.ANY, - EXPECTED_LINK, - HEADERS, - None, - None, - False, - timeout=self._get_default_timeout(), - checksum="md5", - ) - def test_download_to_filename_w_updated_wo_raw(self): updated = "2014-12-06T13:13:50.690Z" self._download_to_filename_helper(updated=updated, raw_download=False) + def test_download_to_filename_w_updated_no_retry(self): + updated = "2014-12-06T13:13:50.690Z" + self._download_to_filename_helper( + updated=updated, raw_download=False, retry=None + ) + def 
test_download_to_filename_wo_updated_wo_raw(self): self._download_to_filename_helper(updated=None, raw_download=False) @@ -1476,18 +1555,42 @@ def test_download_to_filename_w_custom_timeout(self): updated=None, raw_download=False, timeout=9.58 ) + def test_download_to_filename_w_generation_match(self): + from google.cloud._testing import _NamedTemporaryFile + + generation_number = 6 + client = self._make_client() + blob = self._make_one("blob-name", bucket=_Bucket(client)) + + with _NamedTemporaryFile() as temp: + blob.download_to_filename(temp.name, if_generation_match=generation_number) + + expected_timeout = self._get_default_timeout() + client.download_blob_to_file.assert_called_once_with( + blob, + mock.ANY, + start=None, + end=None, + if_generation_match=generation_number, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=False, + timeout=expected_timeout, + checksum="md5", + retry=DEFAULT_RETRY, + ) + stream = client.download_blob_to_file.mock_calls[0].args[1] + self.assertEqual(stream.name, temp.name) + def test_download_to_filename_corrupted(self): from google.resumable_media import DataCorruption blob_name = "blob-name" client = self._make_client() bucket = _Bucket(client) - media_link = "http://example.com/media/" - properties = {"mediaLink": media_link} - - blob = self._make_one(blob_name, bucket=bucket, properties=properties) - blob._do_download = mock.Mock() - blob._do_download.side_effect = DataCorruption("testing") + blob = self._make_one(blob_name, bucket=bucket) + client.download_blob_to_file.side_effect = DataCorruption("testing") # Try to download into a temporary file (don't use # `_NamedTemporaryFile` it will try to remove after the file is @@ -1502,180 +1605,61 @@ def test_download_to_filename_corrupted(self): # Make sure the file was cleaned up. self.assertFalse(os.path.exists(filename)) - headers = {"accept-encoding": "gzip"} - blob._do_download.assert_called_once_with( - client._http, + expected_timeout = self._get_default_timeout() + client.download_blob_to_file.assert_called_once_with( + blob, mock.ANY, - media_link, - headers, - None, - None, - False, - timeout=self._get_default_timeout(), + start=None, + end=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=False, + timeout=expected_timeout, checksum="md5", + retry=DEFAULT_RETRY, ) - stream = blob._do_download.mock_calls[0].args[1] + stream = client.download_blob_to_file.mock_calls[0].args[1] self.assertEqual(stream.name, filename) - def test_download_to_filename_w_key(self): - from google.cloud._testing import _NamedTemporaryFile - from google.cloud.storage.blob import _get_encryption_headers - - blob_name = "blob-name" - # Create a fake client/bucket and use them in the Blob() constructor. 
- client = self._make_client() - bucket = _Bucket(client) - media_link = "http://example.com/media/" - properties = {"mediaLink": media_link} - key = b"aa426195405adee2c8081bb9e7e74b19" - blob = self._make_one( - blob_name, bucket=bucket, properties=properties, encryption_key=key - ) - blob._do_download = mock.Mock() - - with _NamedTemporaryFile() as temp: - blob.download_to_filename(temp.name) - - headers = {"accept-encoding": "gzip"} - headers.update(_get_encryption_headers(key)) - blob._do_download.assert_called_once_with( - client._http, - mock.ANY, - media_link, - headers, - None, - None, - False, - timeout=self._get_default_timeout(), - checksum="md5", - ) - stream = blob._do_download.mock_calls[0].args[1] - self.assertEqual(stream.name, temp.name) - - def _download_as_bytes_helper(self, raw_download, timeout=None): + def _download_as_bytes_helper(self, raw_download, timeout=None, **extra_kwargs): blob_name = "blob-name" client = self._make_client() bucket = _Bucket(client) - media_link = "http://example.com/media/" - properties = {"mediaLink": media_link} - blob = self._make_one(blob_name, bucket=bucket, properties=properties) - blob._do_download = mock.Mock() + blob = self._make_one(blob_name, bucket=bucket) if timeout is None: expected_timeout = self._get_default_timeout() - fetched = blob.download_as_bytes(raw_download=raw_download) + fetched = blob.download_as_bytes(raw_download=raw_download, **extra_kwargs) else: expected_timeout = timeout - fetched = blob.download_as_bytes(raw_download=raw_download, timeout=timeout) + fetched = blob.download_as_bytes( + raw_download=raw_download, timeout=timeout, **extra_kwargs + ) self.assertEqual(fetched, b"") - headers = {"accept-encoding": "gzip"} - blob._do_download.assert_called_once_with( - client._http, - mock.ANY, - media_link, - headers, - None, - None, - raw_download, - timeout=expected_timeout, - checksum="md5", - ) - stream = blob._do_download.mock_calls[0].args[1] - self.assertIsInstance(stream, io.BytesIO) - - def test_download_as_string_w_response_headers(self): - blob_name = "blob-name" - client = mock.Mock(spec=["_http"]) - bucket = _Bucket(client) - media_link = "http://example.com/media/" - properties = {"mediaLink": media_link} - blob = self._make_one(blob_name, bucket=bucket, properties=properties) - - response = self._mock_requests_response( - http_client.OK, - headers={ - "Content-Type": "application/json", - "Content-Language": "ko-kr", - "Cache-Control": "max-age=1337;public", - "Content-Encoding": "gzip", - "X-Goog-Storage-Class": "STANDARD", - "X-Goog-Hash": "crc32c=4gcgLQ==,md5=CS9tHYTtyFntzj7B9nkkJQ==", - }, - # { "x": 5 } gzipped - content=b"\x1f\x8b\x08\x00\xcfo\x17_\x02\xff\xabVP\xaaP\xb2R0U\xa8\x05\x00\xa1\xcaQ\x93\n\x00\x00\x00", - ) - blob._extract_headers_from_download(response) - - self.assertEqual(blob.content_type, "application/json") - self.assertEqual(blob.content_language, "ko-kr") - self.assertEqual(blob.content_encoding, "gzip") - self.assertEqual(blob.cache_control, "max-age=1337;public") - self.assertEqual(blob.storage_class, "STANDARD") - self.assertEqual(blob.md5_hash, "CS9tHYTtyFntzj7B9nkkJQ==") - self.assertEqual(blob.crc32c, "4gcgLQ==") - - response = self._mock_requests_response( - http_client.OK, - headers={ - "Content-Type": "application/octet-stream", - "Content-Language": "en-US", - "Cache-Control": "max-age=1337;public", - "Content-Encoding": "gzip", - "X-Goog-Storage-Class": "STANDARD", - "X-Goog-Hash": "crc32c=4/c+LQ==,md5=CS9tHYTt/+ntzj7B9nkkJQ==", - }, - content=b"", - ) - 
blob._extract_headers_from_download(response) - self.assertEqual(blob.content_type, "application/octet-stream") - self.assertEqual(blob.content_language, "en-US") - self.assertEqual(blob.md5_hash, "CS9tHYTt/+ntzj7B9nkkJQ==") - self.assertEqual(blob.crc32c, "4/c+LQ==") - - def test_download_as_string_w_hash_response_header_none(self): - blob_name = "blob-name" - md5_hash = "CS9tHYTtyFntzj7B9nkkJQ==" - crc32c = "4gcgLQ==" - client = mock.Mock(spec=["_http"]) - bucket = _Bucket(client) - media_link = "http://example.com/media/" - properties = { - "mediaLink": media_link, - "md5Hash": md5_hash, - "crc32c": crc32c, - } - blob = self._make_one(blob_name, bucket=bucket, properties=properties) + expected_retry = extra_kwargs.get("retry", DEFAULT_RETRY) - response = self._mock_requests_response( - http_client.OK, - headers={"X-Goog-Hash": ""}, - # { "x": 5 } gzipped - content=b"\x1f\x8b\x08\x00\xcfo\x17_\x02\xff\xabVP\xaaP\xb2R0U\xa8\x05\x00\xa1\xcaQ\x93\n\x00\x00\x00", - ) - blob._extract_headers_from_download(response) - - self.assertEqual(blob.md5_hash, md5_hash) - self.assertEqual(blob.crc32c, crc32c) - - def test_download_as_string_w_response_headers_not_match(self): - blob_name = "blob-name" - client = mock.Mock(spec=["_http"]) - bucket = _Bucket(client) - media_link = "http://example.com/media/" - properties = {"mediaLink": media_link} - blob = self._make_one(blob_name, bucket=bucket, properties=properties) - - response = self._mock_requests_response( - http_client.OK, - headers={"X-Goog-Hash": "bogus=4gcgLQ==,"}, - # { "x": 5 } gzipped - content=b"", + client.download_blob_to_file.assert_called_once_with( + blob, + mock.ANY, + start=None, + end=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + raw_download=raw_download, + timeout=expected_timeout, + checksum="md5", + retry=expected_retry, ) - blob._extract_headers_from_download(response) + stream = client.download_blob_to_file.mock_calls[0].args[1] + self.assertIsInstance(stream, io.BytesIO) - self.assertIsNone(blob.md5_hash) - self.assertIsNone(blob.crc32c) + def test_download_as_bytes_w_custom_timeout(self): + self._download_as_bytes_helper(raw_download=False, timeout=9.58) def test_download_as_bytes_w_generation_match(self): GENERATION_NUMBER = 6 @@ -1702,11 +1686,15 @@ def test_download_as_bytes_w_generation_match(self): if_metageneration_not_match=None, timeout=self._get_default_timeout(), checksum="md5", + retry=DEFAULT_RETRY, ) def test_download_as_bytes_wo_raw(self): self._download_as_bytes_helper(raw_download=False) + def test_download_as_bytes_no_retry(self): + self._download_as_bytes_helper(raw_download=False, retry=None) + def test_download_as_bytes_w_raw(self): self._download_as_bytes_helper(raw_download=True) @@ -1729,6 +1717,7 @@ def _download_as_text_helper( no_charset=False, expected_value=u"DEADBEEF", payload=None, + **extra_kwargs ): if payload is None: if encoding is not None: @@ -1737,7 +1726,8 @@ def _download_as_text_helper( payload = expected_value.encode() blob_name = "blob-name" - bucket = _Bucket() + bucket_client = self._make_client() + bucket = _Bucket(bucket_client) properties = {} if charset is not None: @@ -1779,10 +1769,14 @@ def _download_as_text_helper( else: kwargs["timeout"] = expected_timeout = timeout + kwargs.update(extra_kwargs) + fetched = blob.download_as_text(**kwargs) self.assertEqual(fetched, expected_value) + expected_retry = extra_kwargs.get("retry", DEFAULT_RETRY) + 
blob.download_as_bytes.assert_called_once_with( client=client, start=start, @@ -1793,11 +1787,15 @@ def _download_as_text_helper( if_generation_not_match=if_generation_not_match, if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, + retry=expected_retry, ) def test_download_as_text_wo_raw(self): self._download_as_text_helper(raw_download=False) + def test_download_as_text_w_no_retry(self): + self._download_as_text_helper(raw_download=False, retry=None) + def test_download_as_text_w_raw(self): self._download_as_text_helper(raw_download=True) @@ -1885,6 +1883,7 @@ def test_download_as_string(self, mock_warn): if_metageneration_not_match=None, timeout=self._get_default_timeout(), checksum="md5", + retry=DEFAULT_RETRY, ) mock_warn.assert_called_with( @@ -1894,6 +1893,33 @@ def test_download_as_string(self, mock_warn): stacklevel=1, ) + def test_download_as_string_no_retry(self): + MEDIA_LINK = "http://example.com/media/" + + client = self._make_client() + blob = self._make_one( + "blob-name", bucket=_Bucket(client), properties={"mediaLink": MEDIA_LINK} + ) + client.download_blob_to_file = mock.Mock() + + fetched = blob.download_as_string(retry=None) + self.assertEqual(fetched, b"") + + client.download_blob_to_file.assert_called_once_with( + blob, + mock.ANY, + start=None, + end=None, + raw_download=False, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + checksum="md5", + retry=None, + ) + def test__get_content_type_explicit(self): blob = self._make_one(u"blob-name", bucket=None) @@ -2014,6 +2040,7 @@ def _do_multipart_success( timeout=None, metadata=None, mtls=False, + retry=None, ): from six.moves.urllib.parse import urlencode @@ -2062,6 +2089,7 @@ def _do_multipart_success( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + retry=retry, **timeout_kwarg ) @@ -2133,6 +2161,28 @@ def _do_multipart_success( def test__do_multipart_upload_no_size(self, mock_get_boundary): self._do_multipart_success(mock_get_boundary, predefined_acl="private") + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + def test__do_multipart_upload_no_size_retry(self, mock_get_boundary): + self._do_multipart_success( + mock_get_boundary, predefined_acl="private", retry=DEFAULT_RETRY + ) + + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + def test__do_multipart_upload_no_size_num_retries(self, mock_get_boundary): + self._do_multipart_success( + mock_get_boundary, predefined_acl="private", num_retries=2 + ) + + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + def test__do_multipart_upload_no_size_retry_conflict(self, mock_get_boundary): + with self.assertRaises(ValueError): + self._do_multipart_success( + mock_get_boundary, + predefined_acl="private", + num_retries=2, + retry=DEFAULT_RETRY, + ) + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_no_size_mtls(self, mock_get_boundary): self._do_multipart_success( @@ -2171,7 +2221,7 @@ def test__do_multipart_upload_with_kms_with_version(self, mock_get_boundary): @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_retry(self, mock_get_boundary): - self._do_multipart_success(mock_get_boundary, num_retries=8) + 
self._do_multipart_success(mock_get_boundary, retry=DEFAULT_RETRY) @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_generation_match(self, mock_get_boundary): @@ -2235,6 +2285,7 @@ def _initiate_resumable_helper( timeout=None, metadata=None, mtls=False, + retry=None, ): from six.moves.urllib.parse import urlencode from google.resumable_media.requests import ResumableUpload @@ -2305,6 +2356,7 @@ def _initiate_resumable_helper( if_generation_not_match=if_generation_not_match, if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, + retry=retry, **timeout_kwarg ) @@ -2370,13 +2422,15 @@ def _initiate_resumable_helper( self.assertEqual(upload._content_type, content_type) self.assertEqual(upload.resumable_url, resumable_url) retry_strategy = upload._retry_strategy - self.assertEqual(retry_strategy.max_sleep, 64.0) - if num_retries is None: - self.assertEqual(retry_strategy.max_cumulative_retry, 600.0) - self.assertIsNone(retry_strategy.max_retries) - else: - self.assertIsNone(retry_strategy.max_cumulative_retry) + self.assertFalse(num_retries is not None and retry is not None) + if num_retries is not None and retry is None: self.assertEqual(retry_strategy.max_retries, num_retries) + elif retry is None: + self.assertEqual(retry_strategy.max_retries, 0) + else: + self.assertEqual(retry_strategy.max_sleep, 60.0) + self.assertEqual(retry_strategy.max_cumulative_retry, 120.0) + self.assertIsNone(retry_strategy.max_retries) self.assertIs(client._http, transport) # Make sure we never read from the stream. self.assertEqual(stream.tell(), 0) @@ -2453,8 +2507,15 @@ def test__initiate_resumable_upload_with_extra_headers(self): self._initiate_resumable_helper(extra_headers=extra_headers) def test__initiate_resumable_upload_with_retry(self): + self._initiate_resumable_helper(retry=DEFAULT_RETRY) + + def test__initiate_resumable_upload_with_num_retries(self): self._initiate_resumable_helper(num_retries=11) + def test__initiate_resumable_upload_with_retry_conflict(self): + with self.assertRaises(ValueError): + self._initiate_resumable_helper(retry=DEFAULT_RETRY, num_retries=2) + def test__initiate_resumable_upload_with_generation_match(self): self._initiate_resumable_helper( if_generation_match=4, if_metageneration_match=4 @@ -2606,6 +2667,7 @@ def _do_resumable_helper( if_metageneration_not_match=None, timeout=None, data_corruption=False, + retry=None, ): bucket = _Bucket(name="yesterday") blob = self._make_one(u"blob-name", bucket=bucket) @@ -2652,6 +2714,7 @@ def _do_resumable_helper( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + retry=retry, **timeout_kwarg ) @@ -2709,7 +2772,14 @@ def test__do_resumable_upload_with_size(self): self._do_resumable_helper(use_size=True) def test__do_resumable_upload_with_retry(self): - self._do_resumable_helper(num_retries=6) + self._do_resumable_helper(retry=DEFAULT_RETRY) + + def test__do_resumable_upload_with_num_retries(self): + self._do_resumable_helper(num_retries=8) + + def test__do_resumable_upload_with_retry_conflict(self): + with self.assertRaises(ValueError): + self._do_resumable_helper(num_retries=9, retry=DEFAULT_RETRY) def test__do_resumable_upload_with_predefined_acl(self): self._do_resumable_helper(predefined_acl="private") @@ -2735,6 +2805,7 @@ def _do_upload_helper( if_metageneration_not_match=None, size=None, timeout=None, + retry=None, ): from google.cloud.storage.blob import _MAX_MULTIPART_SIZE @@ 
-2778,13 +2849,12 @@ def _do_upload_helper( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + retry=retry, **timeout_kwarg ) - # Adjust num_retries expectations to reflect the conditional default in - # _do_upload() - if num_retries is None and if_metageneration_match is None: - num_retries = 0 + if retry is DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED: + retry = DEFAULT_RETRY if if_metageneration_match else None self.assertIs(created_json, mock.sentinel.json) response.json.assert_called_once_with() @@ -2802,6 +2872,7 @@ def _do_upload_helper( if_metageneration_not_match, timeout=expected_timeout, checksum=None, + retry=retry, ) blob._do_resumable_upload.assert_not_called() else: @@ -2819,6 +2890,7 @@ def _do_upload_helper( if_metageneration_not_match, timeout=expected_timeout, checksum=None, + retry=retry, ) def test__do_upload_uses_multipart(self): @@ -2846,7 +2918,18 @@ def test__do_upload_uses_resumable_w_custom_timeout(self): ) def test__do_upload_with_retry(self): - self._do_upload_helper(num_retries=20) + self._do_upload_helper(retry=DEFAULT_RETRY) + + def test__do_upload_with_num_retries(self): + self._do_upload_helper(num_retries=2) + + def test__do_upload_with_conditional_retry_success(self): + self._do_upload_helper( + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, if_metageneration_match=1 + ) + + def test__do_upload_with_conditional_retry_failure(self): + self._do_upload_helper(retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED) def _upload_from_file_helper(self, side_effect=None, **kwargs): from google.cloud._helpers import UTC @@ -2870,6 +2953,11 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs): if_generation_not_match = kwargs.get("if_generation_not_match", None) if_metageneration_match = kwargs.get("if_metageneration_match", None) if_metageneration_not_match = kwargs.get("if_metageneration_not_match", None) + num_retries = kwargs.get("num_retries", None) + default_retry = ( + DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED if not num_retries else None + ) + retry = kwargs.get("retry", default_retry) ret_val = blob.upload_from_file( stream, size=len(data), content_type=content_type, client=client, **kwargs ) @@ -2881,8 +2969,6 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs): expected_timeout = kwargs.get("timeout", self._get_default_timeout()) - # Check the mock. - num_retries = kwargs.get("num_retries") blob._do_upload.assert_called_once_with( client, stream, @@ -2896,6 +2982,7 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs): if_metageneration_not_match, timeout=expected_timeout, checksum=None, + retry=retry, ) return stream @@ -2905,13 +2992,24 @@ def test_upload_from_file_success(self): @mock.patch("warnings.warn") def test_upload_from_file_with_retries(self, mock_warn): + self._upload_from_file_helper(retry=DEFAULT_RETRY) + + @mock.patch("warnings.warn") + def test_upload_from_file_with_num_retries(self, mock_warn): from google.cloud.storage import blob as blob_module - self._upload_from_file_helper(num_retries=20) + self._upload_from_file_helper(num_retries=2) mock_warn.assert_called_once_with( blob_module._NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2 ) + @mock.patch("warnings.warn") + def test_upload_from_file_with_retry_conflict(self, mock_warn): + # Special case here: in a conflict this method should NOT raise an error + # as that's handled further downstream. It should pass both options + # through. 
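Note: the `retry` / `num_retries` precedence these upload tests encode can be summarized with a small hypothetical helper (not the library's implementation; the real conflict check lives in the lower-level upload helpers, which raise ValueError when both options are supplied, as the *_retry_conflict tests above show):

def _expected_retry(retry=None, num_retries=None, conditional_default=None):
    """Hypothetical: mirrors the expectations in _upload_from_file_helper."""
    if retry is not None:
        return retry               # an explicit retry always passes through
    if num_retries is not None:
        return None                # deprecated num_retries disables the default
    return conditional_default     # e.g. DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED

assert _expected_retry(retry="explicit") == "explicit"
assert _expected_retry(num_retries=2) is None
assert _expected_retry(conditional_default="conditional") == "conditional"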
+ self._upload_from_file_helper(retry=DEFAULT_RETRY, num_retries=2) + def test_upload_from_file_with_rewind(self): stream = self._upload_from_file_helper(rewind=True) assert stream.tell() == 0 @@ -2938,7 +3036,14 @@ def test_upload_from_file_failure(self): self.assertEqual(exc_info.exception.errors, []) def _do_upload_mock_call_helper( - self, blob, client, content_type, size, timeout=None + self, + blob, + client, + content_type, + size, + timeout=None, + num_retries=None, + retry=None, ): self.assertEqual(blob._do_upload.call_count, 1) mock_call = blob._do_upload.mock_calls[0] @@ -2948,7 +3053,7 @@ def _do_upload_mock_call_helper( self.assertEqual(pos_args[0], client) self.assertEqual(pos_args[2], content_type) self.assertEqual(pos_args[3], size) - self.assertIsNone(pos_args[4]) # num_retries + self.assertEqual(pos_args[4], num_retries) # num_retries self.assertIsNone(pos_args[5]) # predefined_acl self.assertIsNone(pos_args[6]) # if_generation_match self.assertIsNone(pos_args[7]) # if_generation_not_match @@ -2956,7 +3061,13 @@ def _do_upload_mock_call_helper( self.assertIsNone(pos_args[9]) # if_metageneration_not_match expected_timeout = self._get_default_timeout() if timeout is None else timeout - self.assertEqual(kwargs, {"timeout": expected_timeout, "checksum": None}) + if not retry: + retry = ( + DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED if not num_retries else None + ) + self.assertEqual( + kwargs, {"timeout": expected_timeout, "checksum": None, "retry": retry} + ) return pos_args[1] @@ -2991,6 +3102,72 @@ def test_upload_from_filename(self): self.assertEqual(stream.mode, "rb") self.assertEqual(stream.name, temp.name) + def test_upload_from_filename_with_retry(self): + from google.cloud._testing import _NamedTemporaryFile + + blob = self._make_one("blob-name", bucket=None) + # Mock low-level upload helper on blob (it is tested elsewhere). + created_json = {"metadata": {"mint": "ice-cream"}} + blob._do_upload = mock.Mock(return_value=created_json, spec=[]) + # Make sure `metadata` is empty before the request. + self.assertIsNone(blob.metadata) + + data = b"soooo much data" + content_type = u"image/svg+xml" + client = mock.sentinel.client + with _NamedTemporaryFile() as temp: + with open(temp.name, "wb") as file_obj: + file_obj.write(data) + + ret_val = blob.upload_from_filename( + temp.name, content_type=content_type, client=client, retry=DEFAULT_RETRY + ) + + # Check the response and side-effects. + self.assertIsNone(ret_val) + self.assertEqual(blob.metadata, created_json["metadata"]) + + # Check the mock. + stream = self._do_upload_mock_call_helper( + blob, client, content_type, len(data), retry=DEFAULT_RETRY + ) + self.assertTrue(stream.closed) + self.assertEqual(stream.mode, "rb") + self.assertEqual(stream.name, temp.name) + + def test_upload_from_filename_with_num_retries(self): + from google.cloud._testing import _NamedTemporaryFile + + blob = self._make_one("blob-name", bucket=None) + # Mock low-level upload helper on blob (it is tested elsewhere). + created_json = {"metadata": {"mint": "ice-cream"}} + blob._do_upload = mock.Mock(return_value=created_json, spec=[]) + # Make sure `metadata` is empty before the request. 
+ self.assertIsNone(blob.metadata) + + data = b"soooo much data" + content_type = u"image/svg+xml" + client = mock.sentinel.client + with _NamedTemporaryFile() as temp: + with open(temp.name, "wb") as file_obj: + file_obj.write(data) + + ret_val = blob.upload_from_filename( + temp.name, content_type=content_type, client=client, num_retries=2 + ) + + # Check the response and side-effects. + self.assertIsNone(ret_val) + self.assertEqual(blob.metadata, created_json["metadata"]) + + # Check the mock. + stream = self._do_upload_mock_call_helper( + blob, client, content_type, len(data), num_retries=2 + ) + self.assertTrue(stream.closed) + self.assertEqual(stream.mode, "rb") + self.assertEqual(stream.name, temp.name) + def test_upload_from_filename_w_custom_timeout(self): from google.cloud._testing import _NamedTemporaryFile @@ -3035,6 +3212,11 @@ def _upload_from_string_helper(self, data, **kwargs): self.assertIsNone(ret_val) self.assertEqual(blob.component_count, 5) + extra_kwargs = {} + if "retry" in kwargs: + extra_kwargs["retry"] = kwargs["retry"] + if "num_retries" in kwargs: + extra_kwargs["num_retries"] = kwargs["num_retries"] # Check the mock. payload = _to_bytes(data, encoding="utf-8") stream = self._do_upload_mock_call_helper( @@ -3043,6 +3225,7 @@ def _upload_from_string_helper(self, data, **kwargs): "text/plain", len(payload), kwargs.get("timeout", self._get_default_timeout()), + **extra_kwargs ) self.assertIsInstance(stream, io.BytesIO) self.assertEqual(stream.getvalue(), payload) @@ -3059,6 +3242,14 @@ def test_upload_from_string_w_text(self): data = u"\N{snowman} \N{sailboat}" self._upload_from_string_helper(data) + def test_upload_from_string_w_text_w_retry(self): + data = u"\N{snowman} \N{sailboat}" + self._upload_from_string_helper(data, retry=DEFAULT_RETRY) + + def test_upload_from_string_w_text_w_num_retries(self): + data = u"\N{snowman} \N{sailboat}" + self._upload_from_string_helper(data, num_retries=2) + def _create_resumable_upload_session_helper( self, origin=None, side_effect=None, timeout=None ): @@ -3146,139 +3337,128 @@ def test_create_resumable_upload_session_with_failure(self): self.assertIn(message, exc_info.exception.message) self.assertEqual(exc_info.exception.errors, []) - def test_get_iam_policy(self): + def test_get_iam_policy_defaults(self): from google.cloud.storage.iam import STORAGE_OWNER_ROLE from google.cloud.storage.iam import STORAGE_EDITOR_ROLE from google.cloud.storage.iam import STORAGE_VIEWER_ROLE from google.api_core.iam import Policy - BLOB_NAME = "blob-name" - PATH = "/b/name/o/%s" % (BLOB_NAME,) - ETAG = "DEADBEEF" - VERSION = 1 - OWNER1 = "user:phred@example.com" - OWNER2 = "group:cloud-logs@google.com" - EDITOR1 = "domain:google.com" - EDITOR2 = "user:phred@example.com" - VIEWER1 = "serviceAccount:1234-abcdef@service.example.com" - VIEWER2 = "user:phred@example.com" - RETURNED = { - "resourceId": PATH, - "etag": ETAG, - "version": VERSION, + blob_name = "blob-name" + path = "/b/name/o/%s" % (blob_name,) + etag = "DEADBEEF" + version = 1 + owner1 = "user:phred@example.com" + owner2 = "group:cloud-logs@google.com" + editor1 = "domain:google.com" + editor2 = "user:phred@example.com" + viewer1 = "serviceAccount:1234-abcdef@service.example.com" + viewer2 = "user:phred@example.com" + api_response = { + "resourceId": path, + "etag": etag, + "version": version, "bindings": [ - {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]}, - {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]}, - {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, 
VIEWER2]}, + {"role": STORAGE_OWNER_ROLE, "members": [owner1, owner2]}, + {"role": STORAGE_EDITOR_ROLE, "members": [editor1, editor2]}, + {"role": STORAGE_VIEWER_ROLE, "members": [viewer1, viewer2]}, ], } - after = ({"status": http_client.OK}, RETURNED) - EXPECTED = { - binding["role"]: set(binding["members"]) for binding in RETURNED["bindings"] + expected_policy = { + binding["role"]: set(binding["members"]) + for binding in api_response["bindings"] } - connection = _Connection(after) - client = _Client(connection) + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket) - policy = blob.get_iam_policy(timeout=42) + policy = blob.get_iam_policy() self.assertIsInstance(policy, Policy) - self.assertEqual(policy.etag, RETURNED["etag"]) - self.assertEqual(policy.version, RETURNED["version"]) - self.assertEqual(dict(policy), EXPECTED) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "%s/iam" % (PATH,), - "query_params": {}, - "_target_object": None, - "timeout": 42, - "retry": DEFAULT_RETRY, - }, + self.assertEqual(policy.etag, api_response["etag"]) + self.assertEqual(policy.version, api_response["version"]) + self.assertEqual(dict(policy), expected_policy) + + expected_path = "%s/iam" % (path,) + expected_query_params = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, ) - def test_get_iam_policy_w_requested_policy_version(self): - from google.cloud.storage.iam import STORAGE_OWNER_ROLE + def test_get_iam_policy_w_user_project_w_timeout(self): + from google.api_core.iam import Policy - BLOB_NAME = "blob-name" - PATH = "/b/name/o/%s" % (BLOB_NAME,) - ETAG = "DEADBEEF" - VERSION = 1 - OWNER1 = "user:phred@example.com" - OWNER2 = "group:cloud-logs@google.com" - RETURNED = { - "resourceId": PATH, - "etag": ETAG, - "version": VERSION, - "bindings": [{"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]}], + blob_name = "blob-name" + user_project = "user-project-123" + timeout = 42 + path = "/b/name/o/%s" % (blob_name,) + etag = "DEADBEEF" + version = 1 + api_response = { + "resourceId": path, + "etag": etag, + "version": version, + "bindings": [], } - after = ({"status": http_client.OK}, RETURNED) - connection = _Connection(after) - client = _Client(connection) - bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + expected_policy = {} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = _Bucket(client=client, user_project=user_project) + blob = self._make_one(blob_name, bucket=bucket) - blob.get_iam_policy(requested_policy_version=3) + policy = blob.get_iam_policy(timeout=42) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "%s/iam" % (PATH,), - "query_params": {"optionsRequestedPolicyVersion": 3}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - }, + self.assertIsInstance(policy, Policy) + self.assertEqual(policy.etag, api_response["etag"]) + self.assertEqual(policy.version, api_response["version"]) + self.assertEqual(dict(policy), expected_policy) + + expected_path = "%s/iam" % (path,) + expected_query_params = 
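# (Aside: a minimal sketch of the spec'd-mock pattern these rewritten tests
# use in place of the old `_Connection` fake. `mock.Mock(spec=["_get_resource"])`
# exposes *only* the named attribute, so a mistyped or unexpected client call
# raises AttributeError instead of passing silently. The path and payload
# below are illustrative:)

import mock

client = mock.Mock(spec=["_get_resource"])
client._get_resource.return_value = {"etag": "DEADBEEF"}

# Code under test would call the client; here we call it directly.
resource = client._get_resource("/b/name/o/blob-name/iam", query_params={})
assert resource == {"etag": "DEADBEEF"}

# A single assertion then pins the full wire-level expectation:
client._get_resource.assert_called_once_with(
    "/b/name/o/blob-name/iam", query_params={},
)
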
{"userProject": user_project} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=DEFAULT_RETRY, + _target_object=None, ) - def test_get_iam_policy_w_user_project(self): - from google.api_core.iam import Policy + def test_get_iam_policy_w_requested_policy_version(self): + from google.cloud.storage.iam import STORAGE_OWNER_ROLE - BLOB_NAME = "blob-name" - USER_PROJECT = "user-project-123" - PATH = "/b/name/o/%s" % (BLOB_NAME,) - ETAG = "DEADBEEF" - VERSION = 1 - RETURNED = { - "resourceId": PATH, - "etag": ETAG, - "version": VERSION, - "bindings": [], + blob_name = "blob-name" + path = "/b/name/o/%s" % (blob_name,) + etag = "DEADBEEF" + version = 3 + owner1 = "user:phred@example.com" + owner2 = "group:cloud-logs@google.com" + api_response = { + "resourceId": path, + "etag": etag, + "version": version, + "bindings": [{"role": STORAGE_OWNER_ROLE, "members": [owner1, owner2]}], } - after = ({"status": http_client.OK}, RETURNED) - EXPECTED = {} - connection = _Connection(after) - client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) - blob = self._make_one(BLOB_NAME, bucket=bucket) + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = _Bucket(client=client) + blob = self._make_one(blob_name, bucket=bucket) - policy = blob.get_iam_policy() + policy = blob.get_iam_policy(requested_policy_version=version) - self.assertIsInstance(policy, Policy) - self.assertEqual(policy.etag, RETURNED["etag"]) - self.assertEqual(policy.version, RETURNED["version"]) - self.assertEqual(dict(policy), EXPECTED) + self.assertEqual(policy.version, version) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "GET", - "path": "%s/iam" % (PATH,), - "query_params": {"userProject": USER_PROJECT}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - }, + expected_path = "%s/iam" % (path,) + expected_query_params = {"optionsRequestedPolicyVersion": version} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, ) def test_set_iam_policy(self): @@ -3288,928 +3468,1253 @@ def test_set_iam_policy(self): from google.cloud.storage.iam import STORAGE_VIEWER_ROLE from google.api_core.iam import Policy - BLOB_NAME = "blob-name" - PATH = "/b/name/o/%s" % (BLOB_NAME,) - ETAG = "DEADBEEF" - VERSION = 1 - OWNER1 = "user:phred@example.com" - OWNER2 = "group:cloud-logs@google.com" - EDITOR1 = "domain:google.com" - EDITOR2 = "user:phred@example.com" - VIEWER1 = "serviceAccount:1234-abcdef@service.example.com" - VIEWER2 = "user:phred@example.com" - BINDINGS = [ - {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]}, - {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]}, - {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]}, + blob_name = "blob-name" + path = "/b/name/o/%s" % (blob_name,) + etag = "DEADBEEF" + version = 1 + owner1 = "user:phred@example.com" + owner2 = "group:cloud-logs@google.com" + editor1 = "domain:google.com" + editor2 = "user:phred@example.com" + viewer1 = "serviceAccount:1234-abcdef@service.example.com" + viewer2 = "user:phred@example.com" + bindings = [ + {"role": STORAGE_OWNER_ROLE, "members": [owner1, owner2]}, + {"role": STORAGE_EDITOR_ROLE, "members": [editor1, editor2]}, + {"role": STORAGE_VIEWER_ROLE, 
"members": [viewer1, viewer2]}, ] - RETURNED = {"etag": ETAG, "version": VERSION, "bindings": BINDINGS} - after = ({"status": http_client.OK}, RETURNED) + api_response = {"etag": etag, "version": version, "bindings": bindings} policy = Policy() - for binding in BINDINGS: + for binding in bindings: policy[binding["role"]] = binding["members"] - connection = _Connection(after) - client = _Client(connection) + client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = api_response bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket) - returned = blob.set_iam_policy(policy, timeout=42) + returned = blob.set_iam_policy(policy) - self.assertEqual(returned.etag, ETAG) - self.assertEqual(returned.version, VERSION) + self.assertEqual(returned.etag, etag) + self.assertEqual(returned.version, version) self.assertEqual(dict(returned), dict(policy)) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PUT") - self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) - self.assertEqual(kw[0]["query_params"], {}) - self.assertEqual(kw[0]["timeout"], 42) - sent = kw[0]["data"] - self.assertEqual(sent["resourceId"], PATH) - self.assertEqual(len(sent["bindings"]), len(BINDINGS)) + expected_path = "%s/iam" % (path,) + expected_data = { + "resourceId": path, + "bindings": mock.ANY, + } + expected_query_params = {} + client._put_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_ETAG_IN_JSON, + _target_object=None, + ) + + sent_bindings = client._put_resource.call_args.args[1]["bindings"] key = operator.itemgetter("role") for found, expected in zip( - sorted(sent["bindings"], key=key), sorted(BINDINGS, key=key) + sorted(sent_bindings, key=key), sorted(bindings, key=key) ): self.assertEqual(found["role"], expected["role"]) self.assertEqual(sorted(found["members"]), sorted(expected["members"])) - def test_set_iam_policy_w_user_project(self): + def test_set_iam_policy_w_user_project_w_explicit_client_w_timeout_retry(self): from google.api_core.iam import Policy - BLOB_NAME = "blob-name" - USER_PROJECT = "user-project-123" - PATH = "/b/name/o/%s" % (BLOB_NAME,) - ETAG = "DEADBEEF" - VERSION = 1 - BINDINGS = [] - RETURNED = {"etag": ETAG, "version": VERSION, "bindings": BINDINGS} - after = ({"status": http_client.OK}, RETURNED) + blob_name = "blob-name" + user_project = "user-project-123" + path = "/b/name/o/%s" % (blob_name,) + etag = "DEADBEEF" + version = 1 + bindings = [] policy = Policy() - connection = _Connection(after) - client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) - blob = self._make_one(BLOB_NAME, bucket=bucket) + api_response = {"etag": etag, "version": version, "bindings": bindings} + client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = api_response + bucket = _Bucket(client=None, user_project=user_project) + blob = self._make_one(blob_name, bucket=bucket) + timeout = 42 + retry = mock.Mock(spec=[]) - returned = blob.set_iam_policy(policy) + returned = blob.set_iam_policy( + policy, client=client, timeout=timeout, retry=retry, + ) - self.assertEqual(returned.etag, ETAG) - self.assertEqual(returned.version, VERSION) + self.assertEqual(returned.etag, etag) + self.assertEqual(returned.version, version) self.assertEqual(dict(returned), dict(policy)) - kw = connection._requested - 
self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PUT") - self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) - self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) - self.assertEqual(kw[0]["data"], {"resourceId": PATH}) + expected_path = "%s/iam" % (path,) + expected_data = { # bindings omitted + "resourceId": path, + } + expected_query_params = {"userProject": user_project} + client._put_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + _target_object=None, + ) - def test_test_iam_permissions(self): + def test_test_iam_permissions_defaults(self): from google.cloud.storage.iam import STORAGE_OBJECTS_LIST from google.cloud.storage.iam import STORAGE_BUCKETS_GET from google.cloud.storage.iam import STORAGE_BUCKETS_UPDATE - BLOB_NAME = "blob-name" - PATH = "/b/name/o/%s" % (BLOB_NAME,) - PERMISSIONS = [ + blob_name = "blob-name" + permissions = [ STORAGE_OBJECTS_LIST, STORAGE_BUCKETS_GET, STORAGE_BUCKETS_UPDATE, ] - ALLOWED = PERMISSIONS[1:] - RETURNED = {"permissions": ALLOWED} - after = ({"status": http_client.OK}, RETURNED) - connection = _Connection(after) - client = _Client(connection) + expected = permissions[1:] + api_response = {"permissions": expected} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket) - allowed = blob.test_iam_permissions(PERMISSIONS, timeout=42) + found = blob.test_iam_permissions(permissions) - self.assertEqual(allowed, ALLOWED) + self.assertEqual(found, expected) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "GET") - self.assertEqual(kw[0]["path"], "%s/iam/testPermissions" % (PATH,)) - self.assertEqual(kw[0]["query_params"], {"permissions": PERMISSIONS}) - self.assertEqual(kw[0]["timeout"], 42) + expected_path = "/b/name/o/%s/iam/testPermissions" % (blob_name,) + expected_query_params = {"permissions": permissions} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) - def test_test_iam_permissions_w_user_project(self): + def test_test_iam_permissions_w_user_project_w_timeout_w_retry(self): from google.cloud.storage.iam import STORAGE_OBJECTS_LIST from google.cloud.storage.iam import STORAGE_BUCKETS_GET from google.cloud.storage.iam import STORAGE_BUCKETS_UPDATE - BLOB_NAME = "blob-name" - USER_PROJECT = "user-project-123" - PATH = "/b/name/o/%s" % (BLOB_NAME,) - PERMISSIONS = [ + blob_name = "blob-name" + user_project = "user-project-123" + timeout = 42 + retry = mock.Mock(spec=[]) + permissions = [ STORAGE_OBJECTS_LIST, STORAGE_BUCKETS_GET, STORAGE_BUCKETS_UPDATE, ] - ALLOWED = PERMISSIONS[1:] - RETURNED = {"permissions": ALLOWED} - after = ({"status": http_client.OK}, RETURNED) - connection = _Connection(after) - client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) - blob = self._make_one(BLOB_NAME, bucket=bucket) + expected = permissions[1:] + api_response = {"permissions": expected} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = _Bucket(client=client, user_project=user_project) + blob = self._make_one(blob_name, bucket=bucket) + + found = blob.test_iam_permissions(permissions, 
timeout=timeout, retry=retry) + + self.assertEqual(found, expected) + + expected_path = "/b/name/o/%s/iam/testPermissions" % (blob_name,) + expected_query_params = { + "permissions": permissions, + "userProject": user_project, + } + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + _target_object=None, + ) + + def test_make_public_w_defaults(self): + from google.cloud.storage.acl import _ACLEntity + + blob_name = "blob-name" + permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] + api_response = {"acl": permissive} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + bucket = _Bucket(client=client) + blob = self._make_one(blob_name, bucket=bucket) + blob.acl.loaded = True + + blob.make_public() + + self.assertEqual(list(blob.acl), permissive) + + expected_patch_data = {"acl": permissive} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + blob.path, + expected_patch_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=None, + ) + + def test_make_public_w_timeout(self): + from google.cloud.storage.acl import _ACLEntity + + blob_name = "blob-name" + permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] + api_response = {"acl": permissive} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + bucket = _Bucket(client=client) + blob = self._make_one(blob_name, bucket=bucket) + blob.acl.loaded = True + timeout = 42 + + blob.make_public(timeout=timeout) + + self.assertEqual(list(blob.acl), permissive) + + expected_patch_data = {"acl": permissive} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + blob.path, + expected_patch_data, + query_params=expected_query_params, + timeout=timeout, + retry=None, + ) + + def test_make_private_w_defaults(self): + blob_name = "blob-name" + no_permissions = [] + api_response = {"acl": no_permissions} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + bucket = _Bucket(client=client) + blob = self._make_one(blob_name, bucket=bucket) + blob.acl.loaded = True - allowed = blob.test_iam_permissions(PERMISSIONS) + blob.make_private() - self.assertEqual(allowed, ALLOWED) + self.assertEqual(list(blob.acl), no_permissions) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "GET") - self.assertEqual(kw[0]["path"], "%s/iam/testPermissions" % (PATH,)) - self.assertEqual( - kw[0]["query_params"], - {"permissions": PERMISSIONS, "userProject": USER_PROJECT}, + expected_patch_data = {"acl": no_permissions} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + blob.path, + expected_patch_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=None, ) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) - - def test_make_public(self): - from google.cloud.storage.acl import _ACLEntity - BLOB_NAME = "blob-name" - permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] - after = ({"status": http_client.OK}, {"acl": permissive}) - connection = _Connection(after) - client = _Client(connection) - bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) - blob.acl.loaded = True - blob.make_public() - 
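# (Aside: the make_public / make_private tests mark the ACL as already loaded
# (`blob.acl.loaded = True`) so that no GET is issued first -- the only
# request reaching the mocked client is the PATCH under test. A sketch of why
# a one-method spec suffices; the call shape mirrors the assertions above:)

import mock

client = mock.Mock(spec=["_patch_resource"])
client._patch_resource.return_value = {"acl": []}

# Were the ACL not pre-loaded, the code under test would also need a
# `_get_resource` to fetch it first, and this spec'd mock would raise
# AttributeError -- making any stray request an immediate test failure.
client._patch_resource(
    "/b/name/o/blob-name", {"acl": []}, query_params={"projection": "full"},
)
client._patch_resource.assert_called_once()
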
self.assertEqual(list(blob.acl), permissive) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/b/name/o/%s" % BLOB_NAME) - self.assertEqual(kw[0]["data"], {"acl": permissive}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) - - def test_make_private(self): - BLOB_NAME = "blob-name" + def test_make_private_w_timeout(self): + blob_name = "blob-name" no_permissions = [] - after = ({"status": http_client.OK}, {"acl": no_permissions}) - connection = _Connection(after) - client = _Client(connection) + api_response = {"acl": no_permissions} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket) blob.acl.loaded = True - blob.make_private() + timeout = 42 + + blob.make_private(timeout=timeout) + self.assertEqual(list(blob.acl), no_permissions) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/b/name/o/%s" % BLOB_NAME) - self.assertEqual(kw[0]["data"], {"acl": no_permissions}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) + + expected_patch_data = {"acl": no_permissions} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + blob.path, + expected_patch_data, + query_params=expected_query_params, + timeout=timeout, + retry=None, + ) def test_compose_wo_content_type_set(self): - SOURCE_1 = "source-1" - SOURCE_2 = "source-2" - DESTINATION = "destination" - RESOURCE = {} - after = ({"status": http_client.OK}, RESOURCE) - connection = _Connection(after) - client = _Client(connection) + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + api_response = {} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = _Bucket(client=client) - source_1 = self._make_one(SOURCE_1, bucket=bucket) - source_2 = self._make_one(SOURCE_2, bucket=bucket) - destination = self._make_one(DESTINATION, bucket=bucket) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) # no destination.content_type set destination.compose(sources=[source_1, source_2]) self.assertIsNone(destination.content_type) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "POST", - "path": "/b/name/o/%s/compose" % DESTINATION, - "query_params": {}, - "data": { - "sourceObjects": [{"name": source_1.name}, {"name": source_2.name}], - "destination": {}, - }, - "_target_object": destination, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_GENERATION_SPECIFIED, - }, + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [ + {"name": source_1.name, "generation": source_1.generation}, + {"name": source_2.name, "generation": source_2.generation}, + ], + "destination": {}, + } + expected_query_params = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=destination, ) - def test_compose_minimal_w_user_project(self): - 
SOURCE_1 = "source-1" - SOURCE_2 = "source-2" - DESTINATION = "destination" - RESOURCE = {"etag": "DEADBEEF"} - USER_PROJECT = "user-project-123" - after = ({"status": http_client.OK}, RESOURCE) - connection = _Connection(after) - client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) - source_1 = self._make_one(SOURCE_1, bucket=bucket) - source_2 = self._make_one(SOURCE_2, bucket=bucket) - destination = self._make_one(DESTINATION, bucket=bucket) + def test_compose_minimal_w_user_project_w_timeout(self): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + api_response = {"etag": "DEADBEEF"} + user_project = "user-project-123" + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + bucket = _Bucket(client=client, user_project=user_project) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) destination.content_type = "text/plain" + timeout = 42 - destination.compose(sources=[source_1, source_2], timeout=42) + destination.compose(sources=[source_1, source_2], timeout=timeout) self.assertEqual(destination.etag, "DEADBEEF") - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "POST", - "path": "/b/name/o/%s/compose" % DESTINATION, - "query_params": {"userProject": USER_PROJECT}, - "data": { - "sourceObjects": [{"name": source_1.name}, {"name": source_2.name}], - "destination": {"contentType": "text/plain"}, - }, - "_target_object": destination, - "timeout": 42, - "retry": DEFAULT_RETRY_IF_GENERATION_SPECIFIED, - }, + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [ + {"name": source_1.name, "generation": source_1.generation}, + {"name": source_2.name, "generation": source_2.generation}, + ], + "destination": {"contentType": "text/plain"}, + } + expected_query_params = {"userProject": user_project} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=destination, ) - def test_compose_w_additional_property_changes(self): - SOURCE_1 = "source-1" - SOURCE_2 = "source-2" - DESTINATION = "destination" - RESOURCE = {"etag": "DEADBEEF"} - after = ({"status": http_client.OK}, RESOURCE) - connection = _Connection(after) - client = _Client(connection) + def test_compose_w_additional_property_changes_w_retry(self): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + api_response = {"etag": "DEADBEEF"} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = _Bucket(client=client) - source_1 = self._make_one(SOURCE_1, bucket=bucket) - source_2 = self._make_one(SOURCE_2, bucket=bucket) - destination = self._make_one(DESTINATION, bucket=bucket) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) destination.content_type = "text/plain" destination.content_language = "en-US" destination.metadata = {"my-key": "my-value"} + retry = mock.Mock(spec=[]) - destination.compose(sources=[source_1, source_2]) + destination.compose(sources=[source_1, source_2], retry=retry) self.assertEqual(destination.etag, 
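# (Aside: `retry = mock.Mock(spec=[])` builds a featureless sentinel -- it
# exposes no attributes or methods at all, so a test passing it as `retry`
# proves the argument is forwarded untouched rather than used as a real
# retry policy. Sketch, with a hypothetical pass-through function:)

import mock

retry = mock.Mock(spec=[])

def forwards_retry(retry):
    # Any `retry.something` access in here would raise, by construction.
    return retry

assert forwards_retry(retry) is retry
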
"DEADBEEF") - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "POST", - "path": "/b/name/o/%s/compose" % DESTINATION, - "query_params": {}, - "data": { - "sourceObjects": [{"name": source_1.name}, {"name": source_2.name}], - "destination": { - "contentType": "text/plain", - "contentLanguage": "en-US", - "metadata": {"my-key": "my-value"}, - }, - }, - "_target_object": destination, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [ + {"name": source_1.name, "generation": source_1.generation}, + {"name": source_2.name, "generation": source_2.generation}, + ], + "destination": { + "contentType": "text/plain", + "contentLanguage": "en-US", + "metadata": {"my-key": "my-value"}, }, + } + expected_query_params = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=destination, ) - def test_compose_w_generation_match(self): - SOURCE_1 = "source-1" - SOURCE_2 = "source-2" - DESTINATION = "destination" - RESOURCE = {} - GENERATION_NUMBERS = [6, 9] - METAGENERATION_NUMBERS = [7, 1] - - after = ({"status": http_client.OK}, RESOURCE) - connection = _Connection(after) - client = _Client(connection) + def test_compose_w_source_generation_match(self): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + api_response = {} + source_generation_numbers = [6, 9] + + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = _Bucket(client=client) - source_1 = self._make_one(SOURCE_1, bucket=bucket) - source_2 = self._make_one(SOURCE_2, bucket=bucket) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) - destination = self._make_one(DESTINATION, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) destination.compose( sources=[source_1, source_2], - if_generation_match=GENERATION_NUMBERS, - if_metageneration_match=METAGENERATION_NUMBERS, + if_source_generation_match=source_generation_numbers, ) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "POST", - "path": "/b/name/o/%s/compose" % DESTINATION, - "query_params": {}, - "data": { - "sourceObjects": [ - { - "name": source_1.name, - "objectPreconditions": { - "ifGenerationMatch": GENERATION_NUMBERS[0], - "ifMetagenerationMatch": METAGENERATION_NUMBERS[0], - }, - }, - { - "name": source_2.name, - "objectPreconditions": { - "ifGenerationMatch": GENERATION_NUMBERS[1], - "ifMetagenerationMatch": METAGENERATION_NUMBERS[1], - }, - }, - ], - "destination": {}, + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [ + { + "name": source_1.name, + "generation": source_1.generation, + "objectPreconditions": { + "ifGenerationMatch": source_generation_numbers[0], + }, }, - "_target_object": destination, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_GENERATION_SPECIFIED, - }, + { + "name": source_2.name, + "generation": source_2.generation, + "objectPreconditions": { + "ifGenerationMatch": source_generation_numbers[1], + }, + }, + ], + "destination": {}, + } + expected_query_params = {} + client._post_resource.assert_called_once_with( + 
expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=destination, ) - def test_compose_w_generation_match_bad_length(self): - SOURCE_1 = "source-1" - SOURCE_2 = "source-2" - DESTINATION = "destination" - GENERATION_NUMBERS = [6] - METAGENERATION_NUMBERS = [7] - - after = ({"status": http_client.OK}, {}) - connection = _Connection(after) - client = _Client(connection) + def test_compose_w_source_generation_match_bad_length(self): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + source_generation_numbers = [6] + client = mock.Mock(spec=["_post_resource"]) bucket = _Bucket(client=client) - source_1 = self._make_one(SOURCE_1, bucket=bucket) - source_2 = self._make_one(SOURCE_2, bucket=bucket) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) - destination = self._make_one(DESTINATION, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) with self.assertRaises(ValueError): destination.compose( - sources=[source_1, source_2], if_generation_match=GENERATION_NUMBERS + sources=[source_1, source_2], + if_source_generation_match=source_generation_numbers, ) + + client._post_resource.assert_not_called() + + def test_compose_w_source_generation_match_nones(self): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + source_generation_numbers = [6, None] + api_response = {} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + bucket = _Bucket(client=client) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) + + destination.compose( + sources=[source_1, source_2], + if_source_generation_match=source_generation_numbers, + ) + + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [ + { + "name": source_1.name, + "generation": source_1.generation, + "objectPreconditions": { + "ifGenerationMatch": source_generation_numbers[0], + }, + }, + {"name": source_2.name, "generation": source_2.generation}, + ], + "destination": {}, + } + expected_query_params = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=destination, + ) + + def test_compose_w_generation_match(self): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + generation_number = 1 + api_response = {} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + bucket = _Bucket(client=client) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) + + destination.compose( + sources=[source_1, source_2], if_generation_match=generation_number, + ) + + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [ + {"name": source_1.name, "generation": source_1.generation}, + {"name": source_2.name, "generation": source_2.generation}, + ], + "destination": {}, + } + expected_query_params = {"ifGenerationMatch": generation_number} + 
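# (Aside: the compose tests expect one `objectPreconditions` entry per
# source, with `None` meaning "no precondition for this source", and a
# ValueError when the list length disagrees with the source count. A minimal
# sketch of assembling such a payload -- `build_source_objects` and the
# plain-dict sources are hypothetical stand-ins mirroring the expected_data
# above:)

def build_source_objects(sources, if_source_generation_match=None):
    if if_source_generation_match is not None and len(
        if_source_generation_match
    ) != len(sources):
        raise ValueError("one precondition per source is required")
    source_objects = []
    for index, source in enumerate(sources):
        entry = {"name": source["name"], "generation": source["generation"]}
        if if_source_generation_match is not None:
            match = if_source_generation_match[index]
            if match is not None:
                entry["objectPreconditions"] = {"ifGenerationMatch": match}
        source_objects.append(entry)
    return source_objects

sources = [
    {"name": "source-1", "generation": None},
    {"name": "source-2", "generation": None},
]
assert build_source_objects(sources, [6, None]) == [
    {"name": "source-1", "generation": None,
     "objectPreconditions": {"ifGenerationMatch": 6}},
    {"name": "source-2", "generation": None},
]
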
client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=destination, + ) + + @mock.patch("warnings.warn") + def test_compose_w_if_generation_match_list_w_warning(self, mock_warn): + from google.cloud.storage.blob import _COMPOSE_IF_GENERATION_LIST_DEPRECATED + + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + api_response = {} + generation_numbers = [6, 9] + + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + bucket = _Bucket(client=client) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + + destination = self._make_one(destination_name, bucket=bucket) + destination.compose( + sources=[source_1, source_2], if_generation_match=generation_numbers, + ) + + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [ + { + "name": source_1_name, + "generation": None, + "objectPreconditions": { + "ifGenerationMatch": generation_numbers[0], + }, + }, + { + "name": source_2_name, + "generation": None, + "objectPreconditions": { + "ifGenerationMatch": generation_numbers[1], + }, + }, + ], + "destination": {}, + } + expected_query_params = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=destination, + ) + + mock_warn.assert_called_with( + _COMPOSE_IF_GENERATION_LIST_DEPRECATED, DeprecationWarning, stacklevel=2, + ) + + def test_compose_w_if_generation_match_and_if_s_generation_match(self): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + source_generation_numbers = [6, 8] + client = mock.Mock(spec=["_post_resource"]) + bucket = _Bucket(client=client) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + + destination = self._make_one(destination_name, bucket=bucket) + with self.assertRaises(ValueError): destination.compose( sources=[source_1, source_2], - if_metageneration_match=METAGENERATION_NUMBERS, + if_generation_match=source_generation_numbers, + if_source_generation_match=source_generation_numbers, ) - def test_compose_w_generation_match_nones(self): - SOURCE_1 = "source-1" - SOURCE_2 = "source-2" - DESTINATION = "destination" - GENERATION_NUMBERS = [6, None] + client._post_resource.assert_not_called() - after = ({"status": http_client.OK}, {}) - connection = _Connection(after) - client = _Client(connection) + @mock.patch("warnings.warn") + def test_compose_w_if_metageneration_match_list_w_warning(self, mock_warn): + from google.cloud.storage.blob import _COMPOSE_IF_METAGENERATION_LIST_DEPRECATED + + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + metageneration_number = [6] + client = mock.Mock(spec=["_post_resource"]) bucket = _Bucket(client=client) - source_1 = self._make_one(SOURCE_1, bucket=bucket) - source_2 = self._make_one(SOURCE_2, bucket=bucket) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + + destination = self._make_one(destination_name, bucket=bucket) - destination = self._make_one(DESTINATION, bucket=bucket) 
destination.compose( - sources=[source_1, source_2], if_generation_match=GENERATION_NUMBERS + sources=[source_1, source_2], if_metageneration_match=metageneration_number, ) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual( - kw[0], - { - "method": "POST", - "path": "/b/name/o/%s/compose" % DESTINATION, - "query_params": {}, - "data": { - "sourceObjects": [ - { - "name": source_1.name, - "objectPreconditions": { - "ifGenerationMatch": GENERATION_NUMBERS[0] - }, - }, - {"name": source_2.name}, - ], - "destination": {}, - }, - "_target_object": destination, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_GENERATION_SPECIFIED, - }, + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [ + {"name": source_1_name, "generation": None}, + {"name": source_2_name, "generation": None}, + ], + "destination": {}, + } + expected_query_params = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=destination, + ) + + mock_warn.assert_called_with( + _COMPOSE_IF_METAGENERATION_LIST_DEPRECATED, + DeprecationWarning, + stacklevel=2, + ) + + def test_compose_w_metageneration_match(self): + source_1_name = "source-1" + source_2_name = "source-2" + destination_name = "destination" + metageneration_number = 1 + api_response = {} + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + bucket = _Bucket(client=client) + source_1 = self._make_one(source_1_name, bucket=bucket) + source_2 = self._make_one(source_2_name, bucket=bucket) + destination = self._make_one(destination_name, bucket=bucket) + + destination.compose( + sources=[source_1, source_2], if_metageneration_match=metageneration_number, ) - def test_rewrite_response_without_resource(self): - SOURCE_BLOB = "source" - DEST_BLOB = "dest" - DEST_BUCKET = "other-bucket" - TOKEN = "TOKEN" - RESPONSE = { - "totalBytesRewritten": 33, - "objectSize": 42, + expected_path = "/b/name/o/%s/compose" % destination_name + expected_data = { + "sourceObjects": [ + {"name": source_1.name, "generation": source_1.generation}, + {"name": source_2.name, "generation": source_2.generation}, + ], + "destination": {}, + } + expected_query_params = {"ifMetagenerationMatch": metageneration_number} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=destination, + ) + + def test_rewrite_w_response_wo_resource(self): + source_name = "source" + dest_name = "dest" + other_bucket_name = "other-bucket" + bytes_rewritten = 33 + object_size = 52 + rewrite_token = "TOKEN" + api_response = { + "totalBytesRewritten": bytes_rewritten, + "objectSize": object_size, "done": False, - "rewriteToken": TOKEN, + "rewriteToken": rewrite_token, } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response source_bucket = _Bucket(client=client) - source_blob = self._make_one(SOURCE_BLOB, bucket=source_bucket) - dest_bucket = _Bucket(client=client, name=DEST_BUCKET) - dest_blob = self._make_one(DEST_BLOB, bucket=dest_bucket) + source_blob = self._make_one(source_name, 
bucket=source_bucket) + dest_bucket = _Bucket(client=client, name=other_bucket_name) + dest_blob = self._make_one(dest_name, bucket=dest_bucket) token, rewritten, size = dest_blob.rewrite(source_blob) - self.assertEqual(token, TOKEN) - self.assertEqual(rewritten, 33) - self.assertEqual(size, 42) - - def test_rewrite_w_generations(self): - SOURCE_BLOB = "source" - SOURCE_GENERATION = 42 - DEST_BLOB = "dest" - DEST_BUCKET = "other-bucket" - DEST_GENERATION = 43 - TOKEN = "TOKEN" - RESPONSE = { - "totalBytesRewritten": 33, - "objectSize": 42, + self.assertEqual(token, rewrite_token) + self.assertEqual(rewritten, bytes_rewritten) + self.assertEqual(size, object_size) + + expected_path = "/b/%s/o/%s/rewriteTo/b/%s/o/%s" % ( + source_bucket.name, + source_name, + other_bucket_name, + dest_name, + ) + expected_data = {} + expected_query_params = {} + expected_headers = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=dest_blob, + ) + + def test_rewrite_w_generations_w_timeout(self): + source_name = "source" + source_generation = 22 + dest_name = "dest" + other_bucket_name = "other-bucket" + dest_generation = 23 + bytes_rewritten = 33 + object_size = 52 + rewrite_token = "TOKEN" + api_response = { + "totalBytesRewritten": bytes_rewritten, + "objectSize": object_size, "done": False, - "rewriteToken": TOKEN, + "rewriteToken": rewrite_token, } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response source_bucket = _Bucket(client=client) source_blob = self._make_one( - SOURCE_BLOB, bucket=source_bucket, generation=SOURCE_GENERATION + source_name, bucket=source_bucket, generation=source_generation ) - dest_bucket = _Bucket(client=client, name=DEST_BUCKET) + dest_bucket = _Bucket(client=client, name=other_bucket_name) dest_blob = self._make_one( - DEST_BLOB, bucket=dest_bucket, generation=DEST_GENERATION + dest_name, bucket=dest_bucket, generation=dest_generation ) + timeout = 42 - token, rewritten, size = dest_blob.rewrite(source_blob, timeout=42) + token, rewritten, size = dest_blob.rewrite(source_blob, timeout=timeout) - self.assertEqual(token, TOKEN) - self.assertEqual(rewritten, 33) - self.assertEqual(size, 42) + self.assertEqual(token, rewrite_token) + self.assertEqual(rewritten, bytes_rewritten) + self.assertEqual(size, object_size) - (kw,) = connection._requested - self.assertEqual(kw["method"], "POST") - self.assertEqual( - kw["path"], - "/b/%s/o/%s/rewriteTo/b/%s/o/%s" - % ( - (source_bucket.name, source_blob.name, dest_bucket.name, dest_blob.name) - ), - ) - self.assertEqual(kw["query_params"], {"sourceGeneration": SOURCE_GENERATION}) - self.assertEqual(kw["timeout"], 42) - - def test_rewrite_w_generation_match(self): - SOURCE_BLOB = "source" - SOURCE_GENERATION_NUMBER = 42 - DEST_BLOB = "dest" - DEST_BUCKET = "other-bucket" - DEST_GENERATION_NUMBER = 16 - TOKEN = "TOKEN" - RESPONSE = { - "totalBytesRewritten": 33, - "objectSize": 42, + expected_path = "/b/%s/o/%s/rewriteTo/b/%s/o/%s" % ( + source_bucket.name, + source_name, + other_bucket_name, + dest_name, + ) + expected_data = {"generation": dest_generation} + expected_query_params = {"sourceGeneration": source_generation} + expected_headers = {} + 
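# (Aside: in the rewrite tests the two generations travel differently -- the
# destination's pins the request *body* ({"generation": ...}) while the
# source's becomes the `sourceGeneration` *query parameter*. A minimal
# sketch with a hypothetical helper name, matching the expected_data /
# expected_query_params above:)

def rewrite_request_parts(source_generation, dest_generation):
    data = {}
    if dest_generation is not None:
        data["generation"] = dest_generation
    query_params = {}
    if source_generation is not None:
        query_params["sourceGeneration"] = source_generation
    return data, query_params

assert rewrite_request_parts(22, 23) == (
    {"generation": 23}, {"sourceGeneration": 22},
)
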
client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + headers=expected_headers, + timeout=timeout, + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=dest_blob, + ) + + def test_rewrite_w_generation_match_w_retry(self): + source_name = "source" + source_generation = 42 + dest_name = "dest" + other_bucket_name = "other-bucket" + dest_generation = 16 + bytes_rewritten = 33 + object_size = 52 + rewrite_token = "TOKEN" + api_response = { + "totalBytesRewritten": bytes_rewritten, + "objectSize": object_size, "done": False, - "rewriteToken": TOKEN, + "rewriteToken": rewrite_token, } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response source_bucket = _Bucket(client=client) source_blob = self._make_one( - SOURCE_BLOB, bucket=source_bucket, generation=SOURCE_GENERATION_NUMBER + source_name, bucket=source_bucket, generation=source_generation ) - dest_bucket = _Bucket(client=client, name=DEST_BUCKET) + dest_bucket = _Bucket(client=client, name=other_bucket_name) dest_blob = self._make_one( - DEST_BLOB, bucket=dest_bucket, generation=DEST_GENERATION_NUMBER + dest_name, bucket=dest_bucket, generation=dest_generation ) + retry = mock.Mock(spec=[]) + token, rewritten, size = dest_blob.rewrite( source_blob, - timeout=42, if_generation_match=dest_blob.generation, if_source_generation_match=source_blob.generation, + retry=retry, ) - (kw,) = connection._requested - self.assertEqual(kw["method"], "POST") - self.assertEqual( - kw["path"], - "/b/%s/o/%s/rewriteTo/b/%s/o/%s" - % ( - (source_bucket.name, source_blob.name, dest_bucket.name, dest_blob.name) - ), + + self.assertEqual(token, rewrite_token) + self.assertEqual(rewritten, bytes_rewritten) + self.assertEqual(size, object_size) + + expected_path = "/b/%s/o/%s/rewriteTo/b/%s/o/%s" % ( + source_bucket.name, + source_name, + other_bucket_name, + dest_name, ) - self.assertEqual( - kw["query_params"], - { - "ifSourceGenerationMatch": SOURCE_GENERATION_NUMBER, - "ifGenerationMatch": DEST_GENERATION_NUMBER, - "sourceGeneration": SOURCE_GENERATION_NUMBER, - }, + expected_data = {"generation": dest_generation} + expected_query_params = { + "ifSourceGenerationMatch": source_generation, + "ifGenerationMatch": dest_generation, + "sourceGeneration": source_generation, + } + expected_headers = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=dest_blob, ) - self.assertEqual(kw["timeout"], 42) def test_rewrite_other_bucket_other_name_no_encryption_partial(self): - SOURCE_BLOB = "source" - DEST_BLOB = "dest" - DEST_BUCKET = "other-bucket" - TOKEN = "TOKEN" - RESPONSE = { - "totalBytesRewritten": 33, - "objectSize": 42, + source_name = "source" + dest_name = "dest" + other_bucket_name = "other-bucket" + bytes_rewritten = 33 + object_size = 52 + rewrite_token = "TOKEN" + api_response = { + "totalBytesRewritten": bytes_rewritten, + "objectSize": object_size, "done": False, - "rewriteToken": TOKEN, - "resource": {"etag": "DEADBEEF"}, + "rewriteToken": rewrite_token, } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) + 
client._post_resource.return_value = api_response source_bucket = _Bucket(client=client) - source_blob = self._make_one(SOURCE_BLOB, bucket=source_bucket) - dest_bucket = _Bucket(client=client, name=DEST_BUCKET) - dest_blob = self._make_one(DEST_BLOB, bucket=dest_bucket) + source_blob = self._make_one(source_name, bucket=source_bucket) + dest_bucket = _Bucket(client=client, name=other_bucket_name) + dest_blob = self._make_one(dest_name, bucket=dest_bucket) token, rewritten, size = dest_blob.rewrite(source_blob) - self.assertEqual(token, TOKEN) - self.assertEqual(rewritten, 33) - self.assertEqual(size, 42) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "POST") - PATH = "/b/name/o/%s/rewriteTo/b/%s/o/%s" % ( - SOURCE_BLOB, - DEST_BUCKET, - DEST_BLOB, - ) - self.assertEqual(kw[0]["path"], PATH) - self.assertEqual(kw[0]["query_params"], {}) - SENT = {} - self.assertEqual(kw[0]["data"], SENT) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) - - headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} - self.assertNotIn("X-Goog-Copy-Source-Encryption-Algorithm", headers) - self.assertNotIn("X-Goog-Copy-Source-Encryption-Key", headers) - self.assertNotIn("X-Goog-Copy-Source-Encryption-Key-Sha256", headers) - self.assertNotIn("X-Goog-Encryption-Algorithm", headers) - self.assertNotIn("X-Goog-Encryption-Key", headers) - self.assertNotIn("X-Goog-Encryption-Key-Sha256", headers) + self.assertEqual(token, rewrite_token) + self.assertEqual(rewritten, bytes_rewritten) + self.assertEqual(size, object_size) + + expected_path = "/b/name/o/%s/rewriteTo/b/%s/o/%s" % ( + source_name, + other_bucket_name, + dest_name, + ) + expected_query_params = {} + expected_data = {} + expected_headers = {} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=dest_blob, + ) def test_rewrite_same_name_no_old_key_new_key_done_w_user_project(self): - KEY = b"01234567890123456789012345678901" # 32 bytes - KEY_B64 = base64.b64encode(KEY).rstrip().decode("ascii") - KEY_HASH = hashlib.sha256(KEY).digest() - KEY_HASH_B64 = base64.b64encode(KEY_HASH).rstrip().decode("ascii") - BLOB_NAME = "blob" - USER_PROJECT = "user-project-123" - RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 42, + blob_name = "blob" + user_project = "user-project-123" + key = b"01234567890123456789012345678901" # 32 bytes + key_b64 = base64.b64encode(key).rstrip().decode("ascii") + key_hash = hashlib.sha256(key).digest() + key_hash_b64 = base64.b64encode(key_hash).rstrip().decode("ascii") + bytes_rewritten = object_size = 52 + api_response = { + "totalBytesRewritten": bytes_rewritten, + "objectSize": object_size, "done": True, "resource": {"etag": "DEADBEEF"}, } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) - plain = self._make_one(BLOB_NAME, bucket=bucket) - encrypted = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=KEY) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response + bucket = _Bucket(client=client, user_project=user_project) + plain = self._make_one(blob_name, bucket=bucket) + encrypted = self._make_one(blob_name, bucket=bucket, encryption_key=key) token, rewritten, size = 
encrypted.rewrite(plain) self.assertIsNone(token) - self.assertEqual(rewritten, 42) - self.assertEqual(size, 42) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "POST") - PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]["path"], PATH) - self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) - SENT = {} - self.assertEqual(kw[0]["data"], SENT) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) - - headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} - self.assertNotIn("X-Goog-Copy-Source-Encryption-Algorithm", headers) - self.assertNotIn("X-Goog-Copy-Source-Encryption-Key", headers) - self.assertNotIn("X-Goog-Copy-Source-Encryption-Key-Sha256", headers) - self.assertEqual(headers["X-Goog-Encryption-Algorithm"], "AES256") - self.assertEqual(headers["X-Goog-Encryption-Key"], KEY_B64) - self.assertEqual(headers["X-Goog-Encryption-Key-Sha256"], KEY_HASH_B64) + self.assertEqual(rewritten, bytes_rewritten) + self.assertEqual(size, object_size) + + expected_path = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (blob_name, blob_name) + expected_query_params = {"userProject": user_project} + expected_data = {} + expected_headers = { + "X-Goog-Encryption-Algorithm": "AES256", + "X-Goog-Encryption-Key": key_b64, + "X-Goog-Encryption-Key-Sha256": key_hash_b64, + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=encrypted, + ) def test_rewrite_same_name_no_key_new_key_w_token(self): - SOURCE_KEY = b"01234567890123456789012345678901" # 32 bytes - SOURCE_KEY_B64 = base64.b64encode(SOURCE_KEY).rstrip().decode("ascii") - SOURCE_KEY_HASH = hashlib.sha256(SOURCE_KEY).digest() - SOURCE_KEY_HASH_B64 = base64.b64encode(SOURCE_KEY_HASH).rstrip().decode("ascii") - DEST_KEY = b"90123456789012345678901234567890" # 32 bytes - DEST_KEY_B64 = base64.b64encode(DEST_KEY).rstrip().decode("ascii") - DEST_KEY_HASH = hashlib.sha256(DEST_KEY).digest() - DEST_KEY_HASH_B64 = base64.b64encode(DEST_KEY_HASH).rstrip().decode("ascii") - BLOB_NAME = "blob" - TOKEN = "TOKEN" - RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 42, + blob_name = "blob" + source_key = b"01234567890123456789012345678901" # 32 bytes + source_key_b64 = base64.b64encode(source_key).rstrip().decode("ascii") + source_key_hash = hashlib.sha256(source_key).digest() + source_key_hash_b64 = base64.b64encode(source_key_hash).rstrip().decode("ascii") + dest_key = b"90123456789012345678901234567890" # 32 bytes + dest_key_b64 = base64.b64encode(dest_key).rstrip().decode("ascii") + dest_key_hash = hashlib.sha256(dest_key).digest() + dest_key_hash_b64 = base64.b64encode(dest_key_hash).rstrip().decode("ascii") + previous_token = "TOKEN" + bytes_rewritten = object_size = 52 + api_response = { + "totalBytesRewritten": bytes_rewritten, + "objectSize": object_size, "done": True, "resource": {"etag": "DEADBEEF"}, } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = _Bucket(client=client) - source = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=SOURCE_KEY) - dest = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=DEST_KEY) + source = 
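# (Aside: the customer-supplied-encryption-key headers asserted in these
# rewrite tests are derived from the raw 32-byte key: the key itself and its
# SHA-256 digest, both base64-encoded. A minimal sketch of that derivation;
# `csek_headers` is a hypothetical helper, and `prefix` switches between the
# destination ("X-Goog") and copy-source ("X-Goog-Copy-Source") header
# families seen above and below:)

import base64
import hashlib

def csek_headers(key, prefix="X-Goog"):
    return {
        "%s-Encryption-Algorithm" % prefix: "AES256",
        "%s-Encryption-Key" % prefix: base64.b64encode(key).decode("ascii"),
        "%s-Encryption-Key-Sha256" % prefix: base64.b64encode(
            hashlib.sha256(key).digest()
        ).decode("ascii"),
    }

assert set(csek_headers(b"0" * 32)) == {
    "X-Goog-Encryption-Algorithm",
    "X-Goog-Encryption-Key",
    "X-Goog-Encryption-Key-Sha256",
}
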
self._make_one(blob_name, bucket=bucket, encryption_key=source_key) + dest = self._make_one(blob_name, bucket=bucket, encryption_key=dest_key) - token, rewritten, size = dest.rewrite(source, token=TOKEN) + token, rewritten, size = dest.rewrite(source, token=previous_token) self.assertIsNone(token) - self.assertEqual(rewritten, 42) - self.assertEqual(size, 42) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "POST") - PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]["path"], PATH) - self.assertEqual(kw[0]["query_params"], {"rewriteToken": TOKEN}) - SENT = {} - self.assertEqual(kw[0]["data"], SENT) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) - - headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} - self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Algorithm"], "AES256") - self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Key"], SOURCE_KEY_B64) - self.assertEqual( - headers["X-Goog-Copy-Source-Encryption-Key-Sha256"], SOURCE_KEY_HASH_B64 + self.assertEqual(rewritten, bytes_rewritten) + self.assertEqual(size, object_size) + + expected_path = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (blob_name, blob_name) + expected_data = {} + expected_query_params = {"rewriteToken": previous_token} + expected_headers = { + "X-Goog-Copy-Source-Encryption-Algorithm": "AES256", + "X-Goog-Copy-Source-Encryption-Key": source_key_b64, + "X-Goog-Copy-Source-Encryption-Key-Sha256": source_key_hash_b64, + "X-Goog-Encryption-Algorithm": "AES256", + "X-Goog-Encryption-Key": dest_key_b64, + "X-Goog-Encryption-Key-Sha256": dest_key_hash_b64, + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=dest, ) - self.assertEqual(headers["X-Goog-Encryption-Algorithm"], "AES256") - self.assertEqual(headers["X-Goog-Encryption-Key"], DEST_KEY_B64) - self.assertEqual(headers["X-Goog-Encryption-Key-Sha256"], DEST_KEY_HASH_B64) def test_rewrite_same_name_w_old_key_new_kms_key(self): - SOURCE_KEY = b"01234567890123456789012345678901" # 32 bytes - SOURCE_KEY_B64 = base64.b64encode(SOURCE_KEY).rstrip().decode("ascii") - SOURCE_KEY_HASH = hashlib.sha256(SOURCE_KEY).digest() - SOURCE_KEY_HASH_B64 = base64.b64encode(SOURCE_KEY_HASH).rstrip().decode("ascii") - DEST_KMS_RESOURCE = ( + blob_name = "blob" + source_key = b"01234567890123456789012345678901" # 32 bytes + source_key_b64 = base64.b64encode(source_key).rstrip().decode("ascii") + source_key_hash = hashlib.sha256(source_key).digest() + source_key_hash_b64 = base64.b64encode(source_key_hash).rstrip().decode("ascii") + dest_kms_resource = ( "projects/test-project-123/" "locations/us/" "keyRings/test-ring/" "cryptoKeys/test-key" ) - BLOB_NAME = "blob" - RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 42, + bytes_rewritten = object_size = 42 + api_response = { + "totalBytesRewritten": bytes_rewritten, + "objectSize": object_size, "done": True, "resource": {"etag": "DEADBEEF"}, } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = _Bucket(client=client) - source = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=SOURCE_KEY) - dest = 
self._make_one(BLOB_NAME, bucket=bucket, kms_key_name=DEST_KMS_RESOURCE) + source = self._make_one(blob_name, bucket=bucket, encryption_key=source_key) + dest = self._make_one(blob_name, bucket=bucket, kms_key_name=dest_kms_resource) token, rewritten, size = dest.rewrite(source) self.assertIsNone(token) - self.assertEqual(rewritten, 42) - self.assertEqual(size, 42) - - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "POST") - PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]["path"], PATH) - self.assertEqual( - kw[0]["query_params"], {"destinationKmsKeyName": DEST_KMS_RESOURCE} - ) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) - SENT = {"kmsKeyName": DEST_KMS_RESOURCE} - self.assertEqual(kw[0]["data"], SENT) + self.assertEqual(rewritten, bytes_rewritten) + self.assertEqual(size, object_size) - headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} - self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Algorithm"], "AES256") - self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Key"], SOURCE_KEY_B64) - self.assertEqual( - headers["X-Goog-Copy-Source-Encryption-Key-Sha256"], SOURCE_KEY_HASH_B64 + expected_path = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (blob_name, blob_name) + expected_data = {"kmsKeyName": dest_kms_resource} + expected_query_params = {"destinationKmsKeyName": dest_kms_resource} + expected_headers = { + "X-Goog-Copy-Source-Encryption-Algorithm": "AES256", + "X-Goog-Copy-Source-Encryption-Key": source_key_b64, + "X-Goog-Copy-Source-Encryption-Key-Sha256": source_key_hash_b64, + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=dest, ) def test_update_storage_class_invalid(self): - BLOB_NAME = "blob-name" + blob_name = "blob-name" bucket = _Bucket() - blob = self._make_one(BLOB_NAME, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket) + blob.rewrite = mock.Mock(spec=[]) + with self.assertRaises(ValueError): blob.update_storage_class(u"BOGUS") - def test_update_storage_class_large_file(self): - BLOB_NAME = "blob-name" - STORAGE_CLASS = u"NEARLINE" - TOKEN = "TOKEN" - INCOMPLETE_RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 84, - "done": False, - "rewriteToken": TOKEN, - "resource": {"storageClass": STORAGE_CLASS}, - } - COMPLETE_RESPONSE = { - "totalBytesRewritten": 84, - "objectSize": 84, - "done": True, - "resource": {"storageClass": STORAGE_CLASS}, - } - response_1 = ({"status": http_client.OK}, INCOMPLETE_RESPONSE) - response_2 = ({"status": http_client.OK}, COMPLETE_RESPONSE) - connection = _Connection(response_1, response_2) - client = _Client(connection) + blob.rewrite.assert_not_called() + + def _update_storage_class_multi_pass_helper(self, **kw): + blob_name = "blob-name" + storage_class = u"NEARLINE" + rewrite_token = "TOKEN" + bytes_rewritten = 42 + object_size = 84 + client = mock.Mock(spec=[]) bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket) + blob.rewrite = mock.Mock(spec=[]) + blob.rewrite.side_effect = [ + (rewrite_token, bytes_rewritten, object_size), + (None, object_size, object_size), + ] - blob.update_storage_class("NEARLINE") + expected_i_g_m = kw.get("if_generation_match") + expected_i_g_n_m = 
kw.get("if_generation_not_match") + expected_i_m_m = kw.get("if_metageneration_match") + expected_i_m_n_m = kw.get("if_metageneration_not_match") + expected_i_s_g_m = kw.get("if_source_generation_match") + expected_i_s_g_n_m = kw.get("if_source_generation_not_match") + expected_i_s_m_m = kw.get("if_source_metageneration_match") + expected_i_s_m_n_m = kw.get("if_source_metageneration_not_match") + expected_timeout = kw.get("timeout", self._get_default_timeout()) + expected_retry = kw.get("retry", DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - self.assertEqual(blob.storage_class, "NEARLINE") + blob.update_storage_class(storage_class, **kw) - def test_update_storage_class_with_custom_timeout(self): - BLOB_NAME = "blob-name" - STORAGE_CLASS = u"NEARLINE" - TOKEN = "TOKEN" - INCOMPLETE_RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 84, - "done": False, - "rewriteToken": TOKEN, - "resource": {"storageClass": STORAGE_CLASS}, - } - COMPLETE_RESPONSE = { - "totalBytesRewritten": 84, - "objectSize": 84, - "done": True, - "resource": {"storageClass": STORAGE_CLASS}, - } - response_1 = ({"status": http_client.OK}, INCOMPLETE_RESPONSE) - response_2 = ({"status": http_client.OK}, COMPLETE_RESPONSE) - connection = _Connection(response_1, response_2) - client = _Client(connection) - bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + self.assertEqual(blob.storage_class, storage_class) + + call1 = mock.call( + blob, + if_generation_match=expected_i_g_m, + if_generation_not_match=expected_i_g_n_m, + if_metageneration_match=expected_i_m_m, + if_metageneration_not_match=expected_i_m_n_m, + if_source_generation_match=expected_i_s_g_m, + if_source_generation_not_match=expected_i_s_g_n_m, + if_source_metageneration_match=expected_i_s_m_m, + if_source_metageneration_not_match=expected_i_s_m_n_m, + timeout=expected_timeout, + retry=expected_retry, + ) + call2 = mock.call( + blob, + token=rewrite_token, + if_generation_match=expected_i_g_m, + if_generation_not_match=expected_i_g_n_m, + if_metageneration_match=expected_i_m_m, + if_metageneration_not_match=expected_i_m_n_m, + if_source_generation_match=expected_i_s_g_m, + if_source_generation_not_match=expected_i_s_g_n_m, + if_source_metageneration_match=expected_i_s_m_m, + if_source_metageneration_not_match=expected_i_s_m_n_m, + timeout=expected_timeout, + retry=expected_retry, + ) + blob.rewrite.assert_has_calls([call1, call2]) - blob.update_storage_class("NEARLINE", timeout=9.58) + def test_update_storage_class_multi_pass_w_defaults(self): + self._update_storage_class_multi_pass_helper() - self.assertEqual(blob.storage_class, "NEARLINE") + def test_update_storage_class_multi_pass_w_i_g_m(self): + generation = 16 + self._update_storage_class_multi_pass_helper(if_generation_match=generation) - kw = connection._requested - self.assertEqual(len(kw), 2) + def test_update_storage_class_multi_pass_w_i_g_n_m(self): + generation = 16 + self._update_storage_class_multi_pass_helper(if_generation_not_match=generation) - for kw_item in kw: - self.assertIn("timeout", kw_item) - self.assertEqual(kw_item["timeout"], 9.58) + def test_update_storage_class_multi_pass_w_i_m_m(self): + metageneration = 16 + self._update_storage_class_multi_pass_helper( + if_metageneration_match=metageneration, + ) - def test_update_storage_class_wo_encryption_key(self): - BLOB_NAME = "blob-name" - STORAGE_CLASS = u"NEARLINE" - RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 42, - "done": True, - "resource": {"storageClass": STORAGE_CLASS}, - } - response 
= ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) + def test_update_storage_class_multi_pass_w_i_m_n_m(self): + metageneration = 16 + self._update_storage_class_multi_pass_helper( + if_metageneration_not_match=metageneration, + ) + + def test_update_storage_class_multi_pass_w_i_s_g_m(self): + generation = 16 + self._update_storage_class_multi_pass_helper( + if_source_generation_match=generation + ) + + def test_update_storage_class_multi_pass_w_i_s_g_n_m(self): + generation = 16 + self._update_storage_class_multi_pass_helper( + if_source_generation_not_match=generation + ) + + def test_update_storage_class_multi_pass_w_i_s_m_m(self): + metageneration = 16 + self._update_storage_class_multi_pass_helper( + if_source_metageneration_match=metageneration, + ) + + def test_update_storage_class_multi_pass_w_i_s_m_n_m(self): + metageneration = 16 + self._update_storage_class_multi_pass_helper( + if_source_metageneration_not_match=metageneration, + ) + + def test_update_storage_class_multi_pass_w_timeout(self): + timeout = 42 + self._update_storage_class_multi_pass_helper(timeout=timeout) + + def test_update_storage_class_multi_pass_w_retry(self): + retry = mock.Mock(spec=[]) + self._update_storage_class_multi_pass_helper(retry=retry) + + def _update_storage_class_single_pass_helper(self, **kw): + blob_name = "blob-name" + storage_class = u"NEARLINE" + object_size = 84 + client = mock.Mock(spec=[]) bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + blob = self._make_one(blob_name, bucket=bucket) + blob.rewrite = mock.Mock(spec=[]) + blob.rewrite.return_value = (None, object_size, object_size) + + expected_i_g_m = kw.get("if_generation_match") + expected_i_g_n_m = kw.get("if_generation_not_match") + expected_i_m_m = kw.get("if_metageneration_match") + expected_i_m_n_m = kw.get("if_metageneration_not_match") + expected_i_s_g_m = kw.get("if_source_generation_match") + expected_i_s_g_n_m = kw.get("if_source_generation_not_match") + expected_i_s_m_m = kw.get("if_source_metageneration_match") + expected_i_s_m_n_m = kw.get("if_source_metageneration_not_match") + expected_timeout = kw.get("timeout", self._get_default_timeout()) + expected_retry = kw.get("retry", DEFAULT_RETRY_IF_GENERATION_SPECIFIED) + + blob.update_storage_class(storage_class, **kw) - blob.update_storage_class("NEARLINE") + self.assertEqual(blob.storage_class, storage_class) - self.assertEqual(blob.storage_class, "NEARLINE") + blob.rewrite.assert_called_once_with( + blob, + if_generation_match=expected_i_g_m, + if_generation_not_match=expected_i_g_n_m, + if_metageneration_match=expected_i_m_m, + if_metageneration_not_match=expected_i_m_n_m, + if_source_generation_match=expected_i_s_g_m, + if_source_generation_not_match=expected_i_s_g_n_m, + if_source_metageneration_match=expected_i_s_m_m, + if_source_metageneration_not_match=expected_i_s_m_n_m, + timeout=expected_timeout, + retry=expected_retry, + ) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "POST") - PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]["path"], PATH) - self.assertEqual(kw[0]["query_params"], {}) - SENT = {"storageClass": STORAGE_CLASS} - self.assertEqual(kw[0]["data"], SENT) - - headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} - # Blob has no key, and therefore the relevant headers are not sent. 
- self.assertNotIn("X-Goog-Copy-Source-Encryption-Algorithm", headers) - self.assertNotIn("X-Goog-Copy-Source-Encryption-Key", headers) - self.assertNotIn("X-Goog-Copy-Source-Encryption-Key-Sha256", headers) - self.assertNotIn("X-Goog-Encryption-Algorithm", headers) - self.assertNotIn("X-Goog-Encryption-Key", headers) - self.assertNotIn("X-Goog-Encryption-Key-Sha256", headers) - - def test_update_storage_class_w_encryption_key_w_user_project(self): - BLOB_NAME = "blob-name" - BLOB_KEY = b"01234567890123456789012345678901" # 32 bytes - BLOB_KEY_B64 = base64.b64encode(BLOB_KEY).rstrip().decode("ascii") - BLOB_KEY_HASH = hashlib.sha256(BLOB_KEY).digest() - BLOB_KEY_HASH_B64 = base64.b64encode(BLOB_KEY_HASH).rstrip().decode("ascii") - STORAGE_CLASS = u"NEARLINE" - USER_PROJECT = "user-project-123" - RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 42, - "done": True, - "resource": {"storageClass": STORAGE_CLASS}, - } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) - bucket = _Bucket(client=client, user_project=USER_PROJECT) - blob = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=BLOB_KEY) + def test_update_storage_class_single_pass_w_defaults(self): + self._update_storage_class_single_pass_helper() - blob.update_storage_class("NEARLINE") + def test_update_storage_class_single_pass_w_i_g_m(self): + generation = 16 + self._update_storage_class_single_pass_helper(if_generation_match=generation) - self.assertEqual(blob.storage_class, "NEARLINE") + def test_update_storage_class_single_pass_w_i_g_n_m(self): + generation = 16 + self._update_storage_class_single_pass_helper( + if_generation_not_match=generation + ) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "POST") - PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]["path"], PATH) - self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT}) - SENT = {"storageClass": STORAGE_CLASS} - self.assertEqual(kw[0]["data"], SENT) - - headers = {key.title(): str(value) for key, value in kw[0]["headers"].items()} - # Blob has key, and therefore the relevant headers are sent. 
- self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Algorithm"], "AES256") - self.assertEqual(headers["X-Goog-Copy-Source-Encryption-Key"], BLOB_KEY_B64) - self.assertEqual( - headers["X-Goog-Copy-Source-Encryption-Key-Sha256"], BLOB_KEY_HASH_B64 + def test_update_storage_class_single_pass_w_i_m_m(self): + metageneration = 16 + self._update_storage_class_single_pass_helper( + if_metageneration_match=metageneration, ) - self.assertEqual(headers["X-Goog-Encryption-Algorithm"], "AES256") - self.assertEqual(headers["X-Goog-Encryption-Key"], BLOB_KEY_B64) - self.assertEqual(headers["X-Goog-Encryption-Key-Sha256"], BLOB_KEY_HASH_B64) - def test_update_storage_class_w_generation_match(self): - BLOB_NAME = "blob-name" - STORAGE_CLASS = u"NEARLINE" - GENERATION_NUMBER = 6 - SOURCE_GENERATION_NUMBER = 9 - RESPONSE = { - "totalBytesRewritten": 42, - "objectSize": 42, - "done": True, - "resource": {"storageClass": STORAGE_CLASS}, - } - response = ({"status": http_client.OK}, RESPONSE) - connection = _Connection(response) - client = _Client(connection) - bucket = _Bucket(client=client) - blob = self._make_one(BLOB_NAME, bucket=bucket) + def test_update_storage_class_single_pass_w_i_m_n_m(self): + metageneration = 16 + self._update_storage_class_single_pass_helper( + if_metageneration_not_match=metageneration, + ) - blob.update_storage_class( - "NEARLINE", - if_generation_match=GENERATION_NUMBER, - if_source_generation_match=SOURCE_GENERATION_NUMBER, + def test_update_storage_class_single_pass_w_i_s_g_m(self): + generation = 16 + self._update_storage_class_single_pass_helper( + if_source_generation_match=generation ) - self.assertEqual(blob.storage_class, "NEARLINE") + def test_update_storage_class_single_pass_w_i_s_g_n_m(self): + generation = 16 + self._update_storage_class_single_pass_helper( + if_source_generation_not_match=generation + ) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "POST") - PATH = "/b/name/o/%s/rewriteTo/b/name/o/%s" % (BLOB_NAME, BLOB_NAME) - self.assertEqual(kw[0]["path"], PATH) - self.assertEqual( - kw[0]["query_params"], - { - "ifGenerationMatch": GENERATION_NUMBER, - "ifSourceGenerationMatch": SOURCE_GENERATION_NUMBER, - }, + def test_update_storage_class_single_pass_w_i_s_m_m(self): + metageneration = 16 + self._update_storage_class_single_pass_helper( + if_source_metageneration_match=metageneration, + ) + + def test_update_storage_class_single_pass_w_i_s_m_n_m(self): + metageneration = 16 + self._update_storage_class_single_pass_helper( + if_source_metageneration_not_match=metageneration, ) - SENT = {"storageClass": STORAGE_CLASS} - self.assertEqual(kw[0]["data"], SENT) + + def test_update_storage_class_single_pass_w_timeout(self): + timeout = 42 + self._update_storage_class_single_pass_helper(timeout=timeout) + + def test_update_storage_class_single_pass_w_retry(self): + retry = mock.Mock(spec=[]) + self._update_storage_class_single_pass_helper(retry=retry) def test_cache_control_getter(self): BLOB_NAME = "blob-name" @@ -4677,8 +5182,7 @@ def test_custom_time_unset(self): def test_from_string_w_valid_uri(self): from google.cloud.storage.blob import Blob - connection = _Connection() - client = _Client(connection) + client = self._make_client() uri = "gs://BUCKET_NAME/b" blob = Blob.from_string(uri, client) @@ -4690,8 +5194,7 @@ def test_from_string_w_valid_uri(self): def test_from_string_w_invalid_uri(self): from google.cloud.storage.blob import Blob - connection = _Connection() - client = _Client(connection) + client 
= self._make_client() with pytest.raises(ValueError, match="URI scheme must be gs"): Blob.from_string("http://bucket_name/b", client) @@ -4699,8 +5202,7 @@ def test_from_string_w_invalid_uri(self): def test_from_string_w_domain_name_bucket(self): from google.cloud.storage.blob import Blob - connection = _Connection() - client = _Client(connection) + client = self._make_client() uri = "gs://buckets.example.com/b" blob = Blob.from_string(uri, client) @@ -4888,30 +5390,12 @@ class _Connection(object): USER_AGENT = "testing 1.2.3" credentials = object() - def __init__(self, *responses): - self._responses = responses[:] - self._requested = [] - self._signed = [] - - def _respond(self, **kw): - self._requested.append(kw) - response, self._responses = self._responses[0], self._responses[1:] - return response - - def api_request(self, **kw): - from google.cloud.exceptions import NotFound - - info, content = self._respond(**kw) - if info.get("status") == http_client.NOT_FOUND: - raise NotFound(info) - return content - class _Bucket(object): def __init__(self, client=None, name="name", user_project=None): if client is None: - connection = _Connection() - client = _Client(connection) + client = Test_Blob._make_client() + self.client = client self._blobs = {} self._copied = [] @@ -4946,16 +5430,3 @@ def delete_blob( retry, ) ) - - -class _Client(object): - def __init__(self, connection): - self._base_connection = connection - - @property - def _connection(self): - return self._base_connection - - @property - def _credentials(self): - return self._base_connection.credentials diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py index 4d776c365..244c26b2a 100644 --- a/tests/unit/test_bucket.py +++ b/tests/unit/test_bucket.py @@ -19,19 +19,11 @@ import pytest from google.cloud.storage.retry import DEFAULT_RETRY +from google.cloud.storage.retry import DEFAULT_RETRY_IF_ETAG_IN_JSON from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED from google.cloud.storage.retry import DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED -def _make_connection(*responses): - import google.cloud.storage._http - - mock_connection = mock.create_autospec(google.cloud.storage._http.Connection) - mock_connection.user_agent = "testing 1.2.3" - mock_connection.api_request.side_effect = list(responses) - return mock_connection - - def _create_signing_credentials(): import google.auth.credentials @@ -45,6 +37,79 @@ class _SigningCredentials( return credentials +class Test__blobs_page_start(unittest.TestCase): + @staticmethod + def _call_fut(iterator, page, response): + from google.cloud.storage.bucket import _blobs_page_start + + return _blobs_page_start(iterator, page, response) + + def test_wo_any_prefixes(self): + iterator = mock.Mock(spec=["prefixes"], prefixes=set()) + page = mock.Mock(spec=["prefixes"]) + response = {} + + self._call_fut(iterator, page, response) + + self.assertEqual(page.prefixes, ()) + self.assertEqual(iterator.prefixes, set()) + + def test_w_prefixes(self): + iterator_prefixes = set(["foo/", "qux/"]) + iterator = mock.Mock(spec=["prefixes"], prefixes=iterator_prefixes) + page = mock.Mock(spec=["prefixes"]) + page_prefixes = ["foo/", "bar/", "baz/"] + response = {"prefixes": page_prefixes} + + self._call_fut(iterator, page, response) + + self.assertEqual(page.prefixes, tuple(page_prefixes)) + self.assertEqual(iterator.prefixes, iterator_prefixes.union(page_prefixes)) + + +class Test__item_to_blob(unittest.TestCase): + @staticmethod + def _call_fut(iterator, item): + from 
google.cloud.storage.bucket import _item_to_blob + + return _item_to_blob(iterator, item) + + def test_wo_extra_properties(self): + from google.cloud.storage.blob import Blob + + blob_name = "blob-name" + bucket = mock.Mock(spec=[]) + iterator = mock.Mock(spec=["bucket"], bucket=bucket) + item = {"name": blob_name} + + blob = self._call_fut(iterator, item) + + self.assertIsInstance(blob, Blob) + self.assertIs(blob.bucket, bucket) + self.assertEqual(blob.name, blob_name) + self.assertEqual(blob._properties, item) + + def test_w_extra_properties(self): + from google.cloud.storage.blob import Blob + + blob_name = "blob-name" + bucket = mock.Mock(spec=[]) + iterator = mock.Mock(spec=["bucket"], bucket=bucket) + item = { + "name": blob_name, + "generation": 123, + "contentType": "text/plain", + "contentLanguage": "en-US", + } + + blob = self._call_fut(iterator, item) + + self.assertIsInstance(blob, Blob) + self.assertIs(blob.bucket, bucket) + self.assertEqual(blob.name, blob_name) + self.assertEqual(blob._properties, item) + + class Test_LifecycleRuleConditions(unittest.TestCase): @staticmethod def _get_target_class(): @@ -450,15 +515,14 @@ def _get_default_timeout(): return _DEFAULT_TIMEOUT @staticmethod - def _make_client(*args, **kw): + def _make_client(**kw): from google.cloud.storage.client import Client - return Client(*args, **kw) + return mock.create_autospec(Client, instance=True, **kw) def _make_one(self, client=None, name=None, properties=None, user_project=None): if client is None: - connection = _Connection() - client = _Client(connection) + client = self._make_client() if user_project is None: bucket = self._get_target_class()(client, name=name) else: @@ -490,8 +554,7 @@ def test_ctor(self): def test_ctor_w_user_project(self): NAME = "name" USER_PROJECT = "user-project-123" - connection = _Connection() - client = _Client(connection) + client = self._make_client() bucket = self._make_one(client, name=NAME, user_project=USER_PROJECT) self.assertEqual(bucket.name, NAME) self.assertEqual(bucket._properties, {}) @@ -583,7 +646,7 @@ def test_notification_defaults(self): PROJECT = "PROJECT" BUCKET_NAME = "BUCKET_NAME" TOPIC_NAME = "TOPIC_NAME" - client = _Client(_Connection(), project=PROJECT) + client = self._make_client(project=PROJECT) bucket = self._make_one(client, name=BUCKET_NAME) notification = bucket.notification(TOPIC_NAME) @@ -611,7 +674,7 @@ def test_notification_explicit(self): CUSTOM_ATTRIBUTES = {"attr1": "value1", "attr2": "value2"} EVENT_TYPES = [OBJECT_FINALIZE_EVENT_TYPE, OBJECT_DELETE_EVENT_TYPE] BLOB_NAME_PREFIX = "blob-name-prefix/" - client = _Client(_Connection(), project=PROJECT) + client = self._make_client(project=PROJECT) bucket = self._make_one(client, name=BUCKET_NAME) notification = bucket.notification( @@ -650,95 +713,72 @@ def test_user_project(self): bucket._user_project = USER_PROJECT self.assertEqual(bucket.user_project, USER_PROJECT) - def test_exists_miss(self): + def test_exists_miss_w_defaults(self): from google.cloud.exceptions import NotFound - class _FakeConnection(object): + bucket_name = "bucket-name" + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.side_effect = NotFound("testing") + bucket = self._make_one(client, name=bucket_name) - _called_with = [] + self.assertFalse(bucket.exists()) - @classmethod - def api_request(cls, *args, **kwargs): - cls._called_with.append((args, kwargs)) - raise NotFound(args) - - BUCKET_NAME = "bucket-name" - bucket = self._make_one(name=BUCKET_NAME) - client = _Client(_FakeConnection) - 
self.assertFalse(bucket.exists(client=client, timeout=42)) - expected_called_kwargs = { - "method": "GET", - "path": bucket.path, - "query_params": {"fields": "name"}, - "_target_object": None, - "timeout": 42, - "retry": DEFAULT_RETRY, - } - expected_cw = [((), expected_called_kwargs)] - self.assertEqual(_FakeConnection._called_with, expected_cw) - - def test_exists_with_metageneration_match(self): - class _FakeConnection(object): - - _called_with = [] - - @classmethod - def api_request(cls, *args, **kwargs): - cls._called_with.append((args, kwargs)) - # exists() does not use the return value - return object() + expected_query_params = {"fields": "name"} + client._get_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) - BUCKET_NAME = "bucket-name" - METAGENERATION_NUMBER = 6 + def test_exists_w_metageneration_match_w_timeout(self): + bucket_name = "bucket-name" + metageneration_number = 6 + timeout = 42 + api_response = {"name": bucket_name} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client, name=bucket_name) - bucket = self._make_one(name=BUCKET_NAME) - client = _Client(_FakeConnection) self.assertTrue( - bucket.exists( - client=client, timeout=42, if_metageneration_match=METAGENERATION_NUMBER - ) + bucket.exists(timeout=timeout, if_metageneration_match=metageneration_number) ) - expected_called_kwargs = { - "method": "GET", - "path": bucket.path, - "query_params": { - "fields": "name", - "ifMetagenerationMatch": METAGENERATION_NUMBER, - }, - "_target_object": None, - "timeout": 42, - "retry": DEFAULT_RETRY, - } - expected_cw = [((), expected_called_kwargs)] - self.assertEqual(_FakeConnection._called_with, expected_cw) - - def test_exists_hit_w_user_project(self): - USER_PROJECT = "user-project-123" - class _FakeConnection(object): + expected_query_params = { + "fields": "name", + "ifMetagenerationMatch": metageneration_number, + } + client._get_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params, + timeout=timeout, + retry=DEFAULT_RETRY, + _target_object=None, + ) - _called_with = [] + def test_exists_hit_w_user_project_w_retry_w_explicit_client(self): + bucket_name = "bucket-name" + user_project = "user-project-123" + retry = mock.Mock(spec=[]) + api_response = {"name": bucket_name} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(name=bucket_name, user_project=user_project) - @classmethod - def api_request(cls, *args, **kwargs): - cls._called_with.append((args, kwargs)) - # exists() does not use the return value - return object() + self.assertTrue(bucket.exists(client=client, retry=retry)) - BUCKET_NAME = "bucket-name" - bucket = self._make_one(name=BUCKET_NAME, user_project=USER_PROJECT) - client = _Client(_FakeConnection) - self.assertTrue(bucket.exists(client=client)) - expected_called_kwargs = { - "method": "GET", - "path": bucket.path, - "query_params": {"fields": "name", "userProject": USER_PROJECT}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, + expected_query_params = { + "fields": "name", + "userProject": user_project, } - expected_cw = [((), expected_called_kwargs)] - self.assertEqual(_FakeConnection._called_with, expected_cw) + client._get_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params,
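+ # The bucket's userProject is expected in the query string, the caller's
+ # retry object should be passed through to the transport call unchanged,
+ # and the timeout should fall back to the default since none was supplied.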
+ timeout=self._get_default_timeout(), + retry=retry, + _target_object=None, + ) def test_acl_property(self): from google.cloud.storage.acl import BucketACL @@ -765,671 +805,920 @@ def test_path_w_name(self): bucket = self._make_one(name=NAME) self.assertEqual(bucket.path, "/b/%s" % NAME) - def test_get_blob_miss(self): - NAME = "name" - NONESUCH = "nonesuch" - connection = _Connection() - client = _Client(connection) - bucket = self._make_one(name=NAME) - result = bucket.get_blob(NONESUCH, client=client, timeout=42) + def test_get_blob_miss_w_defaults(self): + from google.cloud.exceptions import NotFound + from google.cloud.storage.blob import Blob + + name = "name" + blob_name = "nonesuch" + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.side_effect = NotFound("testing") + bucket = self._make_one(client, name=name) + + result = bucket.get_blob(blob_name) + self.assertIsNone(result) - (kw,) = connection._requested - self.assertEqual(kw["method"], "GET") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, NONESUCH)) - self.assertEqual(kw["timeout"], 42) + + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=mock.ANY, + ) + + target = client._get_resource.call_args[1]["_target_object"] + self.assertIsInstance(target, Blob) + self.assertIs(target.bucket, bucket) + self.assertEqual(target.name, blob_name) def test_get_blob_hit_w_user_project(self): - NAME = "name" - BLOB_NAME = "blob-name" - USER_PROJECT = "user-project-123" - connection = _Connection({"name": BLOB_NAME}) - client = _Client(connection) - bucket = self._make_one(name=NAME, user_project=USER_PROJECT) - blob = bucket.get_blob(BLOB_NAME, client=client) + from google.cloud.storage.blob import Blob + + name = "name" + blob_name = "blob-name" + user_project = "user-project-123" + api_response = {"name": blob_name} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client, name=name, user_project=user_project) + + blob = bucket.get_blob(blob_name, client=client) + + self.assertIsInstance(blob, Blob) self.assertIs(blob.bucket, bucket) - self.assertEqual(blob.name, BLOB_NAME) - (kw,) = connection._requested - expected_qp = {"userProject": USER_PROJECT, "projection": "noAcl"} - self.assertEqual(kw["method"], "GET") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw["query_params"], expected_qp) - self.assertEqual(kw["timeout"], self._get_default_timeout()) - - def test_get_blob_hit_w_generation(self): - NAME = "name" - BLOB_NAME = "blob-name" - GENERATION = 1512565576797178 - connection = _Connection({"name": BLOB_NAME, "generation": GENERATION}) - client = _Client(connection) - bucket = self._make_one(name=NAME) - blob = bucket.get_blob(BLOB_NAME, client=client, generation=GENERATION) + self.assertEqual(blob.name, blob_name) + + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = { + "userProject": user_project, + "projection": "noAcl", + } + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=blob, + ) + + def 
test_get_blob_hit_w_generation_w_timeout(self): + from google.cloud.storage.blob import Blob + + name = "name" + blob_name = "blob-name" + generation = 1512565576797178 + timeout = 42 + api_response = {"name": blob_name, "generation": generation} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client, name=name) + + blob = bucket.get_blob(blob_name, generation=generation, timeout=timeout) + + self.assertIsInstance(blob, Blob) self.assertIs(blob.bucket, bucket) - self.assertEqual(blob.name, BLOB_NAME) - self.assertEqual(blob.generation, GENERATION) - (kw,) = connection._requested - expected_qp = {"generation": GENERATION, "projection": "noAcl"} - self.assertEqual(kw["method"], "GET") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw["query_params"], expected_qp) - self.assertEqual(kw["timeout"], self._get_default_timeout()) - - def test_get_blob_w_generation_match(self): - NAME = "name" - BLOB_NAME = "blob-name" - GENERATION = 1512565576797178 + self.assertEqual(blob.name, blob_name) + self.assertEqual(blob.generation, generation) - connection = _Connection({"name": BLOB_NAME, "generation": GENERATION}) - client = _Client(connection) - bucket = self._make_one(name=NAME) - blob = bucket.get_blob(BLOB_NAME, client=client, if_generation_match=GENERATION) + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = { + "generation": generation, + "projection": "noAcl", + } + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=timeout, + retry=DEFAULT_RETRY, + _target_object=blob, + ) + + def test_get_blob_w_generation_match_w_retry(self): + from google.cloud.storage.blob import Blob + + name = "name" + blob_name = "blob-name" + generation = 1512565576797178 + retry = mock.Mock(spec=[]) + api_response = {"name": blob_name, "generation": generation} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client, name=name) + blob = bucket.get_blob(blob_name, if_generation_match=generation, retry=retry) + + self.assertIsInstance(blob, Blob) self.assertIs(blob.bucket, bucket) - self.assertEqual(blob.name, BLOB_NAME) - self.assertEqual(blob.generation, GENERATION) - (kw,) = connection._requested - expected_qp = {"ifGenerationMatch": GENERATION, "projection": "noAcl"} - self.assertEqual(kw["method"], "GET") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw["query_params"], expected_qp) - self.assertEqual(kw["timeout"], self._get_default_timeout()) - - def test_get_blob_hit_with_kwargs(self): + self.assertEqual(blob.name, blob_name) + self.assertEqual(blob.generation, generation) + + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = { + "ifGenerationMatch": generation, + "projection": "noAcl", + } + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=blob, + ) + + def test_get_blob_hit_with_kwargs_w_explicit_client(self): + from google.cloud.storage.blob import Blob from google.cloud.storage.blob import _get_encryption_headers - NAME = "name" - BLOB_NAME = "blob-name" - CHUNK_SIZE = 1024 * 1024 - KEY = b"01234567890123456789012345678901" # 32 bytes + name = "name" + 
blob_name = "blob-name" + chunk_size = 1024 * 1024 + key = b"01234567890123456789012345678901" # 32 bytes + api_response = {"name": blob_name} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(name=name) - connection = _Connection({"name": BLOB_NAME}) - client = _Client(connection) - bucket = self._make_one(name=NAME) blob = bucket.get_blob( - BLOB_NAME, client=client, encryption_key=KEY, chunk_size=CHUNK_SIZE + blob_name, client=client, encryption_key=key, chunk_size=chunk_size ) + + self.assertIsInstance(blob, Blob) self.assertIs(blob.bucket, bucket) - self.assertEqual(blob.name, BLOB_NAME) - (kw,) = connection._requested - self.assertEqual(kw["method"], "GET") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw["headers"], _get_encryption_headers(KEY)) - self.assertEqual(kw["timeout"], self._get_default_timeout()) - self.assertEqual(blob.chunk_size, CHUNK_SIZE) - self.assertEqual(blob._encryption_key, KEY) + self.assertEqual(blob.name, blob_name) + self.assertEqual(blob.chunk_size, chunk_size) + self.assertEqual(blob._encryption_key, key) - def test_list_blobs_defaults(self): - NAME = "name" - connection = _Connection({"items": []}) - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME) - iterator = bucket.list_blobs() - blobs = list(iterator) - self.assertEqual(blobs, []) - (kw,) = connection._requested - self.assertEqual(kw["method"], "GET") - self.assertEqual(kw["path"], "/b/%s/o" % NAME) - self.assertEqual(kw["query_params"], {"projection": "noAcl"}) - self.assertEqual(kw["timeout"], self._get_default_timeout()) - - def test_list_blobs_w_all_arguments_and_user_project(self): - NAME = "name" - USER_PROJECT = "user-project-123" - MAX_RESULTS = 10 - PAGE_TOKEN = "ABCD" - PREFIX = "subfolder" - DELIMITER = "/" - START_OFFSET = "c" - END_OFFSET = "g" - INCLUDE_TRAILING_DELIMITER = True - VERSIONS = True - PROJECTION = "full" - FIELDS = "items/contentLanguage,nextPageToken" - EXPECTED = { - "maxResults": 10, - "pageToken": PAGE_TOKEN, - "prefix": PREFIX, - "delimiter": DELIMITER, - "startOffset": START_OFFSET, - "endOffset": END_OFFSET, - "includeTrailingDelimiter": INCLUDE_TRAILING_DELIMITER, - "versions": VERSIONS, - "projection": PROJECTION, - "fields": FIELDS, - "userProject": USER_PROJECT, + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = { + "projection": "noAcl", } - connection = _Connection({"items": []}) + expected_headers = _get_encryption_headers(key) + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=blob, + ) + + def test_list_blobs_w_defaults(self): + name = "name" client = self._make_client() - client._base_connection = connection - bucket = self._make_one(name=NAME, user_project=USER_PROJECT) + client.list_blobs = mock.Mock(spec=[]) + bucket = self._make_one(client=client, name=name) + + iterator = bucket.list_blobs() + + self.assertIs(iterator, client.list_blobs.return_value) + + expected_page_token = None + expected_max_results = None + expected_prefix = None + expected_delimiter = None + expected_start_offset = None + expected_end_offset = None + expected_include_trailing_delimiter = None + expected_versions = None + expected_projection = "noAcl" + expected_fields = None + client.list_blobs.assert_called_once_with( + 
bucket, + max_results=expected_max_results, + page_token=expected_page_token, + prefix=expected_prefix, + delimiter=expected_delimiter, + start_offset=expected_start_offset, + end_offset=expected_end_offset, + include_trailing_delimiter=expected_include_trailing_delimiter, + versions=expected_versions, + projection=expected_projection, + fields=expected_fields, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + def test_list_blobs_w_explicit(self): + name = "name" + max_results = 10 + page_token = "ABCD" + prefix = "subfolder" + delimiter = "/" + start_offset = "c" + end_offset = "g" + include_trailing_delimiter = True + versions = True + projection = "full" + fields = "items/contentLanguage,nextPageToken" + bucket = self._make_one(client=None, name=name) + other_client = self._make_client() + other_client.list_blobs = mock.Mock(spec=[]) + timeout = 42 + retry = mock.Mock(spec=[]) + iterator = bucket.list_blobs( - max_results=MAX_RESULTS, - page_token=PAGE_TOKEN, - prefix=PREFIX, - delimiter=DELIMITER, - start_offset=START_OFFSET, - end_offset=END_OFFSET, - include_trailing_delimiter=INCLUDE_TRAILING_DELIMITER, - versions=VERSIONS, - projection=PROJECTION, - fields=FIELDS, - client=client, - timeout=42, + max_results=max_results, + page_token=page_token, + prefix=prefix, + delimiter=delimiter, + start_offset=start_offset, + end_offset=end_offset, + include_trailing_delimiter=include_trailing_delimiter, + versions=versions, + projection=projection, + fields=fields, + client=other_client, + timeout=timeout, + retry=retry, ) - blobs = list(iterator) - self.assertEqual(blobs, []) - (kw,) = connection._requested - self.assertEqual(kw["method"], "GET") - self.assertEqual(kw["path"], "/b/%s/o" % NAME) - self.assertEqual(kw["query_params"], EXPECTED) - self.assertEqual(kw["timeout"], 42) - def test_list_notifications(self): - from google.cloud.storage.notification import BucketNotification - from google.cloud.storage.notification import _TOPIC_REF_FMT - from google.cloud.storage.notification import ( - JSON_API_V1_PAYLOAD_FORMAT, - NONE_PAYLOAD_FORMAT, + self.assertIs(iterator, other_client.list_blobs.return_value) + + expected_page_token = page_token + expected_max_results = max_results + expected_prefix = prefix + expected_delimiter = delimiter + expected_start_offset = start_offset + expected_end_offset = end_offset + expected_include_trailing_delimiter = include_trailing_delimiter + expected_versions = versions + expected_projection = projection + expected_fields = fields + other_client.list_blobs.assert_called_once_with( + bucket, + max_results=expected_max_results, + page_token=expected_page_token, + prefix=expected_prefix, + delimiter=expected_delimiter, + start_offset=expected_start_offset, + end_offset=expected_end_offset, + include_trailing_delimiter=expected_include_trailing_delimiter, + versions=expected_versions, + projection=expected_projection, + fields=expected_fields, + timeout=timeout, + retry=retry, ) - NAME = "name" + def test_list_notifications_w_defaults(self): + from google.cloud.storage.bucket import _item_to_notification - topic_refs = [("my-project-123", "topic-1"), ("other-project-456", "topic-2")] + bucket_name = "name" + client = self._make_client() + client._list_resource = mock.Mock(spec=[]) + bucket = self._make_one(client=client, name=bucket_name) - resources = [ - { - "topic": _TOPIC_REF_FMT.format(*topic_refs[0]), - "id": "1", - "etag": "DEADBEEF", - "selfLink": "https://example.com/notification/1", - "payload_format": NONE_PAYLOAD_FORMAT, - }, 
- { - "topic": _TOPIC_REF_FMT.format(*topic_refs[1]), - "id": "2", - "etag": "FACECABB", - "selfLink": "https://example.com/notification/2", - "payload_format": JSON_API_V1_PAYLOAD_FORMAT, - }, - ] - connection = _Connection({"items": resources}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) + iterator = bucket.list_notifications() + + self.assertIs(iterator, client._list_resource.return_value) + self.assertIs(iterator.bucket, bucket) + + expected_path = "/b/{}/notificationConfigs".format(bucket_name) + expected_item_to_value = _item_to_notification + client._list_resource.assert_called_once_with( + expected_path, + expected_item_to_value, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) - notifications = list(bucket.list_notifications(timeout=42)) + def test_list_notifications_w_explicit(self): + from google.cloud.storage.bucket import _item_to_notification - req_args = client._connection._requested[0] - self.assertEqual(req_args.get("timeout"), 42) + bucket_name = "name" + other_client = self._make_client() + other_client._list_resource = mock.Mock(spec=[]) + bucket = self._make_one(client=None, name=bucket_name) + timeout = 42 + retry = mock.Mock(spec=[]) - self.assertEqual(len(notifications), len(resources)) - for notification, resource, topic_ref in zip( - notifications, resources, topic_refs - ): - self.assertIsInstance(notification, BucketNotification) - self.assertEqual(notification.topic_project, topic_ref[0]) - self.assertEqual(notification.topic_name, topic_ref[1]) - self.assertEqual(notification.notification_id, resource["id"]) - self.assertEqual(notification.etag, resource["etag"]) - self.assertEqual(notification.self_link, resource["selfLink"]) - self.assertEqual( - notification.custom_attributes, resource.get("custom_attributes") - ) - self.assertEqual(notification.event_types, resource.get("event_types")) - self.assertEqual( - notification.blob_name_prefix, resource.get("blob_name_prefix") - ) - self.assertEqual( - notification.payload_format, resource.get("payload_format") - ) + iterator = bucket.list_notifications( + client=other_client, timeout=timeout, retry=retry, + ) + + self.assertIs(iterator, other_client._list_resource.return_value) + self.assertIs(iterator.bucket, bucket) + + expected_path = "/b/{}/notificationConfigs".format(bucket_name) + expected_item_to_value = _item_to_notification + other_client._list_resource.assert_called_once_with( + expected_path, expected_item_to_value, timeout=timeout, retry=retry, + ) + + def test_get_notification_miss_w_defaults(self): + from google.cloud.exceptions import NotFound + + project = "my-project-123" + name = "name" + notification_id = "1" + + client = mock.Mock(spec=["_get_resource", "project"]) + client._get_resource.side_effect = NotFound("testing") + client.project = project + bucket = self._make_one(client=client, name=name) + + with self.assertRaises(NotFound): + bucket.get_notification(notification_id=notification_id) + + expected_path = "/b/{}/notificationConfigs/{}".format(name, notification_id) + expected_query_params = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) - def test_get_notification(self): + def test_get_notification_hit_w_explicit_w_user_project(self): + from google.cloud.storage.notification import BucketNotification from google.cloud.storage.notification import _TOPIC_REF_FMT from google.cloud.storage.notification import 
JSON_API_V1_PAYLOAD_FORMAT - NAME = "name" - ETAG = "FACECABB" - NOTIFICATION_ID = "1" - SELF_LINK = "https://example.com/notification/1" - resources = { + project = "my-project-123" + user_project = "user-project-456" + name = "name" + etag = "FACECABB" + notification_id = "1" + self_link = "https://example.com/notification/1" + api_response = { "topic": _TOPIC_REF_FMT.format("my-project-123", "topic-1"), - "id": NOTIFICATION_ID, - "etag": ETAG, - "selfLink": SELF_LINK, + "id": notification_id, + "etag": etag, + "selfLink": self_link, "payload_format": JSON_API_V1_PAYLOAD_FORMAT, } + timeout = 42 + retry = mock.Mock(spec=[]) + client = mock.Mock(spec=["_get_resource", "project"]) + client._get_resource.return_value = api_response + client.project = project + bucket = self._make_one(client=client, name=name, user_project=user_project) - connection = _make_connection(resources) - client = _Client(connection, project="my-project-123") - bucket = self._make_one(client=client, name=NAME) - notification = bucket.get_notification(notification_id=NOTIFICATION_ID) + notification = bucket.get_notification( + notification_id=notification_id, timeout=timeout, retry=retry, + ) - self.assertEqual(notification.notification_id, NOTIFICATION_ID) - self.assertEqual(notification.etag, ETAG) - self.assertEqual(notification.self_link, SELF_LINK) + self.assertIsInstance(notification, BucketNotification) + self.assertEqual(notification.notification_id, notification_id) + self.assertEqual(notification.etag, etag) + self.assertEqual(notification.self_link, self_link) self.assertIsNone(notification.custom_attributes) self.assertIsNone(notification.event_types) self.assertIsNone(notification.blob_name_prefix) self.assertEqual(notification.payload_format, JSON_API_V1_PAYLOAD_FORMAT) - def test_get_notification_miss(self): + expected_path = "/b/{}/notificationConfigs/{}".format(name, notification_id) + expected_query_params = {"userProject": user_project} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + ) + + def test_delete_miss_w_defaults(self): from google.cloud.exceptions import NotFound - response = NotFound("testing") - connection = _make_connection(response) - client = _Client(connection, project="my-project-123") - bucket = self._make_one(client=client, name="name") + name = "name" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.side_effect = NotFound("testing") + bucket = self._make_one(client=client, name=name) + with self.assertRaises(NotFound): - bucket.get_notification(notification_id="1") + bucket.delete() - def test_delete_miss(self): - from google.cloud.exceptions import NotFound + expected_query_params = {} + client._delete_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) - NAME = "name" - connection = _Connection() - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) - self.assertRaises(NotFound, bucket.delete) - expected_cw = [ - { - "method": "DELETE", - "path": bucket.path, - "query_params": {}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - } - ] - self.assertEqual(connection._deleted_buckets, expected_cw) + def test_delete_hit_w_metageneration_match_w_explicit_client(self): + name = "name" + metageneration_number = 6 + client = mock.Mock(spec=["_delete_resource"]) + 
client._delete_resource.return_value = None + bucket = self._make_one(client=None, name=name) - def test_delete_hit_with_user_project(self): - NAME = "name" - USER_PROJECT = "user-project-123" - GET_BLOBS_RESP = {"items": []} - connection = _Connection(GET_BLOBS_RESP) - connection._delete_bucket = True - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT) - result = bucket.delete(force=True, timeout=42) - self.assertIsNone(result) - expected_cw = [ - { - "method": "DELETE", - "path": bucket.path, - "_target_object": None, - "query_params": {"userProject": USER_PROJECT}, - "timeout": 42, - "retry": DEFAULT_RETRY, - } - ] - self.assertEqual(connection._deleted_buckets, expected_cw) + result = bucket.delete( + client=client, if_metageneration_match=metageneration_number, + ) - def test_delete_force_delete_blobs(self): - NAME = "name" - BLOB_NAME1 = "blob-name1" - BLOB_NAME2 = "blob-name2" - GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME1}, {"name": BLOB_NAME2}]} - DELETE_BLOB1_RESP = DELETE_BLOB2_RESP = {} - connection = _Connection(GET_BLOBS_RESP, DELETE_BLOB1_RESP, DELETE_BLOB2_RESP) - connection._delete_bucket = True - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME) - result = bucket.delete(force=True) self.assertIsNone(result) - expected_cw = [ - { - "method": "DELETE", - "path": bucket.path, - "query_params": {}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - } - ] - self.assertEqual(connection._deleted_buckets, expected_cw) - def test_delete_with_metageneration_match(self): - NAME = "name" - BLOB_NAME1 = "blob-name1" - BLOB_NAME2 = "blob-name2" - GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME1}, {"name": BLOB_NAME2}]} - DELETE_BLOB1_RESP = DELETE_BLOB2_RESP = {} - METAGENERATION_NUMBER = 6 - - connection = _Connection(GET_BLOBS_RESP, DELETE_BLOB1_RESP, DELETE_BLOB2_RESP) - connection._delete_bucket = True - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) - result = bucket.delete(if_metageneration_match=METAGENERATION_NUMBER) - self.assertIsNone(result) - expected_cw = [ - { - "method": "DELETE", - "path": bucket.path, - "query_params": {"ifMetagenerationMatch": METAGENERATION_NUMBER}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - } - ] - self.assertEqual(connection._deleted_buckets, expected_cw) + expected_query_params = {"ifMetagenerationMatch": metageneration_number} + client._delete_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) - def test_delete_force_miss_blobs(self): - NAME = "name" - BLOB_NAME = "blob-name1" - GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME}]} - # Note the connection does not have a response for the blob. 
- connection = _Connection(GET_BLOBS_RESP) - connection._delete_bucket = True - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME) - result = bucket.delete(force=True) - self.assertIsNone(result) - expected_cw = [ - { - "method": "DELETE", - "path": bucket.path, - "query_params": {}, - "_target_object": None, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - } - ] - self.assertEqual(connection._deleted_buckets, expected_cw) + def test_delete_hit_w_force_w_user_project_w_explicit_timeout_retry(self): + name = "name" + user_project = "user-project-123" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket = self._make_one(client=client, name=name, user_project=user_project) + bucket.list_blobs = mock.Mock(return_value=iter([])) + bucket.delete_blobs = mock.Mock(return_value=None) + timeout = 42 + retry = mock.Mock(spec=[]) - def test_delete_too_many(self): - NAME = "name" - BLOB_NAME1 = "blob-name1" - BLOB_NAME2 = "blob-name2" - GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME1}, {"name": BLOB_NAME2}]} - connection = _Connection(GET_BLOBS_RESP) - connection._delete_bucket = True - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME) + result = bucket.delete(force=True, timeout=timeout, retry=retry) - # Make the Bucket refuse to delete with 2 objects. - bucket._MAX_OBJECTS_FOR_ITERATION = 1 - self.assertRaises(ValueError, bucket.delete, force=True) - self.assertEqual(connection._deleted_buckets, []) + self.assertIsNone(result) - def test_delete_blob_miss(self): - from google.cloud.exceptions import NotFound + bucket.list_blobs.assert_called_once_with( + max_results=bucket._MAX_OBJECTS_FOR_ITERATION + 1, + client=client, + timeout=timeout, + retry=retry, + ) + + bucket.delete_blobs.assert_called_once_with( + [], on_error=mock.ANY, client=client, timeout=timeout, retry=retry, + ) + + expected_query_params = {"userProject": user_project} + client._delete_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + _target_object=None, + ) + + def test_delete_hit_w_force_delete_blobs(self): + name = "name" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket = self._make_one(client=client, name=name) + blobs = [mock.Mock(spec=[]), mock.Mock(spec=[])] + bucket.list_blobs = mock.Mock(return_value=iter(blobs)) + bucket.delete_blobs = mock.Mock(return_value=None) + + result = bucket.delete(force=True) - NAME = "name" - NONESUCH = "nonesuch" - connection = _Connection() - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) - self.assertRaises(NotFound, bucket.delete_blob, NONESUCH) - (kw,) = connection._requested - self.assertEqual(kw["method"], "DELETE") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, NONESUCH)) - self.assertEqual(kw["query_params"], {}) - self.assertEqual(kw["timeout"], self._get_default_timeout()) - - def test_delete_blob_hit_with_user_project(self): - NAME = "name" - BLOB_NAME = "blob-name" - USER_PROJECT = "user-project-123" - connection = _Connection({}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT) - result = bucket.delete_blob(BLOB_NAME, timeout=42) self.assertIsNone(result) - (kw,) = connection._requested - self.assertEqual(kw["method"], "DELETE") - 
self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw["query_params"], {"userProject": USER_PROJECT}) - self.assertEqual(kw["timeout"], 42) - self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - - def test_delete_blob_hit_with_generation(self): - NAME = "name" - BLOB_NAME = "blob-name" - GENERATION = 1512565576797178 - connection = _Connection({}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) - result = bucket.delete_blob(BLOB_NAME, generation=GENERATION) + + bucket.list_blobs.assert_called_once_with( + max_results=bucket._MAX_OBJECTS_FOR_ITERATION + 1, + client=client, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + bucket.delete_blobs.assert_called_once_with( + blobs, + on_error=mock.ANY, + client=client, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + expected_query_params = {} + client._delete_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) + + def test_delete_w_force_w_user_project_w_miss_on_blob(self): + from google.cloud.exceptions import NotFound + + name = "name" + blob_name = "blob-name" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket = self._make_one(client=client, name=name) + blob = mock.Mock(spec=["name"]) + blob.name = blob_name + blobs = [blob] + bucket.list_blobs = mock.Mock(return_value=iter(blobs)) + bucket.delete_blob = mock.Mock(side_effect=NotFound("testing")) + + result = bucket.delete(force=True) + self.assertIsNone(result) - (kw,) = connection._requested - self.assertEqual(kw["method"], "DELETE") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw["query_params"], {"generation": GENERATION}) - self.assertEqual(kw["timeout"], self._get_default_timeout()) - self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - - def test_delete_blob_with_generation_match(self): - NAME = "name" - BLOB_NAME = "blob-name" - GENERATION = 6 - METAGENERATION = 9 - connection = _Connection({}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) + bucket.delete_blob.assert_called_once_with( + blob_name, + client=client, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + expected_query_params = {} + client._delete_resource.assert_called_once_with( + bucket.path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, + ) + + def test_delete_w_too_many(self): + name = "name" + blob_name1 = "blob-name1" + blob_name2 = "blob-name2" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket = self._make_one(client=client, name=name) + blob1 = mock.Mock(spec=["name"]) + blob1.name = blob_name1 + blob2 = mock.Mock(spec=["name"]) + blob2.name = blob_name2 + blobs = [blob1, blob2] + bucket.list_blobs = mock.Mock(return_value=iter(blobs)) + bucket.delete_blobs = mock.Mock() + # Make the Bucket refuse to delete with 2 objects. 
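+ # delete(force=True) refuses to iterate when the bucket reports more
+ # blobs than _MAX_OBJECTS_FOR_ITERATION; lowering the limit to 1 lets
+ # the two mock blobs above trip the ValueError guard asserted below.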
+ bucket._MAX_OBJECTS_FOR_ITERATION = 1 + + with self.assertRaises(ValueError): + bucket.delete(force=True) + + bucket.delete_blobs.assert_not_called() + + def test_delete_blob_miss_w_defaults(self): + from google.cloud.exceptions import NotFound + + name = "name" + blob_name = "nonesuch" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.side_effect = NotFound("testing") + bucket = self._make_one(client=client, name=name) + + with self.assertRaises(NotFound): + bucket.delete_blob(blob_name) + + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = {} + client._delete_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=None, + ) + + def test_delete_blob_hit_w_user_project_w_timeout(self): + name = "name" + blob_name = "blob-name" + user_project = "user-project-123" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket = self._make_one(client=client, name=name, user_project=user_project) + timeout = 42 + + result = bucket.delete_blob(blob_name, timeout=timeout) + + self.assertIsNone(result) + + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = {"userProject": user_project} + client._delete_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=None, + ) + + def test_delete_blob_hit_w_generation_w_retry(self): + name = "name" + blob_name = "blob-name" + generation = 1512565576797178 + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket = self._make_one(client=client, name=name) + retry = mock.Mock(spec=[]) + + result = bucket.delete_blob(blob_name, generation=generation, retry=retry) + + self.assertIsNone(result) + + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = {"generation": generation} + client._delete_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=None, + ) + + def test_delete_blob_hit_w_generation_match(self): + name = "name" + blob_name = "blob-name" + generation = 6 + metageneration = 9 + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket = self._make_one(client=client, name=name) + result = bucket.delete_blob( - BLOB_NAME, - if_generation_match=GENERATION, - if_metageneration_match=METAGENERATION, + blob_name, + if_generation_match=generation, + if_metageneration_match=metageneration, ) self.assertIsNone(result) - (kw,) = connection._requested - self.assertEqual(kw["method"], "DELETE") - self.assertEqual(kw["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual( - kw["query_params"], - {"ifGenerationMatch": GENERATION, "ifMetagenerationMatch": METAGENERATION}, + + expected_path = "/b/%s/o/%s" % (name, blob_name) + expected_query_params = { + "ifGenerationMatch": generation, + "ifMetagenerationMatch": metageneration, + } + client._delete_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + _target_object=None, ) - self.assertEqual(kw["timeout"], self._get_default_timeout()) - self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) def 
     def test_delete_blobs_empty(self):
-        NAME = "name"
-        connection = _Connection()
-        client = _Client(connection)
-        bucket = self._make_one(client=client, name=NAME)
+        name = "name"
+        bucket = self._make_one(client=None, name=name)
+        bucket.delete_blob = mock.Mock()
+
         bucket.delete_blobs([])
-        self.assertEqual(connection._requested, [])
 
-    def test_delete_blobs_hit_w_user_project(self):
-        NAME = "name"
-        BLOB_NAME = "blob-name"
-        USER_PROJECT = "user-project-123"
-        connection = _Connection({})
-        client = _Client(connection)
-        bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT)
-        bucket.delete_blobs([BLOB_NAME], timeout=42)
-        kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]["method"], "DELETE")
-        self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME))
-        self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT})
-        self.assertEqual(kw[0]["timeout"], 42)
-        self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
-
-    def test_delete_blobs_w_generation_match(self):
-        NAME = "name"
-        BLOB_NAME = "blob-name"
-        BLOB_NAME2 = "blob-name2"
-        GENERATION_NUMBER = 6
-        GENERATION_NUMBER2 = 9
-
-        connection = _Connection({}, {})
-        client = _Client(connection)
-        bucket = self._make_one(client=client, name=NAME)
-        bucket.delete_blobs(
-            [BLOB_NAME, BLOB_NAME2],
-            timeout=42,
-            if_generation_match=[GENERATION_NUMBER, GENERATION_NUMBER2],
-        )
-        kw = connection._requested
-        self.assertEqual(len(kw), 2)
+        bucket.delete_blob.assert_not_called()
 
-        self.assertEqual(kw[0]["method"], "DELETE")
-        self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME))
-        self.assertEqual(kw[0]["timeout"], 42)
-        self.assertEqual(
-            kw[0]["query_params"], {"ifGenerationMatch": GENERATION_NUMBER}
-        )
-        self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
-        self.assertEqual(kw[1]["method"], "DELETE")
-        self.assertEqual(kw[1]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME2))
-        self.assertEqual(kw[1]["timeout"], 42)
-        self.assertEqual(
-            kw[1]["query_params"], {"ifGenerationMatch": GENERATION_NUMBER2}
+    def test_delete_blobs_hit_w_explicit_client_w_timeout(self):
+        name = "name"
+        blob_name = "blob-name"
+        client = mock.Mock(spec=[])
+        bucket = self._make_one(client=None, name=name)
+        bucket.delete_blob = mock.Mock()
+        timeout = 42
+
+        bucket.delete_blobs([blob_name], client=client, timeout=timeout)
+
+        bucket.delete_blob.assert_called_once_with(
+            blob_name,
+            client=client,
+            if_generation_match=None,
+            if_generation_not_match=None,
+            if_metageneration_match=None,
+            if_metageneration_not_match=None,
+            timeout=timeout,
+            retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
         )
-        self.assertEqual(kw[1]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
 
     def test_delete_blobs_w_generation_match_wrong_len(self):
-        NAME = "name"
-        BLOB_NAME = "blob-name"
-        BLOB_NAME2 = "blob-name2"
-        GENERATION_NUMBER = 6
+        name = "name"
+        blob_name = "blob-name"
+        blob_name2 = "blob-name2"
+        generation_number = 6
+        bucket = self._make_one(client=None, name=name)
+        bucket.delete_blob = mock.Mock()
 
-        connection = _Connection()
-        client = _Client(connection)
-        bucket = self._make_one(client=client, name=NAME)
         with self.assertRaises(ValueError):
             bucket.delete_blobs(
-                [BLOB_NAME, BLOB_NAME2],
-                timeout=42,
-                if_generation_not_match=[GENERATION_NUMBER],
+                [blob_name, blob_name2], if_generation_not_match=[generation_number],
             )
 
+        bucket.delete_blob.assert_not_called()
+
+    def test_delete_blobs_w_generation_match_w_retry(self):
+        name = "name"
+        blob_name = "blob-name"
blob_name2 = "blob-name2" + generation_number = 6 + generation_number2 = 9 + client = mock.Mock(spec=[]) + bucket = self._make_one(client=client, name=name) + bucket.delete_blob = mock.Mock() + retry = mock.Mock(spec=[]) + + bucket.delete_blobs( + [blob_name, blob_name2], + if_generation_match=[generation_number, generation_number2], + retry=retry, + ) + + call_1 = mock.call( + blob_name, + client=None, + if_generation_match=generation_number, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + retry=retry, + ) + call_2 = mock.call( + blob_name2, + client=None, + if_generation_match=generation_number2, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + retry=retry, + ) + bucket.delete_blob.assert_has_calls([call_1, call_2]) + def test_delete_blobs_w_generation_match_none(self): - NAME = "name" - BLOB_NAME = "blob-name" - BLOB_NAME2 = "blob-name2" - GENERATION_NUMBER = 6 - GENERATION_NUMBER2 = None - - connection = _Connection({}, {}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) + name = "name" + blob_name = "blob-name" + blob_name2 = "blob-name2" + generation_number = 6 + generation_number2 = None + client = mock.Mock(spec=[]) + bucket = self._make_one(client=client, name=name) + bucket.delete_blob = mock.Mock() + bucket.delete_blobs( - [BLOB_NAME, BLOB_NAME2], - timeout=42, - if_generation_match=[GENERATION_NUMBER, GENERATION_NUMBER2], + [blob_name, blob_name2], + if_generation_match=[generation_number, generation_number2], ) - kw = connection._requested - self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]["method"], "DELETE") - self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw[0]["timeout"], 42) - self.assertEqual( - kw[0]["query_params"], {"ifGenerationMatch": GENERATION_NUMBER} + call_1 = mock.call( + blob_name, + client=None, + if_generation_match=generation_number, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, ) - self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) - self.assertEqual(kw[1]["method"], "DELETE") - self.assertEqual(kw[1]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME2)) - self.assertEqual(kw[1]["timeout"], 42) - self.assertEqual(kw[1]["query_params"], {}) - self.assertEqual(kw[1]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED) + call_2 = mock.call( + blob_name2, + client=None, + if_generation_match=None, + if_generation_not_match=None, + if_metageneration_match=None, + if_metageneration_not_match=None, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + ) + bucket.delete_blob.assert_has_calls([call_1, call_2]) - def test_delete_blobs_miss_no_on_error(self): + def test_delete_blobs_miss_wo_on_error(self): from google.cloud.exceptions import NotFound - NAME = "name" - BLOB_NAME = "blob-name" - NONESUCH = "nonesuch" - connection = _Connection({}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) - self.assertRaises(NotFound, bucket.delete_blobs, [BLOB_NAME, NONESUCH]) - kw = connection._requested - self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]["method"], "DELETE") - self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME)) - self.assertEqual(kw[0]["timeout"], 
-    def test_delete_blobs_miss_no_on_error(self):
+    def test_delete_blobs_miss_wo_on_error(self):
         from google.cloud.exceptions import NotFound
 
-        NAME = "name"
-        BLOB_NAME = "blob-name"
-        NONESUCH = "nonesuch"
-        connection = _Connection({})
-        client = _Client(connection)
-        bucket = self._make_one(client=client, name=NAME)
-        self.assertRaises(NotFound, bucket.delete_blobs, [BLOB_NAME, NONESUCH])
-        kw = connection._requested
-        self.assertEqual(len(kw), 2)
-        self.assertEqual(kw[0]["method"], "DELETE")
-        self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME))
-        self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
-        self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
-        self.assertEqual(kw[1]["method"], "DELETE")
-        self.assertEqual(kw[1]["path"], "/b/%s/o/%s" % (NAME, NONESUCH))
-        self.assertEqual(kw[1]["timeout"], self._get_default_timeout())
-        self.assertEqual(kw[1]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
+        name = "name"
+        blob_name = "blob-name"
+        blob_name2 = "nonesuch"
+        client = mock.Mock(spec=[])
+        bucket = self._make_one(client=client, name=name)
+        bucket.delete_blob = mock.Mock()
+        bucket.delete_blob.side_effect = [None, NotFound("testing")]
+
+        with self.assertRaises(NotFound):
+            bucket.delete_blobs([blob_name, blob_name2])
+
+        call_1 = mock.call(
+            blob_name,
+            client=None,
+            if_generation_match=None,
+            if_generation_not_match=None,
+            if_metageneration_match=None,
+            if_metageneration_not_match=None,
+            timeout=self._get_default_timeout(),
+            retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        )
+        call_2 = mock.call(
+            blob_name2,
+            client=None,
+            if_generation_match=None,
+            if_generation_not_match=None,
+            if_metageneration_match=None,
+            if_metageneration_not_match=None,
+            timeout=self._get_default_timeout(),
+            retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        )
+        bucket.delete_blob.assert_has_calls([call_1, call_2])
 
     def test_delete_blobs_miss_w_on_error(self):
-        NAME = "name"
-        BLOB_NAME = "blob-name"
-        NONESUCH = "nonesuch"
-        connection = _Connection({})
-        client = _Client(connection)
-        bucket = self._make_one(client=client, name=NAME)
+        from google.cloud.exceptions import NotFound
+
+        name = "name"
+        blob_name = "blob-name"
+        blob_name2 = "nonesuch"
+        client = mock.Mock(spec=[])
+        bucket = self._make_one(client=client, name=name)
+        bucket.delete_blob = mock.Mock()
+        bucket.delete_blob.side_effect = [None, NotFound("testing")]
+
         errors = []
-        bucket.delete_blobs([BLOB_NAME, NONESUCH], errors.append)
-        self.assertEqual(errors, [NONESUCH])
-        kw = connection._requested
-        self.assertEqual(len(kw), 2)
-        self.assertEqual(kw[0]["method"], "DELETE")
-        self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME))
-        self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
-        self.assertEqual(kw[0]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
-        self.assertEqual(kw[1]["method"], "DELETE")
-        self.assertEqual(kw[1]["path"], "/b/%s/o/%s" % (NAME, NONESUCH))
-        self.assertEqual(kw[1]["timeout"], self._get_default_timeout())
-        self.assertEqual(kw[1]["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
+        bucket.delete_blobs([blob_name, blob_name2], on_error=errors.append)
+
+        self.assertEqual(errors, [blob_name2])
+
+        call_1 = mock.call(
+            blob_name,
+            client=None,
+            if_generation_match=None,
+            if_generation_not_match=None,
+            if_metageneration_match=None,
+            if_metageneration_not_match=None,
+            timeout=self._get_default_timeout(),
+            retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        )
+        call_2 = mock.call(
+            blob_name2,
+            client=None,
+            if_generation_match=None,
+            if_generation_not_match=None,
+            if_metageneration_match=None,
+            if_metageneration_not_match=None,
+            timeout=self._get_default_timeout(),
+            retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+        )
+        bucket.delete_blob.assert_has_calls([call_1, call_2])
 
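The on_error callback exercised above receives each item that could not be deleted; passing it suppresses the NotFound that delete_blobs would otherwise re-raise. A sketch, assuming the same hypothetical bucket as before:

    missed = []
    bucket.delete_blobs(["blob-name", "nonesuch"], on_error=missed.append)
    # missed == ["nonesuch"] when the second object does not exist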
self.assertEqual(req["method"], "GET") - self.assertEqual(req["path"], "/b/%s" % NAME) - self.assertEqual(req["timeout"], self._get_default_timeout()) - self.assertEqual( - req["query_params"], - {"projection": "noAcl", "ifMetagenerationMatch": METAGENERATION_NUMBER}, + def test_reload_w_metageneration_match(self): + name = "name" + metageneration_number = 9 + api_response = {"name": name} + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client, name=name) + + bucket.reload(if_metageneration_match=metageneration_number) + + expected_path = "/b/%s" % (name,) + expected_query_params = { + "projection": "noAcl", + "ifMetagenerationMatch": metageneration_number, + } + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=bucket, ) - def test_reload_bucket_w_generation_match(self): - connection = _Connection({}) - client = _Client(connection) + def test_reload_w_generation_match(self): + client = self._make_client() bucket = self._make_one(client=client, name="name") with self.assertRaises(TypeError): bucket.reload(if_generation_match=6) - def test_update_bucket_w_metageneration_match(self): - NAME = "name" - METAGENERATION_NUMBER = 9 - - connection = _Connection({}) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) + def test_update_w_metageneration_match(self): + name = "name" + metageneration_number = 9 + client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = {} + bucket = self._make_one(client=client, name=name) - bucket.update(if_metageneration_match=METAGENERATION_NUMBER) + bucket.update(if_metageneration_match=metageneration_number) - self.assertEqual(len(connection._requested), 1) - req = connection._requested[0] - self.assertEqual(req["method"], "PUT") - self.assertEqual(req["path"], "/b/%s" % NAME) - self.assertEqual(req["timeout"], self._get_default_timeout()) - self.assertEqual( - req["query_params"], - {"projection": "full", "ifMetagenerationMatch": METAGENERATION_NUMBER}, + expected_query_params = { + "projection": "full", + "ifMetagenerationMatch": metageneration_number, + } + client._put_resource.assert_called_once_with( + bucket.path, + bucket._properties, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED, + _target_object=bucket, ) - self.assertEqual(req["retry"], DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED) - def test_update_bucket_w_generation_match(self): - connection = _Connection({}) - client = _Client(connection) - bucket = self._make_one(client=client, name="name") + def test_update_w_generation_match(self): + name = "name" + generation_number = 6 + client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = {} + bucket = self._make_one(client=client, name=name) with self.assertRaises(TypeError): - bucket.update(if_generation_match=6) + bucket.update(if_generation_match=generation_number) + + client._put_resource.assert_not_called() @staticmethod def _make_blob(bucket_name, blob_name): @@ -1441,277 +1730,287 @@ def _make_blob(bucket_name, blob_name): return blob def test_copy_blobs_wo_name(self): - SOURCE = "source" - DEST = "dest" - BLOB_NAME = "blob-name" - connection = _Connection({}) - client = _Client(connection) - source = self._make_one(client=client, name=SOURCE) 
     def test_copy_blobs_wo_name(self):
-        SOURCE = "source"
-        DEST = "dest"
-        BLOB_NAME = "blob-name"
-        connection = _Connection({})
-        client = _Client(connection)
-        source = self._make_one(client=client, name=SOURCE)
-        dest = self._make_one(client=client, name=DEST)
-        blob = self._make_blob(SOURCE, BLOB_NAME)
+        source_name = "source"
+        dest_name = "dest"
+        blob_name = "blob-name"
+        api_response = {}
+        client = mock.Mock(spec=["_post_resource"])
+        client._post_resource.return_value = api_response
+        source = self._make_one(client=client, name=source_name)
+        dest = self._make_one(client=client, name=dest_name)
+        blob = self._make_blob(source_name, blob_name)
 
-        new_blob = source.copy_blob(blob, dest, timeout=42)
+        new_blob = source.copy_blob(blob, dest)
 
         self.assertIs(new_blob.bucket, dest)
-        self.assertEqual(new_blob.name, BLOB_NAME)
+        self.assertEqual(new_blob.name, blob_name)
 
-        (kw,) = connection._requested
-        COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format(
-            SOURCE, BLOB_NAME, DEST, BLOB_NAME
+        expected_path = "/b/{}/o/{}/copyTo/b/{}/o/{}".format(
+            source_name, blob_name, dest_name, blob_name
+        )
+        expected_data = None
+        expected_query_params = {}
+        client._post_resource.assert_called_once_with(
+            expected_path,
+            expected_data,
+            query_params=expected_query_params,
+            timeout=self._get_default_timeout(),
+            retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+            _target_object=new_blob,
         )
-        self.assertEqual(kw["method"], "POST")
-        self.assertEqual(kw["path"], COPY_PATH)
-        self.assertEqual(kw["query_params"], {})
-        self.assertEqual(kw["timeout"], 42)
-        self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
 
-    def test_copy_blobs_source_generation(self):
-        SOURCE = "source"
-        DEST = "dest"
-        BLOB_NAME = "blob-name"
-        GENERATION = 1512565576797178
-        connection = _Connection({})
-        client = _Client(connection)
-        source = self._make_one(client=client, name=SOURCE)
-        dest = self._make_one(client=client, name=DEST)
-        blob = self._make_blob(SOURCE, BLOB_NAME)
-
-        new_blob = source.copy_blob(blob, dest, source_generation=GENERATION)
+    def test_copy_blob_w_source_generation_w_timeout(self):
+        source_name = "source"
+        dest_name = "dest"
+        blob_name = "blob-name"
+        generation = 1512565576797178
+        api_response = {}
+        client = mock.Mock(spec=["_post_resource"])
+        client._post_resource.return_value = api_response
+        source = self._make_one(client=client, name=source_name)
+        dest = self._make_one(client=client, name=dest_name)
+        blob = self._make_blob(source_name, blob_name)
+        timeout = 42
+
+        new_blob = source.copy_blob(
+            blob, dest, source_generation=generation, timeout=timeout,
+        )
 
         self.assertIs(new_blob.bucket, dest)
-        self.assertEqual(new_blob.name, BLOB_NAME)
-
-        (kw,) = connection._requested
-        COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format(
-            SOURCE, BLOB_NAME, DEST, BLOB_NAME
-        )
-        self.assertEqual(kw["method"], "POST")
-        self.assertEqual(kw["path"], COPY_PATH)
-        self.assertEqual(kw["query_params"], {"sourceGeneration": GENERATION})
-        self.assertEqual(kw["timeout"], self._get_default_timeout())
-        self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
+        self.assertEqual(new_blob.name, blob_name)
+
+        expected_path = "/b/{}/o/{}/copyTo/b/{}/o/{}".format(
+            source_name, blob_name, dest_name, blob_name
+        )
+        expected_data = None
+        expected_query_params = {"sourceGeneration": generation}
+        client._post_resource.assert_called_once_with(
+            expected_path,
+            expected_data,
+            query_params=expected_query_params,
+            timeout=timeout,
+            retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+            _target_object=new_blob,
+        )
 
-    def test_copy_blobs_w_generation_match(self):
-        SOURCE = "source"
-        DEST = "dest"
-        BLOB_NAME = "blob-name"
-        GENERATION_NUMBER = 6
-        SOURCE_GENERATION_NUMBER = 9
-
-        connection = _Connection({})
-        client = _Client(connection)
-        source = self._make_one(client=client, name=SOURCE)
-        dest = self._make_one(client=client, name=DEST)
-        blob = self._make_blob(SOURCE, BLOB_NAME)
+    def test_copy_blob_w_generation_match_w_retry(self):
+        source_name = "source"
+        dest_name = "dest"
+        blob_name = "blob-name"
+        generation_number = 6
+        source_generation_number = 9
+        api_response = {}
+        client = mock.Mock(spec=["_post_resource"])
+        client._post_resource.return_value = api_response
+        source = self._make_one(client=client, name=source_name)
+        dest = self._make_one(client=client, name=dest_name)
+        blob = self._make_blob(source_name, blob_name)
+        retry = mock.Mock(spec=[])
 
         new_blob = source.copy_blob(
             blob,
             dest,
-            if_generation_match=GENERATION_NUMBER,
-            if_source_generation_match=SOURCE_GENERATION_NUMBER,
+            if_generation_match=generation_number,
+            if_source_generation_match=source_generation_number,
+            retry=retry,
         )
 
         self.assertIs(new_blob.bucket, dest)
-        self.assertEqual(new_blob.name, BLOB_NAME)
+        self.assertEqual(new_blob.name, blob_name)
 
-        (kw,) = connection._requested
-        COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format(
-            SOURCE, BLOB_NAME, DEST, BLOB_NAME
+        expected_path = "/b/{}/o/{}/copyTo/b/{}/o/{}".format(
+            source_name, blob_name, dest_name, blob_name
         )
-        self.assertEqual(kw["method"], "POST")
-        self.assertEqual(kw["path"], COPY_PATH)
-        self.assertEqual(
-            kw["query_params"],
-            {
-                "ifGenerationMatch": GENERATION_NUMBER,
-                "ifSourceGenerationMatch": SOURCE_GENERATION_NUMBER,
-            },
+        expected_data = None
+        expected_query_params = {
+            "ifGenerationMatch": generation_number,
+            "ifSourceGenerationMatch": source_generation_number,
+        }
+        client._post_resource.assert_called_once_with(
+            expected_path,
+            expected_data,
+            query_params=expected_query_params,
+            timeout=self._get_default_timeout(),
+            retry=retry,
+            _target_object=new_blob,
         )
-        self.assertEqual(kw["timeout"], self._get_default_timeout())
-        self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
 
-    def test_copy_blobs_preserve_acl(self):
+    def test_copy_blob_w_preserve_acl_false_w_explicit_client(self):
         from google.cloud.storage.acl import ObjectACL
 
-        SOURCE = "source"
-        DEST = "dest"
-        BLOB_NAME = "blob-name"
-        NEW_NAME = "new_name"
-
-        connection = _Connection({}, {})
-        client = _Client(connection)
-        source = self._make_one(client=client, name=SOURCE)
-        dest = self._make_one(client=client, name=DEST)
-        blob = self._make_blob(SOURCE, BLOB_NAME)
+        source_name = "source"
+        dest_name = "dest"
+        blob_name = "blob-name"
+        new_name = "new_name"
+        post_api_response = {}
+        patch_api_response = {}
+        client = mock.Mock(spec=["_post_resource", "_patch_resource"])
+        client._post_resource.return_value = post_api_response
+        client._patch_resource.return_value = patch_api_response
+        source = self._make_one(client=None, name=source_name)
+        dest = self._make_one(client=None, name=dest_name)
+        blob = self._make_blob(source_name, blob_name)
 
         new_blob = source.copy_blob(
-            blob, dest, NEW_NAME, client=client, preserve_acl=False
+            blob, dest, new_name, client=client, preserve_acl=False
         )
 
         self.assertIs(new_blob.bucket, dest)
-        self.assertEqual(new_blob.name, NEW_NAME)
+        self.assertEqual(new_blob.name, new_name)
         self.assertIsInstance(new_blob.acl, ObjectACL)
 
-        kw1, kw2 = connection._requested
-        COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format(
-            SOURCE, BLOB_NAME, DEST, NEW_NAME
+        expected_copy_path = "/b/{}/o/{}/copyTo/b/{}/o/{}".format(
+            source_name, blob_name, dest_name, new_name
+        )
+        expected_copy_data = None
+        expected_copy_query_params = {}
+        client._post_resource.assert_called_once_with(
+            expected_copy_path,
+            expected_copy_data,
+            query_params=expected_copy_query_params,
+            timeout=self._get_default_timeout(),
+            retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+            _target_object=new_blob,
         )
-        NEW_BLOB_PATH = "/b/{}/o/{}".format(DEST, NEW_NAME)
-
-        self.assertEqual(kw1["method"], "POST")
-        self.assertEqual(kw1["path"], COPY_PATH)
-        self.assertEqual(kw1["query_params"], {})
-        self.assertEqual(kw1["timeout"], self._get_default_timeout())
-        self.assertEqual(kw1["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
-        self.assertEqual(kw2["method"], "PATCH")
-        self.assertEqual(kw2["path"], NEW_BLOB_PATH)
-        self.assertEqual(kw2["query_params"], {"projection": "full"})
-        self.assertEqual(kw2["timeout"], self._get_default_timeout())
+
+        expected_patch_path = "/b/{}/o/{}".format(dest_name, new_name)
+        expected_patch_data = {"acl": []}
+        expected_patch_query_params = {"projection": "full"}
+        client._patch_resource.assert_called_once_with(
+            expected_patch_path,
+            expected_patch_data,
+            query_params=expected_patch_query_params,
+            timeout=self._get_default_timeout(),
+            retry=None,
+        )
 
-    def test_copy_blobs_w_name_and_user_project(self):
-        SOURCE = "source"
-        DEST = "dest"
-        BLOB_NAME = "blob-name"
-        NEW_NAME = "new_name"
-        USER_PROJECT = "user-project-123"
-        connection = _Connection({})
-        client = _Client(connection)
-        source = self._make_one(client=client, name=SOURCE, user_project=USER_PROJECT)
-        dest = self._make_one(client=client, name=DEST)
-        blob = self._make_blob(SOURCE, BLOB_NAME)
+    def test_copy_blob_w_name_and_user_project(self):
+        source_name = "source"
+        dest_name = "dest"
+        blob_name = "blob-name"
+        new_name = "new_name"
+        user_project = "user-project-123"
+        api_response = {}
+        client = mock.Mock(spec=["_post_resource"])
+        client._post_resource.return_value = api_response
+        source = self._make_one(
+            client=client, name=source_name, user_project=user_project
+        )
+        dest = self._make_one(client=client, name=dest_name)
+        blob = self._make_blob(source_name, blob_name)
 
-        new_blob = source.copy_blob(blob, dest, NEW_NAME)
+        new_blob = source.copy_blob(blob, dest, new_name)
 
         self.assertIs(new_blob.bucket, dest)
-        self.assertEqual(new_blob.name, NEW_NAME)
+        self.assertEqual(new_blob.name, new_name)
 
-        COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format(
-            SOURCE, BLOB_NAME, DEST, NEW_NAME
+        expected_path = "/b/{}/o/{}/copyTo/b/{}/o/{}".format(
+            source_name, blob_name, dest_name, new_name
         )
-        (kw,) = connection._requested
-        self.assertEqual(kw["method"], "POST")
-        self.assertEqual(kw["path"], COPY_PATH)
-        self.assertEqual(kw["query_params"], {"userProject": USER_PROJECT})
-        self.assertEqual(kw["timeout"], self._get_default_timeout())
-        self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
+        expected_data = None
+        expected_query_params = {"userProject": user_project}
+        client._post_resource.assert_called_once_with(
+            expected_path,
+            expected_data,
+            query_params=expected_query_params,
+            timeout=self._get_default_timeout(),
+            retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
+            _target_object=new_blob,
+        )
 
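A usage sketch of the copy_blob calls asserted above (bucket names are hypothetical; per the preserve_acl test, passing preserve_acl=False issues a follow-up PATCH that clears the copied object's ACL):

    source = client.bucket("source")
    dest = client.bucket("dest")
    blob = source.blob("blob-name")
    new_blob = source.copy_blob(blob, dest, "new_name", preserve_acl=False)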
-    def test_rename_blob(self):
-        BUCKET_NAME = "BUCKET_NAME"
-        BLOB_NAME = "blob-name"
-        NEW_BLOB_NAME = "new-blob-name"
-        DATA = {"name": NEW_BLOB_NAME}
-        connection = _Connection(DATA)
-        client = _Client(connection)
-        bucket = self._make_one(client=client, name=BUCKET_NAME)
-        blob = self._make_blob(BUCKET_NAME, BLOB_NAME)
-
-        renamed_blob = bucket.rename_blob(
-            blob, NEW_BLOB_NAME, client=client, timeout=42
-        )
-
-        self.assertIs(renamed_blob.bucket, bucket)
-        self.assertEqual(renamed_blob.name, NEW_BLOB_NAME)
-
-        COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format(
-            BUCKET_NAME, BLOB_NAME, BUCKET_NAME, NEW_BLOB_NAME
-        )
-        (kw,) = connection._requested
-        self.assertEqual(kw["method"], "POST")
-        self.assertEqual(kw["path"], COPY_PATH)
-        self.assertEqual(kw["query_params"], {})
-        self.assertEqual(kw["timeout"], 42)
-        self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
-
-        blob.delete.assert_called_once_with(
-            client=client,
-            timeout=42,
-            if_generation_match=None,
-            if_generation_not_match=None,
-            if_metageneration_match=None,
-            if_metageneration_not_match=None,
-            retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
-        )
-
-    def test_rename_blob_with_generation_match(self):
-        BUCKET_NAME = "BUCKET_NAME"
-        BLOB_NAME = "blob-name"
-        NEW_BLOB_NAME = "new-blob-name"
-        DATA = {"name": NEW_BLOB_NAME}
-        GENERATION_NUMBER = 6
-        SOURCE_GENERATION_NUMBER = 7
-        SOURCE_METAGENERATION_NUMBER = 9
-
-        connection = _Connection(DATA)
-        client = _Client(connection)
-        bucket = self._make_one(client=client, name=BUCKET_NAME)
-        blob = self._make_blob(BUCKET_NAME, BLOB_NAME)
-
-        renamed_blob = bucket.rename_blob(
-            blob,
-            NEW_BLOB_NAME,
-            client=client,
-            timeout=42,
-            if_generation_match=GENERATION_NUMBER,
-            if_source_generation_match=SOURCE_GENERATION_NUMBER,
-            if_source_metageneration_not_match=SOURCE_METAGENERATION_NUMBER,
-        )
-
-        self.assertIs(renamed_blob.bucket, bucket)
-        self.assertEqual(renamed_blob.name, NEW_BLOB_NAME)
-
-        COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format(
-            BUCKET_NAME, BLOB_NAME, BUCKET_NAME, NEW_BLOB_NAME
-        )
-        (kw,) = connection._requested
-        self.assertEqual(kw["method"], "POST")
-        self.assertEqual(kw["path"], COPY_PATH)
-        self.assertEqual(
-            kw["query_params"],
-            {
-                "ifGenerationMatch": GENERATION_NUMBER,
-                "ifSourceGenerationMatch": SOURCE_GENERATION_NUMBER,
-                "ifSourceMetagenerationNotMatch": SOURCE_METAGENERATION_NUMBER,
-            },
-        )
-        self.assertEqual(kw["timeout"], 42)
-        self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
-
-        blob.delete.assert_called_once_with(
-            client=client,
-            timeout=42,
-            if_generation_match=SOURCE_GENERATION_NUMBER,
-            if_generation_not_match=None,
-            if_metageneration_match=None,
-            if_metageneration_not_match=SOURCE_METAGENERATION_NUMBER,
-            retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED,
-        )
-
-    def test_rename_blob_to_itself(self):
-        BUCKET_NAME = "BUCKET_NAME"
-        BLOB_NAME = "blob-name"
-        DATA = {"name": BLOB_NAME}
-        connection = _Connection(DATA)
-        client = _Client(connection)
-        bucket = self._make_one(client=client, name=BUCKET_NAME)
-        blob = self._make_blob(BUCKET_NAME, BLOB_NAME)
-
-        renamed_blob = bucket.rename_blob(blob, BLOB_NAME)
-
-        self.assertIs(renamed_blob.bucket, bucket)
-        self.assertEqual(renamed_blob.name, BLOB_NAME)
-
-        COPY_PATH = "/b/{}/o/{}/copyTo/b/{}/o/{}".format(
-            BUCKET_NAME, BLOB_NAME, BUCKET_NAME, BLOB_NAME
-        )
-        (kw,) = connection._requested
-        self.assertEqual(kw["method"], "POST")
-        self.assertEqual(kw["path"], COPY_PATH)
-        self.assertEqual(kw["query_params"], {})
-        self.assertEqual(kw["timeout"], self._get_default_timeout())
-        self.assertEqual(kw["retry"], DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
-        blob.delete.assert_not_called()
+    def _rename_blob_helper(self, explicit_client=False, same_name=False, **kw):
+        bucket_name = "BUCKET_NAME"
+        blob_name = "blob-name"
+
+        if same_name:
+            new_blob_name = blob_name
+        else:
+            new_blob_name = "new-blob-name"
+
+        client = mock.Mock(spec=[])
+        kw = kw.copy()
+
+        if explicit_client:
+            bucket = self._make_one(client=None, name=bucket_name)
+            expected_client = kw["client"] = client
+        else:
+            bucket = self._make_one(client=client, name=bucket_name)
+            expected_client = None
+
+        expected_i_g_m = kw.get("if_generation_match")
+        expected_i_g_n_m = kw.get("if_generation_not_match")
+        expected_i_m_m = kw.get("if_metageneration_match")
+        expected_i_m_n_m = kw.get("if_metageneration_not_match")
+        expected_i_s_g_m = kw.get("if_source_generation_match")
+        expected_i_s_g_n_m = kw.get("if_source_generation_not_match")
+        expected_i_s_m_m = kw.get("if_source_metageneration_match")
+        expected_i_s_m_n_m = kw.get("if_source_metageneration_not_match")
+        expected_timeout = kw.get("timeout", self._get_default_timeout())
+        expected_retry = kw.get("retry", DEFAULT_RETRY_IF_GENERATION_SPECIFIED)
+
+        bucket.copy_blob = mock.Mock(spec=[])
+        blob = self._make_blob(bucket_name, blob_name)
+
+        renamed_blob = bucket.rename_blob(blob, new_blob_name, **kw)
+
+        self.assertIs(renamed_blob, bucket.copy_blob.return_value)
+
+        bucket.copy_blob.assert_called_once_with(
+            blob,
+            bucket,
+            new_blob_name,
+            client=expected_client,
+            if_generation_match=expected_i_g_m,
+            if_generation_not_match=expected_i_g_n_m,
+            if_metageneration_match=expected_i_m_m,
+            if_metageneration_not_match=expected_i_m_n_m,
+            if_source_generation_match=expected_i_s_g_m,
+            if_source_generation_not_match=expected_i_s_g_n_m,
+            if_source_metageneration_match=expected_i_s_m_m,
+            if_source_metageneration_not_match=expected_i_s_m_n_m,
+            timeout=expected_timeout,
+            retry=expected_retry,
+        )
+
+        if same_name:
+            blob.delete.assert_not_called()
+        else:
+            blob.delete.assert_called_once_with(
+                client=expected_client,
+                if_generation_match=expected_i_s_g_m,
+                if_generation_not_match=expected_i_s_g_n_m,
+                if_metageneration_match=expected_i_s_m_m,
+                if_metageneration_not_match=expected_i_s_m_n_m,
+                timeout=expected_timeout,
+                retry=expected_retry,
+            )
+
+    def test_rename_blob_w_defaults(self):
+        self._rename_blob_helper()
+
+    def test_rename_blob_w_explicit_client(self):
+        self._rename_blob_helper(explicit_client=True)
+
+    def test_rename_blob_w_generation_match(self):
+        generation_number = 6
+        source_generation_number = 7
+        source_metageneration_number = 9
+
+        self._rename_blob_helper(
+            if_generation_match=generation_number,
+            if_source_generation_match=source_generation_number,
+            if_source_metageneration_not_match=source_metageneration_number,
+        )
+
+    def test_rename_blob_w_timeout(self):
+        timeout = 42
+        self._rename_blob_helper(timeout=timeout)
+
+    def test_rename_blob_w_retry(self):
+        retry = mock.Mock(spec=[])
+        self._rename_blob_helper(retry=retry)
+
+    def test_rename_blob_to_itself(self):
+        self._rename_blob_helper(same_name=True)
 
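As the helper above asserts, rename_blob is a copy into the same bucket followed by a delete of the source object, and the delete is skipped when the new name equals the old one; the if_source_* preconditions are forwarded to that delete. A sketch, with hypothetical names:

    blob = bucket.blob("old-name")
    new_blob = bucket.rename_blob(blob, "new-name")
    # new_blob is the copy; the original "old-name" object has been deleted.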
     def test_etag(self):
         ETAG = "ETAG"
@@ -1783,15 +2082,22 @@ def test_iam_configuration_policy_w_entry(self):
         self.assertTrue(config.uniform_bucket_level_access_enabled)
         self.assertEqual(config.uniform_bucket_level_access_locked_time, now)
 
-    def test_lifecycle_rules_getter_unknown_action_type(self):
+    @mock.patch("warnings.warn")
+    def test_lifecycle_rules_getter_unknown_action_type(self, mock_warn):
         NAME = "name"
         BOGUS_RULE = {"action": {"type": "Bogus"}, "condition": {"age": 42}}
         rules = [BOGUS_RULE]
         properties = {"lifecycle": {"rule": rules}}
         bucket = self._make_one(name=NAME, properties=properties)
 
-        with self.assertRaises(ValueError):
-            list(bucket.lifecycle_rules)
+        list(bucket.lifecycle_rules)
+
+        mock_warn.assert_called_with(
+            "Unknown lifecycle rule type received: {}. Please upgrade to the latest version of google-cloud-storage.".format(
+                BOGUS_RULE
+            ),
+            UserWarning,
+            stacklevel=1,
+        )
 
     def test_lifecycle_rules_getter(self):
         from google.cloud.storage.bucket import (
@@ -2005,23 +2311,39 @@ def test_labels_setter_with_removal(self):
         self.assertEqual(bucket.labels, {"color": "red"})
 
         # Make sure that a patch call correctly removes the flavor label.
-        client = mock.NonCallableMock(spec=("_connection",))
-        client._connection = mock.NonCallableMock(spec=("api_request",))
+        client = mock.Mock(spec=["_patch_resource"])
+        client._patch_resource.return_value = {}
+
         bucket.patch(client=client)
-        client._connection.api_request.assert_called()
-        _, _, kwargs = client._connection.api_request.mock_calls[0]
-        self.assertEqual(len(kwargs["data"]["labels"]), 2)
-        self.assertEqual(kwargs["data"]["labels"]["color"], "red")
-        self.assertIsNone(kwargs["data"]["labels"]["flavor"])
-        self.assertEqual(kwargs["timeout"], self._get_default_timeout())
+
+        expected_patch_data = {
+            "labels": {"color": "red", "flavor": None},
+        }
+        expected_query_params = {"projection": "full"}
+        client._patch_resource.assert_called_once_with(
+            bucket.path,
+            expected_patch_data,
+            query_params=expected_query_params,
+            timeout=self._get_default_timeout(),
+            retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+            _target_object=bucket,
+        )
 
         # A second patch call should be a no-op for labels.
-        client._connection.api_request.reset_mock()
+        client._patch_resource.reset_mock()
+
         bucket.patch(client=client, timeout=42)
-        client._connection.api_request.assert_called()
-        _, _, kwargs = client._connection.api_request.mock_calls[0]
-        self.assertNotIn("labels", kwargs["data"])
-        self.assertEqual(kwargs["timeout"], 42)
+
+        expected_patch_data = {}
+        expected_query_params = {"projection": "full"}
+        client._patch_resource.assert_called_once_with(
+            bucket.path,
+            expected_patch_data,
+            query_params=expected_query_params,
+            timeout=42,
+            retry=DEFAULT_RETRY_IF_METAGENERATION_SPECIFIED,
+            _target_object=bucket,
+        )
 
     def test_location_type_getter_unset(self):
         bucket = self._make_one()
@@ -2302,23 +2624,22 @@ def test_versioning_enabled_getter(self):
         self.assertEqual(bucket.versioning_enabled, True)
 
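The labels test documents the removal protocol the client uses: a label that was dropped locally is sent once as an explicit None (JSON null) so the server deletes it, and is omitted from subsequent patches. A sketch, assuming the bucket already carried both labels server-side:

    bucket.labels = {"color": "red", "flavor": "vanilla"}
    bucket.patch()
    bucket.labels = {"color": "red"}  # drop "flavor" locally
    bucket.patch()  # sends {"labels": {"color": "red", "flavor": None}}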
     @mock.patch("warnings.warn")
-    def test_create_deprecated(self, mock_warn):
-        PROJECT = "PROJECT"
-        BUCKET_NAME = "bucket-name"
-        DATA = {"name": BUCKET_NAME}
-        connection = _make_connection(DATA)
-        client = self._make_client(project=PROJECT)
-        client._base_connection = connection
+    def test_create_w_defaults_deprecated(self, mock_warn):
+        bucket_name = "bucket-name"
+        api_response = {"name": bucket_name}
+        client = mock.Mock(spec=["create_bucket"])
+        client.create_bucket.return_value = api_response
+        bucket = self._make_one(client=client, name=bucket_name)
 
-        bucket = self._make_one(client=client, name=BUCKET_NAME)
         bucket.create()
 
-        connection.api_request.assert_called_once_with(
-            method="POST",
-            path="/b",
-            query_params={"project": PROJECT},
-            data=DATA,
-            _target_object=bucket,
+        client.create_bucket.assert_called_once_with(
+            bucket_or_name=bucket,
+            project=None,
+            user_project=None,
+            location=None,
+            predefined_acl=None,
+            predefined_default_object_acl=None,
             timeout=self._get_default_timeout(),
             retry=DEFAULT_RETRY,
         )
@@ -2331,26 +2652,40 @@ def test_create_deprecated(self, mock_warn):
         )
 
     @mock.patch("warnings.warn")
-    def test_create_w_user_project(self, mock_warn):
-        PROJECT = "PROJECT"
-        BUCKET_NAME = "bucket-name"
-        DATA = {"name": BUCKET_NAME}
-        connection = _make_connection(DATA)
-        client = self._make_client(project=PROJECT)
-        client._base_connection = connection
-
-        bucket = self._make_one(client=client, name=BUCKET_NAME)
-        bucket._user_project = "USER_PROJECT"
-        bucket.create()
+    def test_create_w_explicit_deprecated(self, mock_warn):
+        project = "PROJECT"
+        location = "eu"
+        user_project = "USER_PROJECT"
+        bucket_name = "bucket-name"
+        predefined_acl = "authenticatedRead"
+        predefined_default_object_acl = "bucketOwnerFullControl"
+        api_response = {"name": bucket_name}
+        client = mock.Mock(spec=["create_bucket"])
+        client.create_bucket.return_value = api_response
+        bucket = self._make_one(client=None, name=bucket_name)
+        bucket._user_project = user_project
+        timeout = 42
+        retry = mock.Mock(spec=[])
+
+        bucket.create(
+            client=client,
+            project=project,
+            location=location,
+            predefined_acl=predefined_acl,
+            predefined_default_object_acl=predefined_default_object_acl,
+            timeout=timeout,
+            retry=retry,
+        )
 
-        connection.api_request.assert_called_once_with(
-            method="POST",
-            path="/b",
-            query_params={"project": PROJECT, "userProject": "USER_PROJECT"},
-            data=DATA,
-            _target_object=bucket,
-            timeout=self._get_default_timeout(),
-            retry=DEFAULT_RETRY,
+        client.create_bucket.assert_called_once_with(
+            bucket_or_name=bucket,
+            project=project,
+            user_project=user_project,
+            location=location,
+            predefined_acl=predefined_acl,
+            predefined_default_object_acl=predefined_default_object_acl,
+            timeout=timeout,
+            retry=retry,
        )
 
         mock_warn.assert_called_with(
@@ -2408,345 +2743,400 @@ def test_disable_website(self):
         bucket.disable_website()
         self.assertEqual(bucket._properties, UNSET)
 
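Per these tests, Bucket.create is now a deprecated shim that warns and delegates to Client.create_bucket, which is the preferred spelling:

    # Deprecated: bucket.create(location="eu")
    bucket = client.create_bucket("bucket-name", location="eu")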
-    def test_get_iam_policy(self):
+    def test_get_iam_policy_defaults(self):
         from google.cloud.storage.iam import STORAGE_OWNER_ROLE
         from google.cloud.storage.iam import STORAGE_EDITOR_ROLE
         from google.cloud.storage.iam import STORAGE_VIEWER_ROLE
         from google.api_core.iam import Policy
 
-        NAME = "name"
-        PATH = "/b/%s" % (NAME,)
-        ETAG = "DEADBEEF"
-        VERSION = 1
-        OWNER1 = "user:phred@example.com"
-        OWNER2 = "group:cloud-logs@google.com"
-        EDITOR1 = "domain:google.com"
-        EDITOR2 = "user:phred@example.com"
-        VIEWER1 = "serviceAccount:1234-abcdef@service.example.com"
-        VIEWER2 = "user:phred@example.com"
-        RETURNED = {
-            "resourceId": PATH,
-            "etag": ETAG,
-            "version": VERSION,
+        bucket_name = "name"
+        path = "/b/%s" % (bucket_name,)
+        etag = "DEADBEEF"
+        version = 1
+        owner1 = "user:phred@example.com"
+        owner2 = "group:cloud-logs@google.com"
+        editor1 = "domain:google.com"
+        editor2 = "user:phred@example.com"
+        viewer1 = "serviceAccount:1234-abcdef@service.example.com"
+        viewer2 = "user:phred@example.com"
+        api_response = {
+            "resourceId": path,
+            "etag": etag,
+            "version": version,
             "bindings": [
-                {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]},
-                {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]},
-                {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]},
+                {"role": STORAGE_OWNER_ROLE, "members": [owner1, owner2]},
+                {"role": STORAGE_EDITOR_ROLE, "members": [editor1, editor2]},
+                {"role": STORAGE_VIEWER_ROLE, "members": [viewer1, viewer2]},
             ],
         }
-        EXPECTED = {
-            binding["role"]: set(binding["members"]) for binding in RETURNED["bindings"]
+        expected_policy = {
+            binding["role"]: set(binding["members"])
+            for binding in api_response["bindings"]
         }
-        connection = _Connection(RETURNED)
-        client = _Client(connection, None)
-        bucket = self._make_one(client=client, name=NAME)
+        client = mock.Mock(spec=["_get_resource"])
+        client._get_resource.return_value = api_response
+        bucket = self._make_one(client=client, name=bucket_name)
 
-        policy = bucket.get_iam_policy(timeout=42)
+        policy = bucket.get_iam_policy()
 
         self.assertIsInstance(policy, Policy)
-        self.assertEqual(policy.etag, RETURNED["etag"])
-        self.assertEqual(policy.version, RETURNED["version"])
-        self.assertEqual(dict(policy), EXPECTED)
-
-        kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]["method"], "GET")
-        self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,))
-        self.assertEqual(kw[0]["query_params"], {})
-        self.assertEqual(kw[0]["timeout"], 42)
-
-    def test_get_iam_policy_w_user_project(self):
+        self.assertEqual(policy.etag, api_response["etag"])
+        self.assertEqual(policy.version, api_response["version"])
+        self.assertEqual(dict(policy), expected_policy)
+
+        expected_path = "/b/%s/iam" % (bucket_name,)
+        expected_query_params = {}
+        client._get_resource.assert_called_once_with(
+            expected_path,
+            query_params=expected_query_params,
+            timeout=self._get_default_timeout(),
+            retry=DEFAULT_RETRY,
+            _target_object=None,
+        )
+
+    def test_get_iam_policy_w_user_project_w_timeout(self):
         from google.api_core.iam import Policy
 
-        NAME = "name"
-        USER_PROJECT = "user-project-123"
-        PATH = "/b/%s" % (NAME,)
-        ETAG = "DEADBEEF"
-        VERSION = 1
-        RETURNED = {
-            "resourceId": PATH,
-            "etag": ETAG,
-            "version": VERSION,
+        bucket_name = "name"
+        timeout = 42
+        user_project = "user-project-123"
+        path = "/b/%s" % (bucket_name,)
+        etag = "DEADBEEF"
+        version = 1
+        api_response = {
+            "resourceId": path,
+            "etag": etag,
+            "version": version,
             "bindings": [],
         }
-        EXPECTED = {}
-        connection = _Connection(RETURNED)
-        client = _Client(connection, None)
-        bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT)
+        expected_policy = {}
+        client = mock.Mock(spec=["_get_resource"])
+        client._get_resource.return_value = api_response
+        bucket = self._make_one(
+            client=client, name=bucket_name, user_project=user_project
+        )
 
-        policy = bucket.get_iam_policy()
+        policy = bucket.get_iam_policy(timeout=timeout)
 
         self.assertIsInstance(policy, Policy)
-        self.assertEqual(policy.etag, RETURNED["etag"])
-        self.assertEqual(policy.version, RETURNED["version"])
-        self.assertEqual(dict(policy), EXPECTED)
-
-        kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]["method"], "GET")
-        self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,))
-        self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT})
-        self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
+        self.assertEqual(policy.etag, api_response["etag"])
+        self.assertEqual(policy.version, api_response["version"])
+        self.assertEqual(dict(policy), expected_policy)
+
+        expected_path = "/b/%s/iam" % (bucket_name,)
+        expected_query_params = {"userProject": user_project}
+        client._get_resource.assert_called_once_with(
+            expected_path,
+            query_params=expected_query_params,
+            timeout=timeout,
+            retry=DEFAULT_RETRY,
+            _target_object=None,
+        )
 
= "user:phred@example.com" + owner2 = "group:cloud-logs@google.com" + api_response = { + "resourceId": path, + "etag": etag, + "version": version, + "bindings": [{"role": STORAGE_OWNER_ROLE, "members": [owner1, owner2]}], } - connection = _Connection(RETURNED) - client = _Client(connection, None) - bucket = self._make_one(client=client, name=NAME) + retry = mock.Mock(spec=[]) + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = api_response + bucket = self._make_one(client=client, name=bucket_name) - policy = bucket.get_iam_policy(requested_policy_version=3) + policy = bucket.get_iam_policy(requested_policy_version=3, retry=retry) - self.assertEqual(policy.version, VERSION) + self.assertEqual(policy.version, version) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "GET") - self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,)) - self.assertEqual(kw[0]["query_params"], {"optionsRequestedPolicyVersion": 3}) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) + expected_path = "/b/%s/iam" % (bucket_name,) + expected_query_params = {"optionsRequestedPolicyVersion": version} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=None, + ) - def test_set_iam_policy(self): + def test_set_iam_policy_w_defaults(self): import operator from google.cloud.storage.iam import STORAGE_OWNER_ROLE from google.cloud.storage.iam import STORAGE_EDITOR_ROLE from google.cloud.storage.iam import STORAGE_VIEWER_ROLE from google.api_core.iam import Policy - NAME = "name" - PATH = "/b/%s" % (NAME,) - ETAG = "DEADBEEF" - VERSION = 1 - OWNER1 = "user:phred@example.com" - OWNER2 = "group:cloud-logs@google.com" - EDITOR1 = "domain:google.com" - EDITOR2 = "user:phred@example.com" - VIEWER1 = "serviceAccount:1234-abcdef@service.example.com" - VIEWER2 = "user:phred@example.com" - BINDINGS = [ - {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]}, - {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]}, - {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]}, + name = "name" + etag = "DEADBEEF" + version = 1 + owner1 = "user:phred@example.com" + owner2 = "group:cloud-logs@google.com" + editor1 = "domain:google.com" + editor2 = "user:phred@example.com" + viewer1 = "serviceAccount:1234-abcdef@service.example.com" + viewer2 = "user:phred@example.com" + bindings = [ + {"role": STORAGE_OWNER_ROLE, "members": [owner1, owner2]}, + {"role": STORAGE_EDITOR_ROLE, "members": [editor1, editor2]}, + {"role": STORAGE_VIEWER_ROLE, "members": [viewer1, viewer2]}, ] - RETURNED = {"etag": ETAG, "version": VERSION, "bindings": BINDINGS} policy = Policy() - for binding in BINDINGS: + for binding in bindings: policy[binding["role"]] = binding["members"] - connection = _Connection(RETURNED) - client = _Client(connection, None) - bucket = self._make_one(client=client, name=NAME) + api_response = {"etag": etag, "version": version, "bindings": bindings} + client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = api_response + bucket = self._make_one(client=client, name=name) - returned = bucket.set_iam_policy(policy, timeout=42) + returned = bucket.set_iam_policy(policy) - self.assertEqual(returned.etag, ETAG) - self.assertEqual(returned.version, VERSION) + self.assertEqual(returned.etag, etag) + self.assertEqual(returned.version, version) self.assertEqual(dict(returned), dict(policy)) - kw = 
-    def test_set_iam_policy(self):
+    def test_set_iam_policy_w_defaults(self):
         import operator
         from google.cloud.storage.iam import STORAGE_OWNER_ROLE
         from google.cloud.storage.iam import STORAGE_EDITOR_ROLE
         from google.cloud.storage.iam import STORAGE_VIEWER_ROLE
         from google.api_core.iam import Policy
 
-        NAME = "name"
-        PATH = "/b/%s" % (NAME,)
-        ETAG = "DEADBEEF"
-        VERSION = 1
-        OWNER1 = "user:phred@example.com"
-        OWNER2 = "group:cloud-logs@google.com"
-        EDITOR1 = "domain:google.com"
-        EDITOR2 = "user:phred@example.com"
-        VIEWER1 = "serviceAccount:1234-abcdef@service.example.com"
-        VIEWER2 = "user:phred@example.com"
-        BINDINGS = [
-            {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]},
-            {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]},
-            {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]},
+        name = "name"
+        etag = "DEADBEEF"
+        version = 1
+        owner1 = "user:phred@example.com"
+        owner2 = "group:cloud-logs@google.com"
+        editor1 = "domain:google.com"
+        editor2 = "user:phred@example.com"
+        viewer1 = "serviceAccount:1234-abcdef@service.example.com"
+        viewer2 = "user:phred@example.com"
+        bindings = [
+            {"role": STORAGE_OWNER_ROLE, "members": [owner1, owner2]},
+            {"role": STORAGE_EDITOR_ROLE, "members": [editor1, editor2]},
+            {"role": STORAGE_VIEWER_ROLE, "members": [viewer1, viewer2]},
         ]
-        RETURNED = {"etag": ETAG, "version": VERSION, "bindings": BINDINGS}
         policy = Policy()
-        for binding in BINDINGS:
+        for binding in bindings:
             policy[binding["role"]] = binding["members"]
 
-        connection = _Connection(RETURNED)
-        client = _Client(connection, None)
-        bucket = self._make_one(client=client, name=NAME)
+        api_response = {"etag": etag, "version": version, "bindings": bindings}
+        client = mock.Mock(spec=["_put_resource"])
+        client._put_resource.return_value = api_response
+        bucket = self._make_one(client=client, name=name)
 
-        returned = bucket.set_iam_policy(policy, timeout=42)
+        returned = bucket.set_iam_policy(policy)
 
-        self.assertEqual(returned.etag, ETAG)
-        self.assertEqual(returned.version, VERSION)
+        self.assertEqual(returned.etag, etag)
+        self.assertEqual(returned.version, version)
         self.assertEqual(dict(returned), dict(policy))
 
-        kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]["method"], "PUT")
-        self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,))
-        self.assertEqual(kw[0]["query_params"], {})
-        self.assertEqual(kw[0]["timeout"], 42)
-        sent = kw[0]["data"]
-        self.assertEqual(sent["resourceId"], PATH)
-        self.assertEqual(len(sent["bindings"]), len(BINDINGS))
+        expected_path = "%s/iam" % (bucket.path,)
+        expected_data = {
+            "resourceId": bucket.path,
+            "bindings": mock.ANY,
+        }
+        expected_query_params = {}
+        client._put_resource.assert_called_once_with(
+            expected_path,
+            expected_data,
+            query_params=expected_query_params,
+            timeout=self._get_default_timeout(),
+            retry=DEFAULT_RETRY_IF_ETAG_IN_JSON,
+            _target_object=None,
+        )
+
+        sent_bindings = client._put_resource.call_args.args[1]["bindings"]
         key = operator.itemgetter("role")
         for found, expected in zip(
-            sorted(sent["bindings"], key=key), sorted(BINDINGS, key=key)
+            sorted(sent_bindings, key=key), sorted(bindings, key=key)
         ):
             self.assertEqual(found["role"], expected["role"])
             self.assertEqual(sorted(found["members"]), sorted(expected["members"]))
 
-    def test_set_iam_policy_w_user_project(self):
+    def test_set_iam_policy_w_user_project_w_expl_client_w_timeout_retry(self):
         import operator
         from google.cloud.storage.iam import STORAGE_OWNER_ROLE
         from google.cloud.storage.iam import STORAGE_EDITOR_ROLE
         from google.cloud.storage.iam import STORAGE_VIEWER_ROLE
         from google.api_core.iam import Policy
 
-        NAME = "name"
-        USER_PROJECT = "user-project-123"
-        PATH = "/b/%s" % (NAME,)
-        ETAG = "DEADBEEF"
-        VERSION = 1
-        OWNER1 = "user:phred@example.com"
-        OWNER2 = "group:cloud-logs@google.com"
-        EDITOR1 = "domain:google.com"
-        EDITOR2 = "user:phred@example.com"
-        VIEWER1 = "serviceAccount:1234-abcdef@service.example.com"
-        VIEWER2 = "user:phred@example.com"
-        BINDINGS = [
-            {"role": STORAGE_OWNER_ROLE, "members": [OWNER1, OWNER2]},
-            {"role": STORAGE_EDITOR_ROLE, "members": [EDITOR1, EDITOR2]},
-            {"role": STORAGE_VIEWER_ROLE, "members": [VIEWER1, VIEWER2]},
+        name = "name"
+        user_project = "user-project-123"
+        etag = "DEADBEEF"
+        version = 1
+        owner1 = "user:phred@example.com"
+        owner2 = "group:cloud-logs@google.com"
+        editor1 = "domain:google.com"
+        editor2 = "user:phred@example.com"
+        viewer1 = "serviceAccount:1234-abcdef@service.example.com"
+        viewer2 = "user:phred@example.com"
+        bindings = [
+            {"role": STORAGE_OWNER_ROLE, "members": [owner1, owner2]},
+            {"role": STORAGE_EDITOR_ROLE, "members": [editor1, editor2]},
+            {"role": STORAGE_VIEWER_ROLE, "members": [viewer1, viewer2]},
        ]
-        RETURNED = {"etag": ETAG, "version": VERSION, "bindings": BINDINGS}
         policy = Policy()
-        for binding in BINDINGS:
+        for binding in bindings:
             policy[binding["role"]] = binding["members"]
 
-        connection = _Connection(RETURNED)
-        client = _Client(connection, None)
-        bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT)
+        api_response = {"etag": etag, "version": version, "bindings": bindings}
+        client = mock.Mock(spec=["_put_resource"])
+        client._put_resource.return_value = api_response
+        bucket = self._make_one(client=None, name=name, user_project=user_project)
+        timeout = 42
+        retry = mock.Mock(spec=[])
 
-        returned = bucket.set_iam_policy(policy)
+        returned = bucket.set_iam_policy(
+            policy, client=client, timeout=timeout, retry=retry
+        )
 
-        self.assertEqual(returned.etag, ETAG)
-        self.assertEqual(returned.version, VERSION)
+        self.assertEqual(returned.etag, etag)
+        self.assertEqual(returned.version, version)
         self.assertEqual(dict(returned), dict(policy))
 
-        kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]["method"], "PUT")
-        self.assertEqual(kw[0]["path"], "%s/iam" % (PATH,))
-        self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT})
-        self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
-        sent = kw[0]["data"]
-        self.assertEqual(sent["resourceId"], PATH)
-        self.assertEqual(len(sent["bindings"]), len(BINDINGS))
+        expected_path = "%s/iam" % (bucket.path,)
+        expected_data = {
+            "resourceId": bucket.path,
+            "bindings": mock.ANY,
+        }
+        expected_query_params = {"userProject": user_project}
+        client._put_resource.assert_called_once_with(
+            expected_path,
+            expected_data,
+            query_params=expected_query_params,
+            timeout=timeout,
+            retry=retry,
+            _target_object=None,
+        )
+
+        sent_bindings = client._put_resource.call_args.args[1]["bindings"]
         key = operator.itemgetter("role")
         for found, expected in zip(
-            sorted(sent["bindings"], key=key), sorted(BINDINGS, key=key)
+            sorted(sent_bindings, key=key), sorted(bindings, key=key)
        ):
             self.assertEqual(found["role"], expected["role"])
             self.assertEqual(sorted(found["members"]), sorted(expected["members"]))
 
-    def test_test_iam_permissions(self):
+    def test_test_iam_permissions_defaults(self):
         from google.cloud.storage.iam import STORAGE_OBJECTS_LIST
         from google.cloud.storage.iam import STORAGE_BUCKETS_GET
         from google.cloud.storage.iam import STORAGE_BUCKETS_UPDATE
 
-        NAME = "name"
-        PATH = "/b/%s" % (NAME,)
-        PERMISSIONS = [
+        name = "name"
+        permissions = [
             STORAGE_OBJECTS_LIST,
             STORAGE_BUCKETS_GET,
             STORAGE_BUCKETS_UPDATE,
         ]
-        ALLOWED = PERMISSIONS[1:]
-        RETURNED = {"permissions": ALLOWED}
-        connection = _Connection(RETURNED)
-        client = _Client(connection, None)
-        bucket = self._make_one(client=client, name=NAME)
+        expected = permissions[1:]
+        api_response = {"permissions": expected}
+        client = mock.Mock(spec=["_get_resource"])
+        client._get_resource.return_value = api_response
+        bucket = self._make_one(client=client, name=name)
 
-        allowed = bucket.test_iam_permissions(PERMISSIONS, timeout=42)
+        found = bucket.test_iam_permissions(permissions)
 
-        self.assertEqual(allowed, ALLOWED)
+        self.assertEqual(found, expected)
 
-        kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]["method"], "GET")
-        self.assertEqual(kw[0]["path"], "%s/iam/testPermissions" % (PATH,))
-        self.assertEqual(kw[0]["query_params"], {"permissions": PERMISSIONS})
-        self.assertEqual(kw[0]["timeout"], 42)
+        expected_path = "/b/%s/iam/testPermissions" % (name,)
+        expected_query_params = {"permissions": permissions}
+        client._get_resource.assert_called_once_with(
+            expected_path,
+            query_params=expected_query_params,
+            timeout=self._get_default_timeout(),
+            retry=DEFAULT_RETRY,
+            _target_object=None,
+        )
 
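test_iam_permissions returns the subset of the requested permissions the caller actually holds, as the stubbed response above reflects. A sketch:

    allowed = bucket.test_iam_permissions(
        ["storage.objects.list", "storage.buckets.get", "storage.buckets.update"]
    )
    # `allowed` is a list such as ["storage.buckets.get", "storage.buckets.update"]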
-    def test_test_iam_permissions_w_user_project(self):
+    def test_test_iam_permissions_w_user_project_w_timeout_w_retry(self):
         from google.cloud.storage.iam import STORAGE_OBJECTS_LIST
         from google.cloud.storage.iam import STORAGE_BUCKETS_GET
         from google.cloud.storage.iam import STORAGE_BUCKETS_UPDATE
 
-        NAME = "name"
-        USER_PROJECT = "user-project-123"
-        PATH = "/b/%s" % (NAME,)
-        PERMISSIONS = [
+        name = "name"
+        user_project = "user-project-123"
+        timeout = 42
+        retry = mock.Mock(spec=[])
+        permissions = [
             STORAGE_OBJECTS_LIST,
             STORAGE_BUCKETS_GET,
             STORAGE_BUCKETS_UPDATE,
         ]
-        ALLOWED = PERMISSIONS[1:]
-        RETURNED = {"permissions": ALLOWED}
-        connection = _Connection(RETURNED)
-        client = _Client(connection, None)
-        bucket = self._make_one(client=client, name=NAME, user_project=USER_PROJECT)
+        expected = permissions[1:]
+        api_response = {"permissions": expected}
+        client = mock.Mock(spec=["_get_resource"])
+        client._get_resource.return_value = api_response
+        bucket = self._make_one(client=client, name=name, user_project=user_project)
 
-        allowed = bucket.test_iam_permissions(PERMISSIONS)
+        found = bucket.test_iam_permissions(permissions, timeout=timeout, retry=retry)
 
-        self.assertEqual(allowed, ALLOWED)
+        self.assertEqual(found, expected)
 
-        kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]["method"], "GET")
-        self.assertEqual(kw[0]["path"], "%s/iam/testPermissions" % (PATH,))
-        self.assertEqual(
-            kw[0]["query_params"],
-            {"permissions": PERMISSIONS, "userProject": USER_PROJECT},
+        expected_path = "/b/%s/iam/testPermissions" % (name,)
+        expected_query_params = {
+            "permissions": permissions,
+            "userProject": user_project,
+        }
+        client._get_resource.assert_called_once_with(
+            expected_path,
+            query_params=expected_query_params,
+            timeout=timeout,
+            retry=retry,
+            _target_object=None,
         )
-        self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
 
     def test_make_public_defaults(self):
         from google.cloud.storage.acl import _ACLEntity
 
-        NAME = "name"
+        name = "name"
         permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}]
-        after = {"acl": permissive, "defaultObjectAcl": []}
-        connection = _Connection(after)
-        client = _Client(connection)
-        bucket = self._make_one(client=client, name=NAME)
+        api_response = {"acl": permissive, "defaultObjectAcl": []}
+        client = mock.Mock(spec=["_patch_resource"])
+        client._patch_resource.return_value = api_response
+        bucket = self._make_one(client=client, name=name)
         bucket.acl.loaded = True
         bucket.default_object_acl.loaded = True
+
         bucket.make_public()
+
         self.assertEqual(list(bucket.acl), permissive)
         self.assertEqual(list(bucket.default_object_acl), [])
-        kw = connection._requested
-        self.assertEqual(len(kw), 1)
-        self.assertEqual(kw[0]["method"], "PATCH")
-        self.assertEqual(kw[0]["path"], "/b/%s" % NAME)
-        self.assertEqual(kw[0]["data"], {"acl": after["acl"]})
-        self.assertEqual(kw[0]["query_params"], {"projection": "full"})
-        self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
+
+        expected_path = bucket.path
+        expected_data = {"acl": permissive}
+        expected_query_params = {"projection": "full"}
+        client._patch_resource.assert_called_once_with(
+            expected_path,
+            expected_data,
+            query_params=expected_query_params,
+            timeout=self._get_default_timeout(),
+            retry=None,
+        )
 
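Per the tests around this point, make_public patches the bucket ACL; future=True also rewrites the default object ACL (reloading it first if it was never fetched), and recursive=True additionally saves a permissive ACL on each existing object:

    bucket.make_public()                # bucket ACL only
    bucket.make_public(future=True)     # also the default object ACL
    bucket.make_public(recursive=True)  # also each existing object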
     def _make_public_w_future_helper(self, default_object_acl_loaded=True):
         from google.cloud.storage.acl import _ACLEntity
 
-        NAME = "name"
+        name = "name"
+        get_api_response = {"items": []}
         permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}]
-        after1 = {"acl": permissive, "defaultObjectAcl": []}
-        after2 = {"acl": permissive, "defaultObjectAcl": permissive}
-        if default_object_acl_loaded:
-            num_requests = 2
-            connection = _Connection(after1, after2)
-        else:
-            num_requests = 3
-            # We return the same value for default_object_acl.reload()
-            # to consume.
-            connection = _Connection(after1, after1, after2)
-        client = _Client(connection)
-        bucket = self._make_one(client=client, name=NAME)
+        acl_patched_response = {"acl": permissive, "defaultObjectAcl": []}
+        dac_patched_response = {"acl": permissive, "defaultObjectAcl": permissive}
+        client = mock.Mock(spec=["_get_resource", "_patch_resource"])
+        client._get_resource.return_value = get_api_response
+        client._patch_resource.side_effect = [
+            acl_patched_response,
+            dac_patched_response,
+        ]
+
+        bucket = self._make_one(client=client, name=name)
         bucket.acl.loaded = True
         bucket.default_object_acl.loaded = default_object_acl_loaded
 
         bucket.make_public(future=True)
 
         self.assertEqual(list(bucket.acl), permissive)
         self.assertEqual(list(bucket.default_object_acl), permissive)
-        kw = connection._requested
-        self.assertEqual(len(kw), num_requests)
-        self.assertEqual(kw[0]["method"], "PATCH")
-        self.assertEqual(kw[0]["path"], "/b/%s" % NAME)
-        self.assertEqual(kw[0]["data"], {"acl": permissive})
-        self.assertEqual(kw[0]["query_params"], {"projection": "full"})
-        self.assertEqual(kw[0]["timeout"], self._get_default_timeout())
+
+        self.assertEqual(len(client._patch_resource.call_args_list), 2)
+        expected_acl_data = {"acl": permissive}
+        expected_dac_data = {"defaultObjectAcl": permissive}
+        expected_kw = {
+            "query_params": {"projection": "full"},
+            "timeout": self._get_default_timeout(),
+            "retry": None,
+        }
+        client._patch_resource.assert_has_calls(
+            [
+                ((bucket.path, expected_acl_data), expected_kw),
+                ((bucket.path, expected_dac_data), expected_kw),
+            ]
+        )
+
         if not default_object_acl_loaded:
-            self.assertEqual(kw[1]["method"], "GET")
-            self.assertEqual(kw[1]["path"], "/b/%s/defaultObjectAcl" % NAME)
- self.assertEqual(kw[-1]["method"], "PATCH") - self.assertEqual(kw[-1]["path"], "/b/%s" % NAME) - self.assertEqual(kw[-1]["data"], {"defaultObjectAcl": permissive}) - self.assertEqual(kw[-1]["query_params"], {"projection": "full"}) - self.assertEqual(kw[-1]["timeout"], self._get_default_timeout()) + expected_path = "/b/%s/defaultObjectAcl" % (name,) + expected_query_params = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + else: + client._get_resource.assert_not_called() def test_make_public_w_future(self): self._make_public_w_future_helper(default_object_acl_loaded=True) @@ -2782,119 +3172,153 @@ def save(self, client=None, timeout=None): (self._bucket, self._name, self._granted, client, timeout) ) - def item_to_blob(self, item): - return _Blob(self.bucket, item["name"]) - - NAME = "name" - BLOB_NAME = "blob-name" + name = "name" + blob_name = "blob-name" permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] - after = {"acl": permissive, "defaultObjectAcl": []} - connection = _Connection(after, {"items": [{"name": BLOB_NAME}]}) - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME) + + patch_acl_response = {"acl": permissive, "defaultObjectAcl": []} + client = mock.Mock(spec=["list_blobs", "_patch_resource"]) + client._patch_resource.return_value = patch_acl_response + + bucket = self._make_one(client=client, name=name) bucket.acl.loaded = True bucket.default_object_acl.loaded = True - with mock.patch("google.cloud.storage.client._item_to_blob", new=item_to_blob): - bucket.make_public(recursive=True, timeout=42, retry=DEFAULT_RETRY) + list_blobs_response = iter([_Blob(bucket, blob_name)]) + client.list_blobs.return_value = list_blobs_response + + timeout = 42 + + bucket.make_public(recursive=True, timeout=timeout) self.assertEqual(list(bucket.acl), permissive) self.assertEqual(list(bucket.default_object_acl), []) - self.assertEqual(_saved, [(bucket, BLOB_NAME, True, None, 42)]) - kw = connection._requested - self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/b/%s" % NAME) - self.assertEqual(kw[0]["data"], {"acl": permissive}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) - self.assertEqual(kw[0]["timeout"], 42) - self.assertEqual(kw[1]["method"], "GET") - self.assertEqual(kw[1]["path"], "/b/%s/o" % NAME) - self.assertEqual(kw[1]["retry"], DEFAULT_RETRY) - max_results = bucket._MAX_OBJECTS_FOR_ITERATION + 1 - self.assertEqual( - kw[1]["query_params"], {"maxResults": max_results, "projection": "full"} + self.assertEqual(_saved, [(bucket, blob_name, True, None, timeout)]) + + expected_patch_data = {"acl": permissive} + expected_patch_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + bucket.path, + expected_patch_data, + query_params=expected_patch_query_params, + timeout=timeout, + retry=None, ) - self.assertEqual(kw[1]["timeout"], 42) + client.list_blobs.assert_called_once() def test_make_public_recursive_too_many(self): from google.cloud.storage.acl import _ACLEntity - PERMISSIVE = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] - AFTER = {"acl": PERMISSIVE, "defaultObjectAcl": []} + permissive = [{"entity": "allUsers", "role": _ACLEntity.READER_ROLE}] - NAME = "name" - BLOB_NAME1 = "blob-name1" - BLOB_NAME2 = "blob-name2" - GET_BLOBS_RESP = {"items": 
[{"name": BLOB_NAME1}, {"name": BLOB_NAME2}]} - connection = _Connection(AFTER, GET_BLOBS_RESP) - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME) + name = "name" + blob1 = mock.Mock(spec=[]) + blob2 = mock.Mock(spec=[]) + patch_acl_response = {"acl": permissive, "defaultObjectAcl": []} + list_blobs_response = iter([blob1, blob2]) + client = mock.Mock(spec=["list_blobs", "_patch_resource"]) + client.list_blobs.return_value = list_blobs_response + client._patch_resource.return_value = patch_acl_response + bucket = self._make_one(client=client, name=name) bucket.acl.loaded = True bucket.default_object_acl.loaded = True # Make the Bucket refuse to make_public with 2 objects. bucket._MAX_OBJECTS_FOR_ITERATION = 1 - self.assertRaises(ValueError, bucket.make_public, recursive=True) + + with self.assertRaises(ValueError): + bucket.make_public(recursive=True) + + expected_path = bucket.path + expected_data = {"acl": permissive} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=None, + ) + + client.list_blobs.assert_called_once() def test_make_private_defaults(self): - NAME = "name" + name = "name" no_permissions = [] - after = {"acl": no_permissions, "defaultObjectAcl": []} - connection = _Connection(after) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) + api_response = {"acl": no_permissions, "defaultObjectAcl": []} + client = mock.Mock(spec=["_patch_resource"]) + client._patch_resource.return_value = api_response + bucket = self._make_one(client=client, name=name) bucket.acl.loaded = True bucket.default_object_acl.loaded = True + bucket.make_private() + self.assertEqual(list(bucket.acl), no_permissions) self.assertEqual(list(bucket.default_object_acl), []) - kw = connection._requested - self.assertEqual(len(kw), 1) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/b/%s" % NAME) - self.assertEqual(kw[0]["data"], {"acl": after["acl"]}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) + + expected_path = bucket.path + expected_data = {"acl": no_permissions} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=None, + ) def _make_private_w_future_helper(self, default_object_acl_loaded=True): - NAME = "name" + name = "name" no_permissions = [] - after1 = {"acl": no_permissions, "defaultObjectAcl": []} - after2 = {"acl": no_permissions, "defaultObjectAcl": no_permissions} - if default_object_acl_loaded: - num_requests = 2 - connection = _Connection(after1, after2) - else: - num_requests = 3 - # We return the same value for default_object_acl.reload() - # to consume. 
- connection = _Connection(after1, after1, after2) - client = _Client(connection) - bucket = self._make_one(client=client, name=NAME) + get_api_response = {"items": []} + acl_patched_response = {"acl": no_permissions, "defaultObjectAcl": []} + dac_patched_response = { + "acl": no_permissions, + "defaultObjectAcl": no_permissions, + } + client = mock.Mock(spec=["_get_resource", "_patch_resource"]) + client._get_resource.return_value = get_api_response + client._patch_resource.side_effect = [ + acl_patched_response, + dac_patched_response, + ] + + bucket = self._make_one(client=client, name=name) bucket.acl.loaded = True bucket.default_object_acl.loaded = default_object_acl_loaded + bucket.make_private(future=True) + self.assertEqual(list(bucket.acl), no_permissions) self.assertEqual(list(bucket.default_object_acl), no_permissions) - kw = connection._requested - self.assertEqual(len(kw), num_requests) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/b/%s" % NAME) - self.assertEqual(kw[0]["data"], {"acl": no_permissions}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) - self.assertEqual(kw[0]["timeout"], self._get_default_timeout()) + + self.assertEqual(len(client._patch_resource.call_args_list), 2) + expected_acl_data = {"acl": no_permissions} + expected_dac_data = {"defaultObjectAcl": no_permissions} + expected_kw = { + "query_params": {"projection": "full"}, + "timeout": self._get_default_timeout(), + "retry": None, + } + client._patch_resource.assert_has_calls( + [ + ((bucket.path, expected_acl_data), expected_kw), + ((bucket.path, expected_dac_data), expected_kw), + ] + ) + if not default_object_acl_loaded: - self.assertEqual(kw[1]["method"], "GET") - self.assertEqual(kw[1]["path"], "/b/%s/defaultObjectAcl" % NAME) - # Last could be 1 or 2 depending on `default_object_acl_loaded`. 
- self.assertEqual(kw[-1]["method"], "PATCH") - self.assertEqual(kw[-1]["path"], "/b/%s" % NAME) - self.assertEqual(kw[-1]["data"], {"defaultObjectAcl": no_permissions}) - self.assertEqual(kw[-1]["query_params"], {"projection": "full"}) - self.assertEqual(kw[-1]["timeout"], self._get_default_timeout()) + expected_path = "/b/%s/defaultObjectAcl" % (name,) + expected_query_params = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + else: + client._get_resource.assert_not_called() def test_make_private_w_future(self): self._make_private_w_future_helper(default_object_acl_loaded=True) @@ -2928,150 +3352,83 @@ def save(self, client=None, timeout=None): (self._bucket, self._name, self._granted, client, timeout) ) - def item_to_blob(self, item): - return _Blob(self.bucket, item["name"]) - - NAME = "name" - BLOB_NAME = "blob-name" + name = "name" + blob_name = "blob-name" no_permissions = [] - after = {"acl": no_permissions, "defaultObjectAcl": []} - connection = _Connection(after, {"items": [{"name": BLOB_NAME}]}) - client = self._make_client() - client._base_connection = connection - bucket = self._make_one(client=client, name=NAME) + + patch_acl_response = {"acl": no_permissions, "defaultObjectAcl": []} + client = mock.Mock(spec=["list_blobs", "_patch_resource"]) + client._patch_resource.return_value = patch_acl_response + + bucket = self._make_one(client=client, name=name) bucket.acl.loaded = True bucket.default_object_acl.loaded = True - with mock.patch("google.cloud.storage.client._item_to_blob", new=item_to_blob): - bucket.make_private(recursive=True, timeout=42, retry=DEFAULT_RETRY) + list_blobs_response = iter([_Blob(bucket, blob_name)]) + client.list_blobs.return_value = list_blobs_response + + timeout = 42 + + bucket.make_private(recursive=True, timeout=42) + self.assertEqual(list(bucket.acl), no_permissions) self.assertEqual(list(bucket.default_object_acl), []) - self.assertEqual(_saved, [(bucket, BLOB_NAME, False, None, 42)]) - kw = connection._requested - self.assertEqual(len(kw), 2) - self.assertEqual(kw[0]["method"], "PATCH") - self.assertEqual(kw[0]["path"], "/b/%s" % NAME) - self.assertEqual(kw[0]["data"], {"acl": no_permissions}) - self.assertEqual(kw[0]["query_params"], {"projection": "full"}) - self.assertEqual(kw[0]["timeout"], 42) - self.assertEqual(kw[1]["method"], "GET") - self.assertEqual(kw[1]["path"], "/b/%s/o" % NAME) - self.assertEqual(kw[1]["retry"], DEFAULT_RETRY) - max_results = bucket._MAX_OBJECTS_FOR_ITERATION + 1 - self.assertEqual( - kw[1]["query_params"], {"maxResults": max_results, "projection": "full"} + self.assertEqual(_saved, [(bucket, blob_name, False, None, timeout)]) + + expected_patch_data = {"acl": no_permissions} + expected_patch_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + bucket.path, + expected_patch_data, + query_params=expected_patch_query_params, + timeout=timeout, + retry=None, ) - self.assertEqual(kw[1]["timeout"], 42) + + client.list_blobs.assert_called_once() def test_make_private_recursive_too_many(self): - NO_PERMISSIONS = [] - AFTER = {"acl": NO_PERMISSIONS, "defaultObjectAcl": []} + no_permissions = [] - NAME = "name" - BLOB_NAME1 = "blob-name1" - BLOB_NAME2 = "blob-name2" - GET_BLOBS_RESP = {"items": [{"name": BLOB_NAME1}, {"name": BLOB_NAME2}]} - connection = _Connection(AFTER, GET_BLOBS_RESP) - client = self._make_client() - client._base_connection = connection - 
bucket = self._make_one(client=client, name=NAME) + name = "name" + blob1 = mock.Mock(spec=[]) + blob2 = mock.Mock(spec=[]) + patch_acl_response = {"acl": no_permissions, "defaultObjectAcl": []} + list_blobs_response = iter([blob1, blob2]) + client = mock.Mock(spec=["list_blobs", "_patch_resource"]) + client.list_blobs.return_value = list_blobs_response + client._patch_resource.return_value = patch_acl_response + bucket = self._make_one(client=client, name=name) bucket.acl.loaded = True bucket.default_object_acl.loaded = True # Make the Bucket refuse to make_private with 2 objects. bucket._MAX_OBJECTS_FOR_ITERATION = 1 - self.assertRaises(ValueError, bucket.make_private, recursive=True) - - def test_page_empty_response(self): - from google.api_core import page_iterator - - connection = _Connection() - client = self._make_client() - client._base_connection = connection - name = "name" - bucket = self._make_one(client=client, name=name) - iterator = bucket.list_blobs() - page = page_iterator.Page(iterator, (), None) - iterator._page = page - blobs = list(page) - self.assertEqual(blobs, []) - self.assertEqual(iterator.prefixes, set()) - - def test_page_non_empty_response(self): - import six - from google.cloud.storage.blob import Blob - blob_name = "blob-name" - response = {"items": [{"name": blob_name}], "prefixes": ["foo"]} - connection = _Connection() - client = self._make_client() - client._base_connection = connection - name = "name" - bucket = self._make_one(client=client, name=name) - - def fake_response(): - return response - - iterator = bucket.list_blobs() - iterator._get_next_page_response = fake_response - - page = six.next(iterator.pages) - self.assertEqual(page.prefixes, ("foo",)) - self.assertEqual(page.num_items, 1) - blob = six.next(page) - self.assertEqual(page.remaining, 0) - self.assertIsInstance(blob, Blob) - self.assertEqual(blob.name, blob_name) - self.assertEqual(iterator.prefixes, set(["foo"])) - - def test_cumulative_prefixes(self): - import six - from google.cloud.storage.blob import Blob - - BLOB_NAME = "blob-name1" - response1 = { - "items": [{"name": BLOB_NAME}], - "prefixes": ["foo"], - "nextPageToken": "s39rmf9", - } - response2 = {"items": [], "prefixes": ["bar"]} - client = self._make_client() - name = "name" - bucket = self._make_one(client=client, name=name) - responses = [response1, response2] + with self.assertRaises(ValueError): + bucket.make_private(recursive=True) + + expected_path = bucket.path + expected_data = {"acl": no_permissions} + expected_query_params = {"projection": "full"} + client._patch_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=None, + ) - def fake_response(): - return responses.pop(0) + client.list_blobs.assert_called_once() - iterator = bucket.list_blobs() - iterator._get_next_page_response = fake_response - - # Parse first response. - pages_iter = iterator.pages - page1 = six.next(pages_iter) - self.assertEqual(page1.prefixes, ("foo",)) - self.assertEqual(page1.num_items, 1) - blob = six.next(page1) - self.assertEqual(page1.remaining, 0) - self.assertIsInstance(blob, Blob) - self.assertEqual(blob.name, BLOB_NAME) - self.assertEqual(iterator.prefixes, set(["foo"])) - # Parse second response. 
- page2 = six.next(pages_iter) - self.assertEqual(page2.prefixes, ("bar",)) - self.assertEqual(page2.num_items, 0) - self.assertEqual(iterator.prefixes, set(["foo", "bar"])) - - def _test_generate_upload_policy_helper(self, **kwargs): + def _generate_upload_policy_helper(self, **kwargs): import base64 import json credentials = _create_signing_credentials() credentials.signer_email = mock.sentinel.signer_email credentials.sign_bytes.return_value = b"DEADBEEF" - connection = _Connection() - connection.credentials = credentials - client = _Client(connection) + client = self._make_client(_credentials=credentials) name = "name" bucket = self._make_one(client=client, name=name) @@ -3108,7 +3465,7 @@ def _test_generate_upload_policy_helper(self, **kwargs): def test_generate_upload_policy(self, now): from google.cloud._helpers import _datetime_to_rfc3339 - _, policy = self._test_generate_upload_policy_helper() + _, policy = self._generate_upload_policy_helper() self.assertEqual( policy["expiration"], @@ -3120,15 +3477,13 @@ def test_generate_upload_policy_args(self): expiration = datetime.datetime(1990, 5, 29) - _, policy = self._test_generate_upload_policy_helper(expiration=expiration) + _, policy = self._generate_upload_policy_helper(expiration=expiration) self.assertEqual(policy["expiration"], _datetime_to_rfc3339(expiration)) def test_generate_upload_policy_bad_credentials(self): credentials = object() - connection = _Connection() - connection.credentials = credentials - client = _Client(connection) + client = self._make_client(_credentials=credentials) name = "name" bucket = self._make_one(client=client, name=name) @@ -3136,10 +3491,7 @@ def test_generate_upload_policy_bad_credentials(self): bucket.generate_upload_policy([]) def test_lock_retention_policy_no_policy_set(self): - credentials = object() - connection = _Connection() - connection.credentials = credentials - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) name = "name" bucket = self._make_one(client=client, name=name) bucket._properties["metageneration"] = 1234 @@ -3147,11 +3499,10 @@ def test_lock_retention_policy_no_policy_set(self): with self.assertRaises(ValueError): bucket.lock_retention_policy() + client._post_resource.assert_not_called() + def test_lock_retention_policy_no_metageneration(self): - credentials = object() - connection = _Connection() - connection.credentials = credentials - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) name = "name" bucket = self._make_one(client=client, name=name) bucket._properties["retentionPolicy"] = { @@ -3162,11 +3513,10 @@ def test_lock_retention_policy_no_metageneration(self): with self.assertRaises(ValueError): bucket.lock_retention_policy() + client._post_resource.assert_not_called() + def test_lock_retention_policy_already_locked(self): - credentials = object() - connection = _Connection() - connection.credentials = credentials - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) name = "name" bucket = self._make_one(client=client, name=name) bucket._properties["metageneration"] = 1234 @@ -3179,74 +3529,92 @@ def test_lock_retention_policy_already_locked(self): with self.assertRaises(ValueError): bucket.lock_retention_policy() - def test_lock_retention_policy_ok(self): + client._post_resource.assert_not_called() + + def test_lock_retention_policy_ok_w_timeout_w_retry(self): name = "name" - response = { + effective_time = "2018-03-01T16:46:27.123456Z" + one_hundred_days = 86400 * 100 # seconds in 100 days 
+ metageneration = 1234 + api_response = { "name": name, - "metageneration": 1235, + "metageneration": metageneration + 1, "retentionPolicy": { - "effectiveTime": "2018-03-01T16:46:27.123456Z", + "effectiveTime": effective_time, "isLocked": True, - "retentionPeriod": 86400 * 100, # 100 days + "retentionPeriod": one_hundred_days, }, } - credentials = object() - connection = _Connection(response) - connection.credentials = credentials - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = self._make_one(client=client, name=name) - bucket._properties["metageneration"] = 1234 + bucket._properties["metageneration"] = metageneration bucket._properties["retentionPolicy"] = { - "effectiveTime": "2018-03-01T16:46:27.123456Z", - "retentionPeriod": 86400 * 100, # 100 days + "effectiveTime": effective_time, + "retentionPeriod": one_hundred_days, } - - bucket.lock_retention_policy(timeout=42) - - (kw,) = connection._requested - self.assertEqual(kw["method"], "POST") - self.assertEqual(kw["path"], "/b/{}/lockRetentionPolicy".format(name)) - self.assertEqual(kw["query_params"], {"ifMetagenerationMatch": 1234}) - self.assertEqual(kw["timeout"], 42) + timeout = 42 + retry = mock.Mock(spec=[]) + + bucket.lock_retention_policy(timeout=timeout, retry=retry) + + expected_path = "/b/{}/lockRetentionPolicy".format(name) + expected_data = None + expected_query_params = {"ifMetagenerationMatch": metageneration} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + _target_object=bucket, + ) def test_lock_retention_policy_w_user_project(self): name = "name" user_project = "user-project-123" - response = { + metageneration = 1234 + effective_time = "2018-03-01T16:46:27.123456Z" + one_hundred_days = 86400 * 100 # seconds in 100 days + api_response = { "name": name, - "metageneration": 1235, + "metageneration": metageneration + 1, "retentionPolicy": { - "effectiveTime": "2018-03-01T16:46:27.123456Z", + "effectiveTime": effective_time, "isLocked": True, - "retentionPeriod": 86400 * 100, # 100 days + "retentionPeriod": one_hundred_days, }, } - credentials = object() - connection = _Connection(response) - connection.credentials = credentials - client = _Client(connection) + client = mock.Mock(spec=["_post_resource"]) + client._post_resource.return_value = api_response bucket = self._make_one(client=client, name=name, user_project=user_project) bucket._properties["metageneration"] = 1234 bucket._properties["retentionPolicy"] = { - "effectiveTime": "2018-03-01T16:46:27.123456Z", - "retentionPeriod": 86400 * 100, # 100 days + "effectiveTime": effective_time, + "retentionPeriod": one_hundred_days, } bucket.lock_retention_policy() - (kw,) = connection._requested - self.assertEqual(kw["method"], "POST") - self.assertEqual(kw["path"], "/b/{}/lockRetentionPolicy".format(name)) - self.assertEqual( - kw["query_params"], - {"ifMetagenerationMatch": 1234, "userProject": user_project}, + expected_path = "/b/{}/lockRetentionPolicy".format(name) + expected_data = None + expected_query_params = { + "ifMetagenerationMatch": metageneration, + "userProject": user_project, + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=bucket, ) - self.assertEqual(kw["timeout"], 
self._get_default_timeout()) def test_generate_signed_url_w_invalid_version(self): expiration = "2014-10-16T20:34:37.000Z" - connection = _Connection() - client = _Client(connection) + client = self._make_client() bucket = self._make_one(name="bucket_name", client=client) with self.assertRaises(ValueError): bucket.generate_signed_url(expiration, version="nonesuch") @@ -3282,8 +3650,7 @@ def _generate_signed_url_helper( if expiration is None: expiration = datetime.datetime.utcnow().replace(tzinfo=UTC) + delta - connection = _Connection() - client = _Client(connection) + client = self._make_client(_credentials=credentials) bucket = self._make_one(name=bucket_name, client=client) if version is None: @@ -3343,11 +3710,12 @@ def _generate_signed_url_helper( def test_get_bucket_from_string_w_valid_uri(self): from google.cloud.storage.bucket import Bucket - connection = _Connection() - client = _Client(connection) + client = self._make_client() BUCKET_NAME = "BUCKET_NAME" uri = "gs://" + BUCKET_NAME + bucket = Bucket.from_string(uri, client) + self.assertIsInstance(bucket, Bucket) self.assertIs(bucket.client, client) self.assertEqual(bucket.name, BUCKET_NAME) @@ -3355,8 +3723,7 @@ def test_get_bucket_from_string_w_valid_uri(self): def test_get_bucket_from_string_w_invalid_uri(self): from google.cloud.storage.bucket import Bucket - connection = _Connection() - client = _Client(connection) + client = self._make_client() with pytest.raises(ValueError, match="URI scheme must be gs"): Bucket.from_string("http://bucket_name", client) @@ -3364,11 +3731,12 @@ def test_get_bucket_from_string_w_invalid_uri(self): def test_get_bucket_from_string_w_domain_name_bucket(self): from google.cloud.storage.bucket import Bucket - connection = _Connection() - client = _Client(connection) + client = self._make_client() BUCKET_NAME = "buckets.example.com" uri = "gs://" + BUCKET_NAME + bucket = Bucket.from_string(uri, client) + self.assertIsInstance(bucket, Bucket) self.assertIs(bucket.client, client) self.assertEqual(bucket.name, BUCKET_NAME) @@ -3474,51 +3842,32 @@ def test_generate_signed_url_v4_w_bucket_bound_hostname_w_bare_hostname(self): self._generate_signed_url_v4_helper(bucket_bound_hostname="cdn.example.com") -class _Connection(object): - _delete_bucket = False - - def __init__(self, *responses): - self._responses = responses - self._requested = [] - self._deleted_buckets = [] - self.credentials = None - - @staticmethod - def _is_bucket_path(path): - # Now just ensure the path only has /b/ and one more segment. 
- return path.startswith("/b/") and path.count("/") == 2 - - def api_request(self, **kw): - from google.cloud.exceptions import NotFound +class Test__item_to_notification(unittest.TestCase): + def _call_fut(self, iterator, item): + from google.cloud.storage.bucket import _item_to_notification - self._requested.append(kw) - - method = kw.get("method") - path = kw.get("path", "") - if method == "DELETE" and self._is_bucket_path(path): - self._deleted_buckets.append(kw) - if self._delete_bucket: - return - else: - raise NotFound("miss") - - try: - response, self._responses = self._responses[0], self._responses[1:] - except IndexError: - raise NotFound("miss") - else: - return response + return _item_to_notification(iterator, item) + def test_it(self): + from google.cloud.storage.notification import BucketNotification + from google.cloud.storage.notification import _TOPIC_REF_FMT + from google.cloud.storage.notification import NONE_PAYLOAD_FORMAT -class _Client(object): - def __init__(self, connection, project=None): - self._base_connection = connection - self.project = project + iterator = mock.Mock(spec=["bucket"]) + project = "my-project-123" + topic = "topic-1" + item = { + "topic": _TOPIC_REF_FMT.format(project, topic), + "id": "1", + "etag": "DEADBEEF", + "selfLink": "https://example.com/notification/1", + "payload_format": NONE_PAYLOAD_FORMAT, + } - @property - def _connection(self): - return self._base_connection + notification = self._call_fut(iterator, item) - @property - def _credentials(self): - return self._base_connection.credentials + self.assertIsInstance(notification, BucketNotification) + self.assertIs(notification._bucket, iterator.bucket) + self.assertEqual(notification._topic_name, topic) + self.assertEqual(notification._topic_project, project) + self.assertEqual(notification._properties, item) diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index df780c786..33ec331d6 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -29,6 +29,7 @@ from . 
import _read_local_json from google.cloud.storage.retry import DEFAULT_RETRY +from google.cloud.storage.retry import DEFAULT_RETRY_IF_GENERATION_SPECIFIED _SERVICE_ACCOUNT_JSON = _read_local_json("url_signer_v4_test_account.json") @@ -39,9 +40,12 @@ _FAKE_CREDENTIALS = Credentials.from_service_account_info(_SERVICE_ACCOUNT_JSON) -def _make_credentials(): +def _make_credentials(project=None): import google.auth.credentials + if project is not None: + return mock.Mock(spec=google.auth.credentials.Credentials, project_id=project) + return mock.Mock(spec=google.auth.credentials.Credentials) @@ -174,14 +178,9 @@ def test_ctor_wo_project(self): from google.cloud.storage._http import Connection PROJECT = "PROJECT" - credentials = _make_credentials() - - ddp_patch = mock.patch( - "google.cloud.client._determine_default_project", return_value=PROJECT - ) + credentials = _make_credentials(project=PROJECT) - with ddp_patch: - client = self._make_one(credentials=credentials) + client = self._make_one(credentials=credentials) self.assertEqual(client.project, PROJECT) self.assertIsInstance(client._connection, Connection) @@ -221,7 +220,8 @@ def test_ctor_w_client_info(self): self.assertIs(client._connection._client_info, client_info) def test_ctor_mtls(self): - credentials = _make_credentials() + PROJECT = "PROJECT" + credentials = _make_credentials(project=PROJECT) client = self._make_one(credentials=credentials) self.assertEqual(client._connection.ALLOW_AUTO_SWITCH_TO_MTLS_URL, True) @@ -411,339 +411,675 @@ def test_batch(self): self.assertIsInstance(batch, Batch) self.assertIs(batch._client, client) - def test_get_bucket_with_string_miss(self): + def test__get_resource_miss_w_defaults(self): from google.cloud.exceptions import NotFound - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + project = "PROJECT" + path = "/path/to/something" + credentials = _make_credentials() - NONESUCH = "nonesuch" - http = _make_requests_session( - [_make_json_response({}, status=http_client.NOT_FOUND)] - ) - client._http_internal = http + client = self._make_one(project=project, credentials=credentials) + connection = client._base_connection = _make_connection() with self.assertRaises(NotFound): - client.get_bucket(NONESUCH, timeout=42) + client._get_resource(path) - http.request.assert_called_once_with( - method="GET", url=mock.ANY, data=mock.ANY, headers=mock.ANY, timeout=42 - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join(["", "storage", client._connection.API_VERSION, "b", NONESUCH]), + connection.api_request.assert_called_once_with( + method="GET", + path=path, + query_params=None, + headers=None, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["projection"], "noAcl") - def test_get_bucket_with_string_hit(self): - from google.cloud.storage.bucket import Bucket + def test__get_resource_hit_w_explicit(self): + project = "PROJECT" + path = "/path/to/something" + query_params = {"foo": "Foo"} + headers = {"bar": "Bar"} + timeout = 100 + retry = mock.Mock(spec=[]) + credentials = _make_credentials() - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + client = self._make_one(project=project, 
credentials=credentials) + expected = mock.Mock(spec={}) + connection = client._base_connection = _make_connection(expected) + target = mock.Mock(spec={}) - BUCKET_NAME = "bucket-name" - data = {"name": BUCKET_NAME} - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http + found = client._get_resource( + path, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, + ) - bucket = client.get_bucket(BUCKET_NAME) + self.assertIs(found, expected) - self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, BUCKET_NAME) - http.request.assert_called_once_with( + connection.api_request.assert_called_once_with( method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=self._get_default_timeout(), - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join(["", "storage", client._connection.API_VERSION, "b", BUCKET_NAME]), + path=path, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["projection"], "noAcl") - def test_get_bucket_with_metageneration_match(self): - from google.cloud.storage.bucket import Bucket + def test__list_resource_w_defaults(self): + import functools + from google.api_core.page_iterator import HTTPIterator + from google.api_core.page_iterator import _do_nothing_page_start - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - METAGENERATION_NUMBER = 6 - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + project = "PROJECT" + path = "/path/to/list/resource" + item_to_value = mock.Mock(spec=[]) + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + connection = client._base_connection = _make_connection() + + iterator = client._list_resource(path=path, item_to_value=item_to_value,) + + self.assertIsInstance(iterator, HTTPIterator) + self.assertIs(iterator.client, client) + self.assertIsInstance(iterator.api_request, functools.partial) + self.assertIs(iterator.api_request.func, connection.api_request) + self.assertEqual(iterator.api_request.args, ()) + expected_keywords = { + "timeout": self._get_default_timeout(), + "retry": DEFAULT_RETRY, + } + self.assertEqual(iterator.api_request.keywords, expected_keywords) + self.assertEqual(iterator.path, path) + self.assertEqual(iterator.next_page_token, None) + self.assertEqual(iterator.max_results, None) + self.assertIs(iterator._page_start, _do_nothing_page_start) - BUCKET_NAME = "bucket-name" - data = {"name": BUCKET_NAME} - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http + def test__list_resource_w_explicit(self): + import functools + from google.api_core.page_iterator import HTTPIterator - bucket = client.get_bucket( - BUCKET_NAME, if_metageneration_match=METAGENERATION_NUMBER - ) - self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, BUCKET_NAME) - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=self._get_default_timeout(), - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( 
- path, - "/".join(["", "storage", client._connection.API_VERSION, "b", BUCKET_NAME]), - ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["ifMetagenerationMatch"], str(METAGENERATION_NUMBER)) - self.assertEqual(parms["projection"], "noAcl") + project = "PROJECT" + path = "/path/to/list/resource" + item_to_value = mock.Mock(spec=[]) + page_token = "PAGE-TOKEN" + max_results = 47 + extra_params = {"foo": "Foo"} + page_start = mock.Mock(spec=[]) + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + connection = client._base_connection = _make_connection() + + iterator = client._list_resource( + path=path, + item_to_value=item_to_value, + page_token=page_token, + max_results=max_results, + extra_params=extra_params, + page_start=page_start, + ) + + self.assertIsInstance(iterator, HTTPIterator) + self.assertIs(iterator.client, client) + self.assertIsInstance(iterator.api_request, functools.partial) + self.assertIs(iterator.api_request.func, connection.api_request) + self.assertEqual(iterator.api_request.args, ()) + expected_keywords = { + "timeout": self._get_default_timeout(), + "retry": DEFAULT_RETRY, + } + self.assertEqual(iterator.api_request.keywords, expected_keywords) + self.assertEqual(iterator.path, path) + self.assertEqual(iterator.next_page_token, page_token) + self.assertEqual(iterator.max_results, max_results) + self.assertIs(iterator._page_start, page_start) - def test_get_bucket_with_object_miss(self): + def test__patch_resource_miss_w_defaults(self): from google.cloud.exceptions import NotFound - from google.cloud.storage.bucket import Bucket project = "PROJECT" + path = "/path/to/something" credentials = _make_credentials() - client = self._make_one(project=project, credentials=credentials) + data = {"baz": "Baz"} - nonesuch = "nonesuch" - bucket_obj = Bucket(client, nonesuch) - http = _make_requests_session( - [_make_json_response({}, status=http_client.NOT_FOUND)] - ) - client._http_internal = http + client = self._make_one(project=project, credentials=credentials) + connection = client._base_connection = _make_connection() with self.assertRaises(NotFound): - client.get_bucket(bucket_obj) + client._patch_resource(path, data) - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, + connection.api_request.assert_called_once_with( + method="PATCH", + path=path, + data=data, + query_params=None, + headers=None, timeout=self._get_default_timeout(), + retry=None, + _target_object=None, ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( + + def test__patch_resource_hit_w_explicit(self): + project = "PROJECT" + path = "/path/to/something" + data = {"baz": "Baz"} + query_params = {"foo": "Foo"} + headers = {"bar": "Bar"} + timeout = 100 + retry = mock.Mock(spec=[]) + credentials = _make_credentials() + + client = self._make_one(project=project, credentials=credentials) + expected = mock.Mock(spec={}) + connection = client._base_connection = _make_connection(expected) + target = mock.Mock(spec={}) + + found = client._patch_resource( path, - "/".join(["", "storage", client._connection.API_VERSION, "b", nonesuch]), + data, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["projection"], 
"noAcl") - def test_get_bucket_with_object_hit(self): - from google.cloud.storage.bucket import Bucket + self.assertIs(found, expected) + + connection.api_request.assert_called_once_with( + method="PATCH", + path=path, + data=data, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, + ) + + def test__put_resource_miss_w_defaults(self): + from google.cloud.exceptions import NotFound project = "PROJECT" + path = "/path/to/something" credentials = _make_credentials() - client = self._make_one(project=project, credentials=credentials) + data = {"baz": "Baz"} - bucket_name = "bucket-name" - bucket_obj = Bucket(client, bucket_name) - data = {"name": bucket_name} - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http + client = self._make_one(project=project, credentials=credentials) + connection = client._base_connection = _make_connection() - bucket = client.get_bucket(bucket_obj) + with self.assertRaises(NotFound): + client._put_resource(path, data) - self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, bucket_name) - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, + connection.api_request.assert_called_once_with( + method="PUT", + path=path, + data=data, + query_params=None, + headers=None, timeout=self._get_default_timeout(), + retry=None, + _target_object=None, ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join(["", "storage", client._connection.API_VERSION, "b", bucket_name]), - ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["projection"], "noAcl") - def test_get_bucket_default_retry(self): - from google.cloud.storage.bucket import Bucket - from google.cloud.storage._http import Connection + def test__put_resource_hit_w_explicit(self): + project = "PROJECT" + path = "/path/to/something" + data = {"baz": "Baz"} + query_params = {"foo": "Foo"} + headers = {"bar": "Bar"} + timeout = 100 + retry = mock.Mock(spec=[]) + credentials = _make_credentials() - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + client = self._make_one(project=project, credentials=credentials) + expected = mock.Mock(spec={}) + connection = client._base_connection = _make_connection(expected) + target = mock.Mock(spec={}) - bucket_name = "bucket-name" - bucket_obj = Bucket(client, bucket_name) + found = client._put_resource( + path, + data, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, + ) - with mock.patch.object(Connection, "api_request") as req: - client.get_bucket(bucket_obj) + self.assertIs(found, expected) - req.assert_called_once_with( - method="GET", - path=mock.ANY, - query_params=mock.ANY, - headers=mock.ANY, - _target_object=bucket_obj, - timeout=mock.ANY, - retry=DEFAULT_RETRY, + connection.api_request.assert_called_once_with( + method="PUT", + path=path, + data=data, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, ) - def test_get_bucket_respects_retry_override(self): - from google.cloud.storage.bucket import Bucket - from google.cloud.storage._http import Connection + def test__post_resource_miss_w_defaults(self): + from google.cloud.exceptions import 
NotFound - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + project = "PROJECT" + path = "/path/to/something" + credentials = _make_credentials() + data = {"baz": "Baz"} - bucket_name = "bucket-name" - bucket_obj = Bucket(client, bucket_name) + client = self._make_one(project=project, credentials=credentials) + connection = client._base_connection = _make_connection() - with mock.patch.object(Connection, "api_request") as req: - client.get_bucket(bucket_obj, retry=None) + with self.assertRaises(NotFound): + client._post_resource(path, data) - req.assert_called_once_with( - method="GET", - path=mock.ANY, - query_params=mock.ANY, - headers=mock.ANY, - _target_object=bucket_obj, - timeout=mock.ANY, + connection.api_request.assert_called_once_with( + method="POST", + path=path, + data=data, + query_params=None, + headers=None, + timeout=self._get_default_timeout(), retry=None, + _target_object=None, ) - def test_lookup_bucket_miss(self): - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + def test__post_resource_hit_w_explicit(self): + project = "PROJECT" + path = "/path/to/something" + data = {"baz": "Baz"} + query_params = {"foo": "Foo"} + headers = {"bar": "Bar"} + timeout = 100 + retry = mock.Mock(spec=[]) + credentials = _make_credentials() - NONESUCH = "nonesuch" - http = _make_requests_session( - [_make_json_response({}, status=http_client.NOT_FOUND)] + client = self._make_one(project=project, credentials=credentials) + expected = mock.Mock(spec={}) + connection = client._base_connection = _make_connection(expected) + target = mock.Mock(spec={}) + + found = client._post_resource( + path, + data, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, ) - client._http_internal = http - bucket = client.lookup_bucket(NONESUCH, timeout=42) + self.assertIs(found, expected) - self.assertIsNone(bucket) - http.request.assert_called_once_with( - method="GET", url=mock.ANY, data=mock.ANY, headers=mock.ANY, timeout=42 - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join(["", "storage", client._connection.API_VERSION, "b", NONESUCH]), + connection.api_request.assert_called_once_with( + method="POST", + path=path, + data=data, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["projection"], "noAcl") - def test_lookup_bucket_hit(self): - from google.cloud.storage.bucket import Bucket + def test__delete_resource_miss_w_defaults(self): + from google.cloud.exceptions import NotFound - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + project = "PROJECT" + path = "/path/to/something" + credentials = _make_credentials() - BUCKET_NAME = "bucket-name" - data = {"name": BUCKET_NAME} - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http + client = self._make_one(project=project, credentials=credentials) + connection = client._base_connection = _make_connection() - bucket = client.lookup_bucket(BUCKET_NAME) + with self.assertRaises(NotFound): + client._delete_resource(path) - 
self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, BUCKET_NAME) - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, + connection.api_request.assert_called_once_with( + method="DELETE", + path=path, + query_params=None, + headers=None, timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=None, ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( + + def test__delete_resource_hit_w_explicit(self): + project = "PROJECT" + path = "/path/to/something" + query_params = {"foo": "Foo"} + headers = {"bar": "Bar"} + timeout = 100 + retry = mock.Mock(spec=[]) + credentials = _make_credentials() + + client = self._make_one(project=project, credentials=credentials) + expected = mock.Mock(spec={}) + connection = client._base_connection = _make_connection(expected) + target = mock.Mock(spec={}) + + found = client._delete_resource( path, - "/".join(["", "storage", client._connection.API_VERSION, "b", BUCKET_NAME]), + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["projection"], "noAcl") - def test_lookup_bucket_with_metageneration_match(self): - from google.cloud.storage.bucket import Bucket + self.assertIs(found, expected) - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - METAGENERATION_NUMBER = 6 - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + connection.api_request.assert_called_once_with( + method="DELETE", + path=path, + query_params=query_params, + headers=headers, + timeout=timeout, + retry=retry, + _target_object=target, + ) - BUCKET_NAME = "bucket-name" - data = {"name": BUCKET_NAME} - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http + def test_get_bucket_miss_w_string_w_defaults(self): + from google.cloud.exceptions import NotFound + from google.cloud.storage.bucket import Bucket - bucket = client.lookup_bucket( - BUCKET_NAME, if_metageneration_match=METAGENERATION_NUMBER - ) - self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, BUCKET_NAME) - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, + project = "PROJECT" + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock() + client._get_resource.side_effect = NotFound("testing") + bucket_name = "nonesuch" + + with self.assertRaises(NotFound): + client.get_bucket(bucket_name) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=mock.ANY, ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join(["", "storage", client._connection.API_VERSION, "b", BUCKET_NAME]), - ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["projection"], "noAcl") - self.assertEqual(parms["ifMetagenerationMatch"], 
str(METAGENERATION_NUMBER)) - def test_lookup_bucket_default_retry(self): + target = client._get_resource.call_args[1]["_target_object"] + self.assertIsInstance(target, Bucket) + self.assertEqual(target.name, bucket_name) + + def test_get_bucket_hit_w_string_w_timeout(self): from google.cloud.storage.bucket import Bucket - from google.cloud.storage._http import Connection - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + project = "PROJECT" + bucket_name = "bucket-name" + timeout = 42 + api_response = {"name": bucket_name} + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(return_value=api_response) + + bucket = client.get_bucket(bucket_name, timeout=timeout) + + self.assertIsInstance(bucket, Bucket) + self.assertEqual(bucket.name, bucket_name) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=timeout, + retry=DEFAULT_RETRY, + _target_object=bucket, + ) + + def test_get_bucket_hit_w_string_w_metageneration_match(self): + from google.cloud.storage.bucket import Bucket + project = "PROJECT" bucket_name = "bucket-name" + metageneration_number = 6 + api_response = {"name": bucket_name} + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(return_value=api_response) + + bucket = client.get_bucket( + bucket_name, if_metageneration_match=metageneration_number + ) + + self.assertIsInstance(bucket, Bucket) + self.assertEqual(bucket.name, bucket_name) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = { + "projection": "noAcl", + "ifMetagenerationMatch": metageneration_number, + } + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=bucket, + ) + + def test_get_bucket_miss_w_object_w_retry(self): + from google.cloud.exceptions import NotFound + from google.cloud.storage.bucket import Bucket + + project = "PROJECT" + bucket_name = "nonesuch" + retry = mock.Mock(spec=[]) + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(side_effect=NotFound("testing")) bucket_obj = Bucket(client, bucket_name) - with mock.patch.object(Connection, "api_request") as req: - client.lookup_bucket(bucket_obj) - req.assert_called_once_with( - method="GET", - path=mock.ANY, - query_params=mock.ANY, - headers=mock.ANY, - _target_object=bucket_obj, - timeout=mock.ANY, - retry=DEFAULT_RETRY, - ) + with self.assertRaises(NotFound): + client.get_bucket(bucket_obj, retry=retry) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=retry, + _target_object=mock.ANY, + ) + + target = client._get_resource.call_args[1]["_target_object"] + self.assertIsInstance(target, Bucket) + self.assertEqual(target.name, bucket_name) + + def 
test_get_bucket_hit_w_object_defaults(self): + from google.cloud.storage.bucket import Bucket + + project = "PROJECT" + bucket_name = "bucket-name" + api_response = {"name": bucket_name} + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(return_value=api_response) + bucket_obj = Bucket(client, bucket_name) + + bucket = client.get_bucket(bucket_obj) + + self.assertIsInstance(bucket, Bucket) + self.assertEqual(bucket.name, bucket_name) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=bucket, + ) + + def test_get_bucket_hit_w_object_w_retry_none(self): + from google.cloud.storage.bucket import Bucket + + project = "PROJECT" + bucket_name = "bucket-name" + api_response = {"name": bucket_name} + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(return_value=api_response) + bucket_obj = Bucket(client, bucket_name) + + bucket = client.get_bucket(bucket_obj, retry=None) + + self.assertIsInstance(bucket, Bucket) + self.assertEqual(bucket.name, bucket_name) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=None, + _target_object=bucket, + ) + + def test_lookup_bucket_miss_w_defaults(self): + from google.cloud.exceptions import NotFound + from google.cloud.storage.bucket import Bucket + + project = "PROJECT" + bucket_name = "nonesuch" + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(side_effect=NotFound("testing")) + + bucket = client.lookup_bucket(bucket_name) + + self.assertIsNone(bucket) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=mock.ANY, + ) + + target = client._get_resource.call_args[1]["_target_object"] + self.assertIsInstance(target, Bucket) + self.assertEqual(target.name, bucket_name) + + def test_lookup_bucket_hit_w_timeout(self): + from google.cloud.storage.bucket import Bucket + + project = "PROJECT" + bucket_name = "bucket-name" + timeout = 42 + api_response = {"name": bucket_name} + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(return_value=api_response) + + bucket = client.lookup_bucket(bucket_name, timeout=timeout) + + self.assertIsInstance(bucket, Bucket) + self.assertEqual(bucket.name, bucket_name) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=timeout, + retry=DEFAULT_RETRY, + _target_object=bucket, + ) + + def 
test_lookup_bucket_hit_w_metageneration_match(self): + from google.cloud.storage.bucket import Bucket + + project = "PROJECT" + bucket_name = "bucket-name" + api_response = {"name": bucket_name} + credentials = _make_credentials() + metageneration_number = 6 + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(return_value=api_response) + + bucket = client.lookup_bucket( + bucket_name, if_metageneration_match=metageneration_number + ) + + self.assertIsInstance(bucket, Bucket) + self.assertEqual(bucket.name, bucket_name) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = { + "projection": "noAcl", + "ifMetagenerationMatch": metageneration_number, + } + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=bucket, + ) + + def test_lookup_bucket_hit_w_retry(self): + from google.cloud.storage.bucket import Bucket + + project = "PROJECT" + bucket_name = "bucket-name" + api_response = {"name": bucket_name} + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._get_resource = mock.Mock(return_value=api_response) + bucket_obj = Bucket(client, bucket_name) + + bucket = client.lookup_bucket(bucket_obj, retry=None) + + self.assertIsInstance(bucket, Bucket) + self.assertEqual(bucket.name, bucket_name) + + expected_path = "/b/%s" % (bucket_name,) + expected_query_params = {"projection": "noAcl"} + expected_headers = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + headers=expected_headers, + timeout=self._get_default_timeout(), + retry=None, + _target_object=bucket, + ) def test_create_bucket_w_missing_client_project(self): credentials = _make_credentials() @@ -752,7 +1088,7 @@ def test_create_bucket_w_missing_client_project(self): with self.assertRaises(ValueError): client.create_bucket("bucket") - def test_create_bucket_w_conflict(self): + def test_create_bucket_w_conflict_w_user_project(self): from google.cloud.exceptions import Conflict project = "PROJECT" @@ -760,62 +1096,60 @@ def test_create_bucket_w_conflict(self): other_project = "OTHER_PROJECT" credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) - connection = _make_connection() - client._base_connection = connection - connection.api_request.side_effect = Conflict("testing") + client._post_resource = mock.Mock() + client._post_resource.side_effect = Conflict("testing") bucket_name = "bucket-name" - data = {"name": bucket_name} with self.assertRaises(Conflict): client.create_bucket( bucket_name, project=other_project, user_project=user_project ) - connection.api_request.assert_called_once_with( - method="POST", - path="/b", - query_params={"project": other_project, "userProject": user_project}, - data=data, - _target_object=mock.ANY, + expected_path = "/b" + expected_data = {"name": bucket_name} + expected_query_params = { + "project": other_project, + "userProject": user_project, + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, + _target_object=mock.ANY, ) @mock.patch("warnings.warn") - def test_create_requester_pays_deprecated(self, mock_warn): + def test_create_bucket_w_requester_pays_deprecated(self, 
mock_warn): from google.cloud.storage.bucket import Bucket + bucket_name = "bucket-name" project = "PROJECT" credentials = _make_credentials() + api_response = {"name": bucket_name, "billing": {"requesterPays": True}} client = self._make_one(project=project, credentials=credentials) - bucket_name = "bucket-name" - json_expected = {"name": bucket_name, "billing": {"requesterPays": True}} - http = _make_requests_session([_make_json_response(json_expected)]) - client._http_internal = http + client._post_resource = mock.Mock() + client._post_resource.return_value = api_response bucket = client.create_bucket(bucket_name, requester_pays=True) self.assertIsInstance(bucket, Bucket) self.assertEqual(bucket.name, bucket_name) self.assertTrue(bucket.requester_pays) - http.request.assert_called_once_with( - method="POST", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=mock.ANY, - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, "/".join(["", "storage", client._connection.API_VERSION, "b"]) + + expected_path = "/b" + expected_data = api_response + expected_query_params = {"project": project} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=mock.ANY, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["project"], project) - json_sent = http.request.call_args_list[0][1]["data"] - self.assertEqual(json_expected, json.loads(json_sent)) mock_warn.assert_called_with( "requester_pays arg is deprecated. Use Bucket().requester_pays instead.", @@ -828,31 +1162,40 @@ def test_create_bucket_w_predefined_acl_invalid(self): bucket_name = "bucket-name" credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) + client._post_resource = mock.Mock() with self.assertRaises(ValueError): client.create_bucket(bucket_name, predefined_acl="bogus") - def test_create_bucket_w_predefined_acl_valid(self): + client._post_resource.assert_not_called() + + def test_create_bucket_w_predefined_acl_valid_w_timeout(self): project = "PROJECT" bucket_name = "bucket-name" - data = {"name": bucket_name} - + api_response = {"name": bucket_name} credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) - connection = _make_connection(data) - client._base_connection = connection + client._post_resource = mock.Mock() + client._post_resource.return_value = api_response + timeout = 42 + bucket = client.create_bucket( - bucket_name, predefined_acl="publicRead", timeout=42 + bucket_name, predefined_acl="publicRead", timeout=timeout, ) - connection.api_request.assert_called_once_with( - method="POST", - path="/b", - query_params={"project": project, "predefinedAcl": "publicRead"}, - data=data, - _target_object=bucket, - timeout=42, + expected_path = "/b" + expected_data = api_response + expected_query_params = { + "project": project, + "predefinedAcl": "publicRead", + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, retry=DEFAULT_RETRY, + _target_object=bucket, ) def test_create_bucket_w_predefined_default_object_acl_invalid(self): @@ -861,93 +1204,98 @@ def test_create_bucket_w_predefined_default_object_acl_invalid(self): credentials = 
_make_credentials() client = self._make_one(project=project, credentials=credentials) + client._post_resource = mock.Mock() with self.assertRaises(ValueError): client.create_bucket(bucket_name, predefined_default_object_acl="bogus") - def test_create_bucket_w_predefined_default_object_acl_valid(self): + client._post_resource.assert_not_called() + + def test_create_bucket_w_predefined_default_object_acl_valid_w_retry(self): project = "PROJECT" bucket_name = "bucket-name" - data = {"name": bucket_name} - + api_response = {"name": bucket_name} credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) - connection = _make_connection(data) - client._base_connection = connection + client._post_resource = mock.Mock() + client._post_resource.return_value = api_response + retry = mock.Mock(spec=[]) + bucket = client.create_bucket( - bucket_name, predefined_default_object_acl="publicRead" + bucket_name, predefined_default_object_acl="publicRead", retry=retry, ) - connection.api_request.assert_called_once_with( - method="POST", - path="/b", - query_params={ - "project": project, - "predefinedDefaultObjectAcl": "publicRead", - }, - data=data, - _target_object=bucket, + expected_path = "/b" + expected_data = api_response + expected_query_params = { + "project": project, + "predefinedDefaultObjectAcl": "publicRead", + } + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + retry=retry, + _target_object=bucket, ) def test_create_bucket_w_explicit_location(self): project = "PROJECT" bucket_name = "bucket-name" location = "us-central1" - data = {"location": location, "name": bucket_name} - - connection = _make_connection( - data, "{'location': 'us-central1', 'name': 'bucket-name'}" - ) - + api_response = {"location": location, "name": bucket_name} credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) - client._base_connection = connection + client._post_resource = mock.Mock() + client._post_resource.return_value = api_response bucket = client.create_bucket(bucket_name, location=location) - connection.api_request.assert_called_once_with( - method="POST", - path="/b", - data=data, - _target_object=bucket, - query_params={"project": project}, + self.assertEqual(bucket.location, location) + + expected_path = "/b" + expected_data = {"location": location, "name": bucket_name} + expected_query_params = {"project": project} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, + _target_object=bucket, ) - self.assertEqual(bucket.location, location) def test_create_bucket_w_explicit_project(self): - from google.cloud.storage.client import Client - - PROJECT = "PROJECT" - OTHER_PROJECT = "other-project-123" - BUCKET_NAME = "bucket-name" - DATA = {"name": BUCKET_NAME} - connection = _make_connection(DATA) - - client = Client(project=PROJECT) - client._base_connection = connection - - bucket = client.create_bucket(BUCKET_NAME, project=OTHER_PROJECT) - connection.api_request.assert_called_once_with( - method="POST", - path="/b", - query_params={"project": OTHER_PROJECT}, - data=DATA, - _target_object=bucket, + project = "PROJECT" + other_project = "other-project-123" + bucket_name = "bucket-name" + api_response = {"name": bucket_name} + credentials = _make_credentials() + client = 
self._make_one(project=project, credentials=credentials) + client._post_resource = mock.Mock() + client._post_resource.return_value = api_response + + bucket = client.create_bucket(bucket_name, project=other_project) + + expected_path = "/b" + expected_data = api_response + expected_query_params = {"project": other_project} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, + _target_object=bucket, ) - def test_create_w_extra_properties(self): - from google.cloud.storage.client import Client + def test_create_bucket_w_extra_properties(self): from google.cloud.storage.bucket import Bucket - BUCKET_NAME = "bucket-name" - PROJECT = "PROJECT" - CORS = [ + bucket_name = "bucket-name" + project = "PROJECT" + cors = [ { "maxAgeSeconds": 60, "methods": ["*"], @@ -955,157 +1303,82 @@ def test_create_w_extra_properties(self): "responseHeader": ["X-Custom-Header"], } ] - LIFECYCLE_RULES = [{"action": {"type": "Delete"}, "condition": {"age": 365}}] - LOCATION = "eu" - LABELS = {"color": "red", "flavor": "cherry"} - STORAGE_CLASS = "NEARLINE" - DATA = { - "name": BUCKET_NAME, - "cors": CORS, - "lifecycle": {"rule": LIFECYCLE_RULES}, - "location": LOCATION, - "storageClass": STORAGE_CLASS, + lifecycle_rules = [{"action": {"type": "Delete"}, "condition": {"age": 365}}] + location = "eu" + labels = {"color": "red", "flavor": "cherry"} + storage_class = "NEARLINE" + api_response = { + "name": bucket_name, + "cors": cors, + "lifecycle": {"rule": lifecycle_rules}, + "location": location, + "storageClass": storage_class, "versioning": {"enabled": True}, "billing": {"requesterPays": True}, - "labels": LABELS, + "labels": labels, } + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._post_resource = mock.Mock() + client._post_resource.return_value = api_response - connection = _make_connection(DATA) - client = Client(project=PROJECT) - client._base_connection = connection - - bucket = Bucket(client=client, name=BUCKET_NAME) - bucket.cors = CORS - bucket.lifecycle_rules = LIFECYCLE_RULES - bucket.storage_class = STORAGE_CLASS + bucket = Bucket(client=client, name=bucket_name) + bucket.cors = cors + bucket.lifecycle_rules = lifecycle_rules + bucket.storage_class = storage_class bucket.versioning_enabled = True bucket.requester_pays = True - bucket.labels = LABELS - client.create_bucket(bucket, location=LOCATION) - - connection.api_request.assert_called_once_with( - method="POST", - path="/b", - query_params={"project": PROJECT}, - data=DATA, - _target_object=bucket, - timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, - ) + bucket.labels = labels - def test_create_hit(self): - from google.cloud.storage.client import Client + client.create_bucket(bucket, location=location) - PROJECT = "PROJECT" - BUCKET_NAME = "bucket-name" - DATA = {"name": BUCKET_NAME} - connection = _make_connection(DATA) - client = Client(project=PROJECT) - client._base_connection = connection - - bucket = client.create_bucket(BUCKET_NAME) - - connection.api_request.assert_called_once_with( - method="POST", - path="/b", - query_params={"project": PROJECT}, - data=DATA, - _target_object=bucket, + expected_path = "/b" + expected_data = api_response + expected_query_params = {"project": project} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, timeout=self._get_default_timeout(), 
retry=DEFAULT_RETRY, + _target_object=bucket, ) - def test_create_bucket_w_string_success(self): - from google.cloud.storage.bucket import Bucket - + def test_create_bucket_w_name_only(self): project = "PROJECT" - credentials = _make_credentials() - client = self._make_one(project=project, credentials=credentials) - bucket_name = "bucket-name" - json_expected = {"name": bucket_name} - data = json_expected - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http - - bucket = client.create_bucket(bucket_name) - - self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, bucket_name) - http.request.assert_called_once_with( - method="POST", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=mock.ANY, - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, "/".join(["", "storage", client._connection.API_VERSION, "b"]), - ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["project"], project) - json_sent = http.request.call_args_list[0][1]["data"] - self.assertEqual(json_expected, json.loads(json_sent)) - - def test_create_bucket_w_object_success(self): - from google.cloud.storage.bucket import Bucket - - project = "PROJECT" + api_response = {"name": bucket_name} credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) + client._post_resource = mock.Mock() + client._post_resource.return_value = api_response - bucket_name = "bucket-name" - bucket_obj = Bucket(client, bucket_name) - bucket_obj.storage_class = "COLDLINE" - bucket_obj.requester_pays = True - - json_expected = { - "name": bucket_name, - "billing": {"requesterPays": True}, - "storageClass": "COLDLINE", - } - data = json_expected - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http - - bucket = client.create_bucket(bucket_obj) + bucket = client.create_bucket(bucket_name) - self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, bucket_name) - self.assertTrue(bucket.requester_pays) - http.request.assert_called_once_with( - method="POST", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=mock.ANY, - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, "/".join(["", "storage", client._connection.API_VERSION, "b"]), + expected_path = "/b" + expected_data = api_response + expected_query_params = {"project": project} + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + _target_object=bucket, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["project"], project) - json_sent = http.request.call_args_list[0][1]["data"] - self.assertEqual(json_expected, json.loads(json_sent)) def test_download_blob_to_file_with_failure(self): from google.resumable_media import InvalidResponse from google.cloud.storage.blob import Blob from google.cloud.storage.constants import _DEFAULT_TIMEOUT + project = "PROJECT" raw_response = requests.Response() raw_response.status_code = http_client.NOT_FOUND raw_request = requests.Request("GET", "http://example.com") raw_response.request = raw_request.prepare() grmp_response 
= InvalidResponse(raw_response) - - credentials = _make_credentials() + credentials = _make_credentials(project=project) client = self._make_one(credentials=credentials) blob = mock.create_autospec(Blob) blob._encryption_key = None @@ -1130,13 +1403,14 @@ def test_download_blob_to_file_with_failure(self): False, checksum="md5", timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY, ) def test_download_blob_to_file_with_uri(self): from google.cloud.storage.constants import _DEFAULT_TIMEOUT project = "PROJECT" - credentials = _make_credentials() + credentials = _make_credentials(project=project) client = self._make_one(project=project, credentials=credentials) blob = mock.Mock() file_obj = io.BytesIO() @@ -1160,359 +1434,334 @@ def test_download_blob_to_file_with_uri(self): False, checksum="md5", timeout=_DEFAULT_TIMEOUT, + retry=DEFAULT_RETRY, ) - def test_download_blob_to_file_with_invalid_uri(self): - project = "PROJECT" - credentials = _make_credentials() - client = self._make_one(project=project, credentials=credentials) - file_obj = io.BytesIO() - - with pytest.raises(ValueError, match="URI scheme must be gs"): - client.download_blob_to_file("http://bucket_name/path/to/object", file_obj) - - def _download_blob_to_file_helper(self, use_chunks, raw_download): - from google.cloud.storage.blob import Blob - from google.cloud.storage.constants import _DEFAULT_TIMEOUT - - credentials = _make_credentials() - client = self._make_one(credentials=credentials) - blob = mock.create_autospec(Blob) - blob._encryption_key = None - blob._get_download_url = mock.Mock() - if use_chunks: - blob._CHUNK_SIZE_MULTIPLE = 1 - blob.chunk_size = 3 - blob._do_download = mock.Mock() - - file_obj = io.BytesIO() - if raw_download: - client.download_blob_to_file(blob, file_obj, raw_download=True) - else: - client.download_blob_to_file(blob, file_obj) - - headers = {"accept-encoding": "gzip"} - blob._do_download.assert_called_once_with( - client._http, - file_obj, - blob._get_download_url(), - headers, - None, - None, - raw_download, - checksum="md5", - timeout=_DEFAULT_TIMEOUT, - ) - - def test_download_blob_to_file_wo_chunks_wo_raw(self): - self._download_blob_to_file_helper(use_chunks=False, raw_download=False) - - def test_download_blob_to_file_w_chunks_wo_raw(self): - self._download_blob_to_file_helper(use_chunks=True, raw_download=False) - - def test_download_blob_to_file_wo_chunks_w_raw(self): - self._download_blob_to_file_helper(use_chunks=False, raw_download=True) - - def test_download_blob_to_file_w_chunks_w_raw(self): - self._download_blob_to_file_helper(use_chunks=True, raw_download=True) - - def test_list_blobs(self): - from google.cloud.storage.bucket import Bucket - - BUCKET_NAME = "bucket-name" - - credentials = _make_credentials() - client = self._make_one(project="PROJECT", credentials=credentials) - connection = _make_connection({"items": []}) - - with mock.patch( - "google.cloud.storage.client.Client._connection", - new_callable=mock.PropertyMock, - ) as client_mock: - client_mock.return_value = connection - - bucket_obj = Bucket(client, BUCKET_NAME) - iterator = client.list_blobs(bucket_obj) - blobs = list(iterator) - - self.assertEqual(blobs, []) - connection.api_request.assert_called_once_with( - method="GET", - path="/b/%s/o" % BUCKET_NAME, - query_params={"projection": "noAcl"}, - timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, - ) - - def test_list_blobs_w_all_arguments_and_user_project(self): - from google.cloud.storage.bucket import Bucket - - BUCKET_NAME = "name" - USER_PROJECT = 
"user-project-123" - MAX_RESULTS = 10 - PAGE_TOKEN = "ABCD" - PREFIX = "subfolder" - DELIMITER = "/" - START_OFFSET = "c" - END_OFFSET = "g" - INCLUDE_TRAILING_DELIMITER = True - VERSIONS = True - PROJECTION = "full" - FIELDS = "items/contentLanguage,nextPageToken" - EXPECTED = { - "maxResults": 10, - "pageToken": PAGE_TOKEN, - "prefix": PREFIX, - "delimiter": DELIMITER, - "startOffset": START_OFFSET, - "endOffset": END_OFFSET, - "includeTrailingDelimiter": INCLUDE_TRAILING_DELIMITER, - "versions": VERSIONS, - "projection": PROJECTION, - "fields": FIELDS, - "userProject": USER_PROJECT, - } - - credentials = _make_credentials() - client = self._make_one(project=USER_PROJECT, credentials=credentials) - connection = _make_connection({"items": []}) - - with mock.patch( - "google.cloud.storage.client.Client._connection", - new_callable=mock.PropertyMock, - ) as client_mock: - client_mock.return_value = connection - - bucket = Bucket(client, BUCKET_NAME, user_project=USER_PROJECT) - iterator = client.list_blobs( - bucket_or_name=bucket, - max_results=MAX_RESULTS, - page_token=PAGE_TOKEN, - prefix=PREFIX, - delimiter=DELIMITER, - start_offset=START_OFFSET, - end_offset=END_OFFSET, - include_trailing_delimiter=INCLUDE_TRAILING_DELIMITER, - versions=VERSIONS, - projection=PROJECTION, - fields=FIELDS, - timeout=42, - ) - blobs = list(iterator) - - self.assertEqual(blobs, []) - connection.api_request.assert_called_once_with( - method="GET", - path="/b/%s/o" % BUCKET_NAME, - query_params=EXPECTED, - timeout=42, - retry=DEFAULT_RETRY, - ) - - def test_list_buckets_wo_project(self): - CREDENTIALS = _make_credentials() - client = self._make_one(project=None, credentials=CREDENTIALS) - - with self.assertRaises(ValueError): - client.list_buckets() - - def test_list_buckets_empty(self): - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - - http = _make_requests_session([_make_json_response({})]) - client._http_internal = http - - buckets = list(client.list_buckets()) - - self.assertEqual(len(buckets), 0) - - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=mock.ANY, - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, "/".join(["", "storage", client._connection.API_VERSION, "b"]) - ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["project"], PROJECT) - self.assertEqual(parms["projection"], "noAcl") - - def test_list_buckets_explicit_project(self): - PROJECT = "PROJECT" - OTHER_PROJECT = "OTHER_PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - - http = _make_requests_session([_make_json_response({})]) - client._http_internal = http - - buckets = list(client.list_buckets(project=OTHER_PROJECT)) + def test_download_blob_to_file_with_invalid_uri(self): + project = "PROJECT" + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + file_obj = io.BytesIO() - self.assertEqual(len(buckets), 0) + with pytest.raises(ValueError, match="URI scheme must be gs"): + client.download_blob_to_file("http://bucket_name/path/to/object", file_obj) - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=mock.ANY, + def 
test_download_blob_to_file_w_no_retry(self): + self._download_blob_to_file_helper( + use_chunks=True, raw_download=True, retry=None ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, "/".join(["", "storage", client._connection.API_VERSION, "b"]) + + def test_download_blob_to_file_w_conditional_retry_pass(self): + self._download_blob_to_file_helper( + use_chunks=True, + raw_download=True, + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + if_generation_match=1, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["project"], str(OTHER_PROJECT)) - self.assertEqual(parms["projection"], "noAcl") - def test_list_buckets_non_empty(self): - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + def test_download_blob_to_file_w_conditional_retry_fail(self): + self._download_blob_to_file_helper( + use_chunks=True, + raw_download=True, + retry=DEFAULT_RETRY_IF_GENERATION_SPECIFIED, + expect_condition_fail=True, + ) - BUCKET_NAME = "bucket-name" + def _download_blob_to_file_helper( + self, use_chunks, raw_download, expect_condition_fail=False, **extra_kwargs + ): + from google.cloud.storage.blob import Blob + from google.cloud.storage.constants import _DEFAULT_TIMEOUT - data = {"items": [{"name": BUCKET_NAME}]} - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http + project = "PROJECT" + credentials = _make_credentials(project=project) + client = self._make_one(credentials=credentials) + blob = mock.create_autospec(Blob) + blob._encryption_key = None + blob._get_download_url = mock.Mock() + if use_chunks: + blob._CHUNK_SIZE_MULTIPLE = 1 + blob.chunk_size = 3 + blob._do_download = mock.Mock() - buckets = list(client.list_buckets()) + file_obj = io.BytesIO() + if raw_download: + client.download_blob_to_file( + blob, file_obj, raw_download=True, **extra_kwargs + ) + else: + client.download_blob_to_file(blob, file_obj, **extra_kwargs) - self.assertEqual(len(buckets), 1) - self.assertEqual(buckets[0].name, BUCKET_NAME) + expected_retry = extra_kwargs.get("retry", DEFAULT_RETRY) + if ( + expected_retry is DEFAULT_RETRY_IF_GENERATION_SPECIFIED + and not expect_condition_fail + ): + expected_retry = DEFAULT_RETRY + elif expect_condition_fail: + expected_retry = None - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=self._get_default_timeout(), + headers = {"accept-encoding": "gzip"} + blob._do_download.assert_called_once_with( + client._http, + file_obj, + blob._get_download_url(), + headers, + None, + None, + raw_download, + checksum="md5", + timeout=_DEFAULT_TIMEOUT, + retry=expected_retry, ) - def test_list_buckets_all_arguments(self): - PROJECT = "foo-bar" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + def test_download_blob_to_file_wo_chunks_wo_raw(self): + self._download_blob_to_file_helper(use_chunks=False, raw_download=False) - MAX_RESULTS = 10 - PAGE_TOKEN = "ABCD" - PREFIX = "subfolder" - PROJECTION = "full" - FIELDS = "items/id,nextPageToken" + def test_download_blob_to_file_w_chunks_wo_raw(self): + self._download_blob_to_file_helper(use_chunks=True, raw_download=False) - data = {"items": []} - http = _make_requests_session([_make_json_response(data)]) - client._http_internal = http - 
iterator = client.list_buckets( - max_results=MAX_RESULTS, - page_token=PAGE_TOKEN, - prefix=PREFIX, - projection=PROJECTION, - fields=FIELDS, - timeout=42, - ) - buckets = list(iterator) - self.assertEqual(buckets, []) - http.request.assert_called_once_with( - method="GET", url=mock.ANY, data=mock.ANY, headers=mock.ANY, timeout=42 - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, "/".join(["", "storage", client._connection.API_VERSION, "b"]) - ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["project"], PROJECT) - self.assertEqual(parms["maxResults"], str(MAX_RESULTS)) - self.assertEqual(parms["pageToken"], PAGE_TOKEN) - self.assertEqual(parms["prefix"], PREFIX) - self.assertEqual(parms["projection"], PROJECTION) - self.assertEqual(parms["fields"], FIELDS) + def test_download_blob_to_file_wo_chunks_w_raw(self): + self._download_blob_to_file_helper(use_chunks=False, raw_download=True) + + def test_download_blob_to_file_w_chunks_w_raw(self): + self._download_blob_to_file_helper(use_chunks=True, raw_download=True) - def test_list_buckets_page_empty_response(self): - from google.api_core import page_iterator + def test_list_blobs_w_defaults_w_bucket_obj(self): + from google.cloud.storage.bucket import Bucket + from google.cloud.storage.bucket import _blobs_page_start + from google.cloud.storage.bucket import _item_to_blob project = "PROJECT" + bucket_name = "bucket-name" credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) - iterator = client.list_buckets() - page = page_iterator.Page(iterator, (), None) - iterator._page = page - self.assertEqual(list(page), []) + client._list_resource = mock.Mock(spec=[]) + bucket = Bucket(client, bucket_name) + + iterator = client.list_blobs(bucket) + + self.assertIs(iterator, client._list_resource.return_value) + self.assertIs(iterator.bucket, bucket) + self.assertEqual(iterator.prefixes, set()) + + expected_path = "/b/{}/o".format(bucket_name) + expected_item_to_value = _item_to_blob + expected_page_token = None + expected_max_results = None + expected_extra_params = {"projection": "noAcl"} + expected_page_start = _blobs_page_start + client._list_resource.assert_called_once_with( + expected_path, + expected_item_to_value, + page_token=expected_page_token, + max_results=expected_max_results, + extra_params=expected_extra_params, + page_start=expected_page_start, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) - def test_list_buckets_page_non_empty_response(self): - import six - from google.cloud.storage.bucket import Bucket + def test_list_blobs_w_explicit_w_user_project(self): + from google.cloud.storage.bucket import _blobs_page_start + from google.cloud.storage.bucket import _item_to_blob project = "PROJECT" + user_project = "user-project-123" + bucket_name = "name" + max_results = 10 + page_token = "ABCD" + prefix = "subfolder" + delimiter = "/" + start_offset = "c" + end_offset = "g" + include_trailing_delimiter = True + versions = True + projection = "full" + fields = "items/contentLanguage,nextPageToken" credentials = _make_credentials() client = self._make_one(project=project, credentials=credentials) + client._list_resource = mock.Mock(spec=[]) + client._bucket_arg_to_bucket = mock.Mock(spec=[]) + bucket = client._bucket_arg_to_bucket.return_value = mock.Mock( + spec=["path", "user_project"], + ) + 
bucket.path = "/b/{}".format(bucket_name) + bucket.user_project = user_project + timeout = 42 + retry = mock.Mock(spec=[]) + + iterator = client.list_blobs( + bucket_or_name=bucket_name, + max_results=max_results, + page_token=page_token, + prefix=prefix, + delimiter=delimiter, + start_offset=start_offset, + end_offset=end_offset, + include_trailing_delimiter=include_trailing_delimiter, + versions=versions, + projection=projection, + fields=fields, + timeout=timeout, + retry=retry, + ) + + self.assertIs(iterator, client._list_resource.return_value) + self.assertIs(iterator.bucket, bucket) + self.assertEqual(iterator.prefixes, set()) + + expected_path = "/b/{}/o".format(bucket_name) + expected_item_to_value = _item_to_blob + expected_page_token = page_token + expected_max_results = max_results + expected_extra_params = { + "projection": projection, + "prefix": prefix, + "delimiter": delimiter, + "startOffset": start_offset, + "endOffset": end_offset, + "includeTrailingDelimiter": include_trailing_delimiter, + "versions": versions, + "fields": fields, + "userProject": user_project, + } + expected_page_start = _blobs_page_start + client._list_resource.assert_called_once_with( + expected_path, + expected_item_to_value, + page_token=expected_page_token, + max_results=expected_max_results, + extra_params=expected_extra_params, + page_start=expected_page_start, + timeout=timeout, + retry=retry, + ) + + def test_list_buckets_wo_project(self): + credentials = _make_credentials() + client = self._make_one(project=None, credentials=credentials) + + with self.assertRaises(ValueError): + client.list_buckets() - blob_name = "bucket-name" - response = {"items": [{"name": blob_name}]} + def test_list_buckets_w_defaults(self): + from google.cloud.storage.client import _item_to_bucket - def fake_response(): - return response + project = "PROJECT" + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._list_resource = mock.Mock(spec=[]) iterator = client.list_buckets() - iterator._get_next_page_response = fake_response - page = six.next(iterator.pages) - self.assertEqual(page.num_items, 1) - bucket = six.next(page) - self.assertEqual(page.remaining, 0) - self.assertIsInstance(bucket, Bucket) - self.assertEqual(bucket.name, blob_name) + self.assertIs(iterator, client._list_resource.return_value) + + expected_path = "/b" + expected_item_to_value = _item_to_bucket + expected_page_token = None + expected_max_results = None + expected_extra_params = { + "project": project, + "projection": "noAcl", + } + client._list_resource.assert_called_once_with( + expected_path, + expected_item_to_value, + page_token=expected_page_token, + max_results=expected_max_results, + extra_params=expected_extra_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, + ) + + def test_list_buckets_w_explicit(self): + from google.cloud.storage.client import _item_to_bucket + + project = "foo-bar" + other_project = "OTHER_PROJECT" + max_results = 10 + page_token = "ABCD" + prefix = "subfolder" + projection = "full" + fields = "items/id,nextPageToken" + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._list_resource = mock.Mock(spec=[]) + timeout = 42 + retry = mock.Mock(spec=[]) + + iterator = client.list_buckets( + project=other_project, + max_results=max_results, + page_token=page_token, + prefix=prefix, + projection=projection, + fields=fields, + timeout=timeout, + retry=retry, + ) + + 
self.assertIs(iterator, client._list_resource.return_value) + + expected_path = "/b" + expected_item_to_value = _item_to_bucket + expected_page_token = page_token + expected_max_results = max_results + expected_extra_params = { + "project": other_project, + "prefix": prefix, + "projection": projection, + "fields": fields, + } + client._list_resource.assert_called_once_with( + expected_path, + expected_item_to_value, + page_token=expected_page_token, + max_results=expected_max_results, + extra_params=expected_extra_params, + timeout=timeout, + retry=retry, + ) def _create_hmac_key_helper( - self, explicit_project=None, user_project=None, timeout=None + self, explicit_project=None, user_project=None, timeout=None, retry=None, ): import datetime from pytz import UTC from google.cloud.storage.hmac_key import HMACKeyMetadata - PROJECT = "PROJECT" - ACCESS_ID = "ACCESS-ID" - CREDENTIALS = _make_credentials() - EMAIL = "storage-user-123@example.com" - SECRET = "a" * 40 + project = "PROJECT" + access_id = "ACCESS-ID" + credentials = _make_credentials() + email = "storage-user-123@example.com" + secret = "a" * 40 now = datetime.datetime.utcnow().replace(tzinfo=UTC) now_stamp = "{}Z".format(now.isoformat()) if explicit_project is not None: expected_project = explicit_project else: - expected_project = PROJECT + expected_project = project - RESOURCE = { + api_response = { "kind": "storage#hmacKey", "metadata": { - "accessId": ACCESS_ID, + "accessId": access_id, "etag": "ETAG", - "id": "projects/{}/hmacKeys/{}".format(PROJECT, ACCESS_ID), + "id": "projects/{}/hmacKeys/{}".format(project, access_id), "project": expected_project, "state": "ACTIVE", - "serviceAccountEmail": EMAIL, + "serviceAccountEmail": email, "timeCreated": now_stamp, "updated": now_stamp, }, - "secret": SECRET, + "secret": secret, } - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - http = _make_requests_session([_make_json_response(RESOURCE)]) - client._http_internal = http + client = self._make_one(project=project, credentials=credentials) + client._post_resource = mock.Mock() + client._post_resource.return_value = api_response kwargs = {} if explicit_project is not None: @@ -1522,43 +1771,37 @@ def _create_hmac_key_helper( kwargs["user_project"] = user_project if timeout is None: - timeout = self._get_default_timeout() - kwargs["timeout"] = timeout + expected_timeout = self._get_default_timeout() + else: + expected_timeout = kwargs["timeout"] = timeout + + if retry is None: + expected_retry = None + else: + expected_retry = kwargs["retry"] = retry - metadata, secret = client.create_hmac_key(service_account_email=EMAIL, **kwargs) + metadata, secret = client.create_hmac_key(service_account_email=email, **kwargs) self.assertIsInstance(metadata, HMACKeyMetadata) + self.assertIs(metadata._client, client) - self.assertEqual(metadata._properties, RESOURCE["metadata"]) - self.assertEqual(secret, RESOURCE["secret"]) + self.assertEqual(metadata._properties, api_response["metadata"]) + self.assertEqual(secret, api_response["secret"]) - qs_params = {"serviceAccountEmail": EMAIL} + expected_path = "/projects/{}/hmacKeys".format(expected_project) + expected_data = None + expected_query_params = {"serviceAccountEmail": email} if user_project is not None: - qs_params["userProject"] = user_project + expected_query_params["userProject"] = user_project - http.request.assert_called_once_with( - method="POST", url=mock.ANY, data=None, headers=mock.ANY, timeout=timeout - ) - _, kwargs = http.request.call_args - scheme, netloc, path, 
qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join( - [ - "", - "storage", - client._connection.API_VERSION, - "projects", - expected_project, - "hmacKeys", - ] - ), + client._post_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=expected_timeout, + retry=expected_retry, ) - parms = dict(urlparse.parse_qsl(qs)) - for param, expected in qs_params.items(): - self.assertEqual(parms[param], expected) def test_create_hmac_key_defaults(self): self._create_hmac_key_helper() @@ -1566,113 +1809,83 @@ def test_create_hmac_key_defaults(self): def test_create_hmac_key_explicit_project(self): self._create_hmac_key_helper(explicit_project="other-project-456") - def test_create_hmac_key_user_project(self): - self._create_hmac_key_helper(user_project="billed-project", timeout=42) - - def test_list_hmac_keys_defaults_empty(self): - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - - http = _make_requests_session([_make_json_response({})]) - client._http_internal = http - - metadatas = list(client.list_hmac_keys()) + def test_create_hmac_key_w_user_project(self): + self._create_hmac_key_helper(user_project="billed-project") - self.assertEqual(len(metadatas), 0) + def test_create_hmac_key_w_timeout(self): + self._create_hmac_key_helper(timeout=42) - http.request.assert_called_once_with( - method="GET", - url=mock.ANY, - data=None, - headers=mock.ANY, - timeout=self._get_default_timeout(), - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join( - [ - "", - "storage", - client._connection.API_VERSION, - "projects", - PROJECT, - "hmacKeys", - ] - ), - ) + def test_create_hmac_key_w_retry(self): + self._create_hmac_key_helper(retry=mock.Mock(spec=[])) - def test_list_hmac_keys_explicit_non_empty(self): - from google.cloud.storage.hmac_key import HMACKeyMetadata + def test_list_hmac_keys_w_defaults(self): + from google.cloud.storage.client import _item_to_hmac_key_metadata - PROJECT = "PROJECT" - OTHER_PROJECT = "other-project-456" - MAX_RESULTS = 3 - EMAIL = "storage-user-123@example.com" - ACCESS_ID = "ACCESS-ID" - USER_PROJECT = "billed-project" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) + project = "PROJECT" + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._list_resource = mock.Mock(spec=[]) - response = { - "kind": "storage#hmacKeysMetadata", - "items": [ - { - "kind": "storage#hmacKeyMetadata", - "accessId": ACCESS_ID, - "serviceAccountEmail": EMAIL, - } - ], - } + iterator = client.list_hmac_keys() - http = _make_requests_session([_make_json_response(response)]) - client._http_internal = http + self.assertIs(iterator, client._list_resource.return_value) - metadatas = list( - client.list_hmac_keys( - max_results=MAX_RESULTS, - service_account_email=EMAIL, - show_deleted_keys=True, - project_id=OTHER_PROJECT, - user_project=USER_PROJECT, - timeout=42, - ) + expected_path = "/projects/{}/hmacKeys".format(project) + expected_item_to_value = _item_to_hmac_key_metadata + expected_max_results = None + expected_extra_params = {} + 
client._list_resource.assert_called_once_with( + expected_path, + expected_item_to_value, + max_results=expected_max_results, + extra_params=expected_extra_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, ) - self.assertEqual(len(metadatas), len(response["items"])) + def test_list_hmac_keys_w_explicit(self): + from google.cloud.storage.client import _item_to_hmac_key_metadata - for metadata, resource in zip(metadatas, response["items"]): - self.assertIsInstance(metadata, HMACKeyMetadata) - self.assertIs(metadata._client, client) - self.assertEqual(metadata._properties, resource) - - http.request.assert_called_once_with( - method="GET", url=mock.ANY, data=None, headers=mock.ANY, timeout=42 - ) - _, kwargs = http.request.call_args - scheme, netloc, path, qs, _ = urlparse.urlsplit(kwargs.get("url")) - self.assertEqual("%s://%s" % (scheme, netloc), client._connection.API_BASE_URL) - self.assertEqual( - path, - "/".join( - [ - "", - "storage", - client._connection.API_VERSION, - "projects", - OTHER_PROJECT, - "hmacKeys", - ] - ), + project = "PROJECT" + other_project = "other-project-456" + max_results = 3 + show_deleted_keys = True + service_account_email = "storage-user-123@example.com" + user_project = "billed-project" + credentials = _make_credentials() + client = self._make_one(project=project, credentials=credentials) + client._list_resource = mock.Mock(spec=[]) + timeout = 42 + retry = mock.Mock(spec=[]) + + iterator = client.list_hmac_keys( + max_results=max_results, + service_account_email=service_account_email, + show_deleted_keys=show_deleted_keys, + project_id=other_project, + user_project=user_project, + timeout=timeout, + retry=retry, + ) + + self.assertIs(iterator, client._list_resource.return_value) + + expected_path = "/projects/{}/hmacKeys".format(other_project) + expected_item_to_value = _item_to_hmac_key_metadata + expected_max_results = max_results + expected_extra_params = { + "serviceAccountEmail": service_account_email, + "showDeletedKeys": show_deleted_keys, + "userProject": user_project, + } + client._list_resource.assert_called_once_with( + expected_path, + expected_item_to_value, + max_results=expected_max_results, + extra_params=expected_extra_params, + timeout=timeout, + retry=retry, ) - parms = dict(urlparse.parse_qsl(qs)) - self.assertEqual(parms["maxResults"], str(MAX_RESULTS)) - self.assertEqual(parms["serviceAccountEmail"], EMAIL) - self.assertEqual(parms["showDeletedKeys"], "True") - self.assertEqual(parms["userProject"], USER_PROJECT) def test_get_hmac_key_metadata_wo_project(self): from google.cloud.storage.hmac_key import HMACKeyMetadata @@ -1786,7 +1999,9 @@ def test_get_signed_policy_v4(self): EXPECTED_SIGN = "5369676e61747572655f6279746573" EXPECTED_POLICY = "eyJjb25kaXRpb25zIjpbeyJidWNrZXQiOiJidWNrZXQtbmFtZSJ9LHsiYWNsIjoicHJpdmF0ZSJ9LFsic3RhcnRzLXdpdGgiLCIkQ29udGVudC1UeXBlIiwidGV4dC9wbGFpbiJdLHsiYnVja2V0IjoiYnVja2V0LW5hbWUifSx7ImtleSI6Im9iamVjdC1uYW1lIn0seyJ4LWdvb2ctZGF0ZSI6IjIwMjAwMzEyVDExNDcxNloifSx7IngtZ29vZy1jcmVkZW50aWFsIjoidGVzdEBtYWlsLmNvbS8yMDIwMDMxMi9hdXRvL3N0b3JhZ2UvZ29vZzRfcmVxdWVzdCJ9LHsieC1nb29nLWFsZ29yaXRobSI6IkdPT0c0LVJTQS1TSEEyNTYifV0sImV4cGlyYXRpb24iOiIyMDIwLTAzLTI2VDAwOjAwOjEwWiJ9" - client = self._make_one(project="PROJECT") + project = "PROJECT" + credentials = _make_credentials(project=project) + client = self._make_one(credentials=credentials) dtstamps_patch, now_patch, expire_secs_patch = _time_functions_patches() with dtstamps_patch, now_patch, expire_secs_patch: @@ -1864,7 +2079,9 @@ def 
test_get_signed_policy_v4_with_fields(self): EXPECTED_SIGN = "5369676e61747572655f6279746573" EXPECTED_POLICY = "eyJjb25kaXRpb25zIjpbeyJidWNrZXQiOiJidWNrZXQtbmFtZSJ9LHsiYWNsIjoicHJpdmF0ZSJ9LFsic3RhcnRzLXdpdGgiLCIkQ29udGVudC1UeXBlIiwidGV4dC9wbGFpbiJdLHsiZmllbGQxIjoiVmFsdWUxIn0seyJidWNrZXQiOiJidWNrZXQtbmFtZSJ9LHsia2V5Ijoib2JqZWN0LW5hbWUifSx7IngtZ29vZy1kYXRlIjoiMjAyMDAzMTJUMTE0NzE2WiJ9LHsieC1nb29nLWNyZWRlbnRpYWwiOiJ0ZXN0QG1haWwuY29tLzIwMjAwMzEyL2F1dG8vc3RvcmFnZS9nb29nNF9yZXF1ZXN0In0seyJ4LWdvb2ctYWxnb3JpdGhtIjoiR09PRzQtUlNBLVNIQTI1NiJ9XSwiZXhwaXJhdGlvbiI6IjIwMjAtMDMtMjZUMDA6MDA6MTBaIn0=" - client = self._make_one(project="PROJECT") + project = "PROJECT" + credentials = _make_credentials(project=project) + client = self._make_one(credentials=credentials) dtstamps_patch, now_patch, expire_secs_patch = _time_functions_patches() with dtstamps_patch, now_patch, expire_secs_patch: @@ -1902,7 +2119,9 @@ def test_get_signed_policy_v4_virtual_hosted_style(self): BUCKET_NAME = "bucket-name" - client = self._make_one(project="PROJECT") + project = "PROJECT" + credentials = _make_credentials(project=project) + client = self._make_one(credentials=credentials) dtstamps_patch, _, _ = _time_functions_patches() with dtstamps_patch: @@ -1920,7 +2139,9 @@ def test_get_signed_policy_v4_virtual_hosted_style(self): def test_get_signed_policy_v4_bucket_bound_hostname(self): import datetime - client = self._make_one(project="PROJECT") + project = "PROJECT" + credentials = _make_credentials(project=project) + client = self._make_one(credentials=credentials) dtstamps_patch, _, _ = _time_functions_patches() with dtstamps_patch: @@ -1936,7 +2157,9 @@ def test_get_signed_policy_v4_bucket_bound_hostname(self): def test_get_signed_policy_v4_bucket_bound_hostname_with_scheme(self): import datetime - client = self._make_one(project="PROJECT") + project = "PROJECT" + credentials = _make_credentials(project=project) + client = self._make_one(credentials=credentials) dtstamps_patch, _, _ = _time_functions_patches() with dtstamps_patch: @@ -1954,7 +2177,9 @@ def test_get_signed_policy_v4_no_expiration(self): BUCKET_NAME = "bucket-name" EXPECTED_POLICY = "eyJjb25kaXRpb25zIjpbeyJidWNrZXQiOiJidWNrZXQtbmFtZSJ9LHsia2V5Ijoib2JqZWN0LW5hbWUifSx7IngtZ29vZy1kYXRlIjoiMjAyMDAzMTJUMTE0NzE2WiJ9LHsieC1nb29nLWNyZWRlbnRpYWwiOiJ0ZXN0QG1haWwuY29tLzIwMjAwMzEyL2F1dG8vc3RvcmFnZS9nb29nNF9yZXF1ZXN0In0seyJ4LWdvb2ctYWxnb3JpdGhtIjoiR09PRzQtUlNBLVNIQTI1NiJ9XSwiZXhwaXJhdGlvbiI6IjIwMjAtMDMtMjZUMDA6MDA6MTBaIn0=" - client = self._make_one(project="PROJECT") + project = "PROJECT" + credentials = _make_credentials(project=project) + client = self._make_one(credentials=credentials) dtstamps_patch, now_patch, expire_secs_patch = _time_functions_patches() with dtstamps_patch, now_patch, expire_secs_patch: @@ -1978,7 +2203,9 @@ def test_get_signed_policy_v4_with_access_token(self): EXPECTED_SIGN = "0c4003044105" EXPECTED_POLICY = "eyJjb25kaXRpb25zIjpbeyJidWNrZXQiOiJidWNrZXQtbmFtZSJ9LHsiYWNsIjoicHJpdmF0ZSJ9LFsic3RhcnRzLXdpdGgiLCIkQ29udGVudC1UeXBlIiwidGV4dC9wbGFpbiJdLHsiYnVja2V0IjoiYnVja2V0LW5hbWUifSx7ImtleSI6Im9iamVjdC1uYW1lIn0seyJ4LWdvb2ctZGF0ZSI6IjIwMjAwMzEyVDExNDcxNloifSx7IngtZ29vZy1jcmVkZW50aWFsIjoidGVzdEBtYWlsLmNvbS8yMDIwMDMxMi9hdXRvL3N0b3JhZ2UvZ29vZzRfcmVxdWVzdCJ9LHsieC1nb29nLWFsZ29yaXRobSI6IkdPT0c0LVJTQS1TSEEyNTYifV0sImV4cGlyYXRpb24iOiIyMDIwLTAzLTI2VDAwOjAwOjEwWiJ9" - client = self._make_one(project="PROJECT") + project = "PROJECT" + credentials = _make_credentials(project=project) + client = self._make_one(credentials=credentials) dtstamps_patch, now_patch, 
expire_secs_patch = _time_functions_patches() with dtstamps_patch, now_patch, expire_secs_patch: @@ -2013,32 +2240,57 @@ def test_get_signed_policy_v4_with_access_token(self): self.assertEqual(fields["x-goog-signature"], EXPECTED_SIGN) self.assertEqual(fields["policy"], EXPECTED_POLICY) - def test_list_buckets_retries_error(self): - PROJECT = "PROJECT" - CREDENTIALS = _make_credentials() - client = self._make_one(project=PROJECT, credentials=CREDENTIALS) - BUCKET_NAME = "bucket-name" +class Test__item_to_bucket(unittest.TestCase): + def _call_fut(self, iterator, item): + from google.cloud.storage.client import _item_to_bucket - data = {"items": [{"name": BUCKET_NAME}]} - http = _make_requests_session( - [exceptions.InternalServerError("mock error"), _make_json_response(data)] - ) - client._http_internal = http + return _item_to_bucket(iterator, item) - buckets = list(client.list_buckets()) + def test_w_empty_item(self): + from google.cloud.storage.bucket import Bucket - self.assertEqual(len(buckets), 1) - self.assertEqual(buckets[0].name, BUCKET_NAME) + iterator = mock.Mock(spec=["client"]) + item = {} - call = mock.call( - method="GET", - url=mock.ANY, - data=mock.ANY, - headers=mock.ANY, - timeout=self._get_default_timeout(), - ) - http.request.assert_has_calls([call, call]) + bucket = self._call_fut(iterator, item) + + self.assertIsInstance(bucket, Bucket) + self.assertIs(bucket.client, iterator.client) + self.assertIsNone(bucket.name) + + def test_w_name(self): + from google.cloud.storage.bucket import Bucket + + name = "name" + iterator = mock.Mock(spec=["client"]) + item = {"name": name} + + bucket = self._call_fut(iterator, item) + + self.assertIsInstance(bucket, Bucket) + self.assertIs(bucket.client, iterator.client) + self.assertEqual(bucket.name, name) + + +class Test__item_to_hmac_key_metadata(unittest.TestCase): + def _call_fut(self, iterator, item): + from google.cloud.storage.client import _item_to_hmac_key_metadata + + return _item_to_hmac_key_metadata(iterator, item) + + def test_it(self): + from google.cloud.storage.hmac_key import HMACKeyMetadata + + access_id = "ABCDE" + iterator = mock.Mock(spec=["client"]) + item = {"id": access_id} + + metadata = self._call_fut(iterator, item) + + self.assertIsInstance(metadata, HMACKeyMetadata) + self.assertIs(metadata._client, iterator.client) + self.assertEqual(metadata._properties, item) @pytest.mark.parametrize("test_data", _POST_POLICY_TESTS) diff --git a/tests/unit/test_fileio.py b/tests/unit/test_fileio.py index 0ac16ab24..6ce9b4990 100644 --- a/tests/unit/test_fileio.py +++ b/tests/unit/test_fileio.py @@ -19,8 +19,10 @@ import io import string +from google.cloud.storage._helpers import _NUM_RETRIES_MESSAGE from google.cloud.storage.fileio import BlobReader, BlobWriter, SlidingBuffer from google.api_core.exceptions import RequestRangeNotSatisfiable +from google.cloud.storage.retry import DEFAULT_RETRY TEST_TEXT_DATA = string.ascii_lowercase + "\n" + string.ascii_uppercase + "\n" TEST_BINARY_DATA = TEST_TEXT_DATA.encode("utf-8") @@ -37,7 +39,15 @@ def test_attributes(self): self.assertTrue(reader.seekable()) self.assertTrue(reader.readable()) self.assertFalse(reader.writable()) - self.assertEqual(256, reader._chunk_size) + self.assertEqual(reader._chunk_size, 256) + self.assertEqual(reader._retry, DEFAULT_RETRY) + + def test_attributes_explicit(self): + blob = mock.Mock() + blob.chunk_size = 256 + reader = BlobReader(blob, chunk_size=1024, retry=None) + self.assertEqual(reader._chunk_size, 1024) + 
self.assertIsNone(reader._retry) def test_read(self): blob = mock.Mock() @@ -52,7 +62,7 @@ def read_from_fake_data(start=0, end=None, **_): # Read and trigger the first download of chunk_size. self.assertEqual(reader.read(1), TEST_BINARY_DATA[0:1]) blob.download_as_bytes.assert_called_once_with( - start=0, end=8, checksum=None, **download_kwargs + start=0, end=8, checksum=None, retry=DEFAULT_RETRY, **download_kwargs ) # Read from buffered data only. @@ -64,7 +74,7 @@ def read_from_fake_data(start=0, end=None, **_): self.assertEqual(reader._pos, 12) self.assertEqual(blob.download_as_bytes.call_count, 2) blob.download_as_bytes.assert_called_with( - start=8, end=16, checksum=None, **download_kwargs + start=8, end=16, checksum=None, retry=DEFAULT_RETRY, **download_kwargs ) # Read a larger amount, requiring a download larger than chunk_size. @@ -72,14 +82,32 @@ def read_from_fake_data(start=0, end=None, **_): self.assertEqual(reader._pos, 28) self.assertEqual(blob.download_as_bytes.call_count, 3) blob.download_as_bytes.assert_called_with( - start=16, end=28, checksum=None, **download_kwargs + start=16, end=28, checksum=None, retry=DEFAULT_RETRY, **download_kwargs ) # Read all remaining data. self.assertEqual(reader.read(), TEST_BINARY_DATA[28:]) self.assertEqual(blob.download_as_bytes.call_count, 4) blob.download_as_bytes.assert_called_with( - start=28, end=None, checksum=None, **download_kwargs + start=28, end=None, checksum=None, retry=DEFAULT_RETRY, **download_kwargs + ) + + reader.close() + + def test_retry_passed_through(self): + blob = mock.Mock() + + def read_from_fake_data(start=0, end=None, **_): + return TEST_BINARY_DATA[start:end] + + blob.download_as_bytes = mock.Mock(side_effect=read_from_fake_data) + download_kwargs = {"if_metageneration_match": 1} + reader = BlobReader(blob, chunk_size=8, retry=None, **download_kwargs) + + # Read and trigger the first download of chunk_size. + self.assertEqual(reader.read(1), TEST_BINARY_DATA[0:1]) + blob.download_as_bytes.assert_called_once_with( + start=0, end=8, checksum=None, retry=None, **download_kwargs ) reader.close() @@ -104,12 +132,16 @@ def read_from_fake_data(start=0, end=None, **_): # Read a line. With chunk_size=10, expect three chunks downloaded. self.assertEqual(reader.readline(), TEST_BINARY_DATA[:27]) - blob.download_as_bytes.assert_called_with(start=20, end=30, checksum=None) + blob.download_as_bytes.assert_called_with( + start=20, end=30, checksum=None, retry=DEFAULT_RETRY + ) self.assertEqual(blob.download_as_bytes.call_count, 3) # Read another line. self.assertEqual(reader.readline(), TEST_BINARY_DATA[27:]) - blob.download_as_bytes.assert_called_with(start=50, end=60, checksum=None) + blob.download_as_bytes.assert_called_with( + start=50, end=60, checksum=None, retry=DEFAULT_RETRY + ) self.assertEqual(blob.download_as_bytes.call_count, 6) blob.size = len(TEST_BINARY_DATA) @@ -118,7 +150,10 @@ def read_from_fake_data(start=0, end=None, **_): # Read all lines. The readlines algorithm will attempt to read past the end of the last line once to verify there is no more to read. 
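+ # That final probe shows up below as one extra download_as_bytes call
+ # starting at start=len(TEST_BINARY_DATA); the fake data source returns
+ # nothing for that range, which signals EOF to the reader.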
self.assertEqual(b"".join(reader.readlines()), TEST_BINARY_DATA) blob.download_as_bytes.assert_called_with( - start=len(TEST_BINARY_DATA), end=len(TEST_BINARY_DATA) + 10, checksum=None + start=len(TEST_BINARY_DATA), + end=len(TEST_BINARY_DATA) + 10, + checksum=None, + retry=DEFAULT_RETRY, ) self.assertEqual(blob.download_as_bytes.call_count, 13) @@ -209,7 +244,14 @@ def test_attributes(self): self.assertFalse(writer.seekable()) self.assertFalse(writer.readable()) self.assertTrue(writer.writable()) - self.assertEqual(256 * 1024, writer._chunk_size) + self.assertEqual(writer._chunk_size, 256 * 1024) + + def test_attributes_explicit(self): + blob = mock.Mock() + blob.chunk_size = 256 * 1024 + writer = BlobWriter(blob, chunk_size=512 * 1024, retry=DEFAULT_RETRY) + self.assertEqual(writer._chunk_size, 512 * 1024) + self.assertEqual(writer._retry, DEFAULT_RETRY) def test_reject_wrong_chunk_size(self): blob = mock.Mock() @@ -261,6 +303,7 @@ def test_write(self): None, NUM_RETRIES, chunk_size=chunk_size, + retry=None, **upload_kwargs ) upload.transmit_next_chunk.assert_called_with(transport) @@ -286,7 +329,56 @@ def test_seek_fails(self): with self.assertRaises(io.UnsupportedOperation): writer.seek() - def test_conditional_retries(self): + def test_conditional_retry_failure(self): + blob = mock.Mock() + + upload = mock.Mock() + transport = mock.Mock() + + blob._initiate_resumable_upload.return_value = (upload, transport) + + with mock.patch("google.cloud.storage.fileio.CHUNK_SIZE_MULTIPLE", 1): + # Create a writer. + # It would be normal to use a context manager here, but not doing so + # gives us more control over close() for test purposes. + chunk_size = 8 # Note: Real upload requires a multiple of 256KiB. + writer = BlobWriter( + blob, chunk_size=chunk_size, content_type=PLAIN_CONTENT_TYPE, + ) + + # The transmit_next_chunk method must actually consume bytes from the + # sliding buffer for the flush() feature to work properly. + upload.transmit_next_chunk.side_effect = lambda _: writer._buffer.read( + chunk_size + ) + + # Write under chunk_size. This should be buffered and the upload not + # initiated. + writer.write(TEST_BINARY_DATA[0:4]) + blob.initiate_resumable_upload.assert_not_called() + + # Write over chunk_size. This should result in upload initialization + # and multiple chunks uploaded. + # Due to the condition not being fulfilled, retry should be None. + writer.write(TEST_BINARY_DATA[4:32]) + blob._initiate_resumable_upload.assert_called_once_with( + blob.bucket.client, + writer._buffer, + PLAIN_CONTENT_TYPE, + None, # size + None, # num_retries + chunk_size=chunk_size, + retry=None, + ) + upload.transmit_next_chunk.assert_called_with(transport) + self.assertEqual(upload.transmit_next_chunk.call_count, 4) + + # Write another byte, finalize and close. + writer.write(TEST_BINARY_DATA[32:33]) + writer.close() + self.assertEqual(upload.transmit_next_chunk.call_count, 5) + + def test_conditional_retry_pass(self): blob = mock.Mock() upload = mock.Mock() @@ -302,8 +394,8 @@ def test_conditional_retries(self): writer = BlobWriter( blob, chunk_size=chunk_size, - num_retries=None, content_type=PLAIN_CONTENT_TYPE, + if_metageneration_match=1, ) # The transmit_next_chunk method must actually consume bytes from the @@ -319,15 +411,69 @@ def test_conditional_retries(self): # Write over chunk_size. This should result in upload initialization # and multiple chunks uploaded. - # Due to the condition not being fulfilled, num_retries should be 0. 
+ # Due to the condition being fulfilled, retry should be DEFAULT_RETRY. writer.write(TEST_BINARY_DATA[4:32]) blob._initiate_resumable_upload.assert_called_once_with( blob.bucket.client, writer._buffer, PLAIN_CONTENT_TYPE, - None, - 0, + None,  # size + None,  # num_retries + chunk_size=chunk_size, + retry=DEFAULT_RETRY, + if_metageneration_match=1, + ) + upload.transmit_next_chunk.assert_called_with(transport) + self.assertEqual(upload.transmit_next_chunk.call_count, 4) + + # Write another byte, finalize and close. + writer.write(TEST_BINARY_DATA[32:33]) + writer.close() + self.assertEqual(upload.transmit_next_chunk.call_count, 5) + + @mock.patch("warnings.warn") + def test_forced_default_retry(self, mock_warn): + blob = mock.Mock() + + upload = mock.Mock() + transport = mock.Mock() + + blob._initiate_resumable_upload.return_value = (upload, transport) + + with mock.patch("google.cloud.storage.fileio.CHUNK_SIZE_MULTIPLE", 1): + # Create a writer. + # It would be normal to use a context manager here, but not doing so + # gives us more control over close() for test purposes. + chunk_size = 8  # Note: Real upload requires a multiple of 256KiB. + writer = BlobWriter( + blob, + chunk_size=chunk_size, + content_type=PLAIN_CONTENT_TYPE, + retry=DEFAULT_RETRY, + ) + + # The transmit_next_chunk method must actually consume bytes from the + # sliding buffer for the flush() feature to work properly. + upload.transmit_next_chunk.side_effect = lambda _: writer._buffer.read( + chunk_size + ) + + # Write under chunk_size. This should be buffered and the upload not + # initiated. + writer.write(TEST_BINARY_DATA[0:4]) + blob._initiate_resumable_upload.assert_not_called() + + # Write over chunk_size. This should result in upload initialization + # and multiple chunks uploaded. + writer.write(TEST_BINARY_DATA[4:32]) + blob._initiate_resumable_upload.assert_called_once_with( + blob.bucket.client, + writer._buffer, + PLAIN_CONTENT_TYPE, + None,  # size + None,  # num_retries chunk_size=chunk_size, + retry=DEFAULT_RETRY, ) upload.transmit_next_chunk.assert_called_with(transport) self.assertEqual(upload.transmit_next_chunk.call_count, 4) @@ -337,6 +483,99 @@ writer.close() self.assertEqual(upload.transmit_next_chunk.call_count, 5) + def test_num_retries_and_retry_conflict(self): + blob = mock.Mock() + + blob._initiate_resumable_upload.side_effect = ValueError + + with mock.patch("google.cloud.storage.fileio.CHUNK_SIZE_MULTIPLE", 1): + # Create a writer. + # It would be normal to use a context manager here, but not doing so + # gives us more control over close() for test purposes. + chunk_size = 8  # Note: Real upload requires a multiple of 256KiB. + writer = BlobWriter( + blob, + chunk_size=chunk_size, + content_type=PLAIN_CONTENT_TYPE, + num_retries=2, + retry=DEFAULT_RETRY, + ) + + # Write under chunk_size. This should be buffered and the upload not + # initiated. + writer.write(TEST_BINARY_DATA[0:4]) + blob._initiate_resumable_upload.assert_not_called() + + # Write over chunk_size. The mock will raise a ValueError, simulating + # actual behavior when num_retries and retry are both specified. 
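+ # A bare Mock does not reproduce that argument validation on its own,
+ # which is why the ValueError side_effect was wired up above.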
+ with self.assertRaises(ValueError): + writer.write(TEST_BINARY_DATA[4:32]) + + blob._initiate_resumable_upload.assert_called_once_with( + blob.bucket.client, + writer._buffer, + PLAIN_CONTENT_TYPE, + None, # size + 2, # num_retries + chunk_size=chunk_size, + retry=DEFAULT_RETRY, + ) + + @mock.patch("warnings.warn") + def test_num_retries_only(self, mock_warn): + blob = mock.Mock() + + upload = mock.Mock() + transport = mock.Mock() + + blob._initiate_resumable_upload.return_value = (upload, transport) + + with mock.patch("google.cloud.storage.fileio.CHUNK_SIZE_MULTIPLE", 1): + # Create a writer. + # It would be normal to use a context manager here, but not doing so + # gives us more control over close() for test purposes. + chunk_size = 8 # Note: Real upload requires a multiple of 256KiB. + writer = BlobWriter( + blob, + chunk_size=chunk_size, + content_type=PLAIN_CONTENT_TYPE, + num_retries=2, + ) + + # The transmit_next_chunk method must actually consume bytes from the + # sliding buffer for the flush() feature to work properly. + upload.transmit_next_chunk.side_effect = lambda _: writer._buffer.read( + chunk_size + ) + + # Write under chunk_size. This should be buffered and the upload not + # initiated. + writer.write(TEST_BINARY_DATA[0:4]) + blob._initiate_resumable_upload.assert_not_called() + + # Write over chunk_size. This should result in upload initialization + # and multiple chunks uploaded. + writer.write(TEST_BINARY_DATA[4:32]) + blob._initiate_resumable_upload.assert_called_once_with( + blob.bucket.client, + writer._buffer, + PLAIN_CONTENT_TYPE, + None, # size + 2, # num_retries + chunk_size=chunk_size, + retry=None, + ) + upload.transmit_next_chunk.assert_called_with(transport) + self.assertEqual(upload.transmit_next_chunk.call_count, 4) + mock_warn.assert_called_once_with( + _NUM_RETRIES_MESSAGE, DeprecationWarning, stacklevel=2 + ) + + # Write another byte, finalize and close.
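+ # close() flushes the one byte still in the buffer as a final, fifth chunk.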
+ writer.write(TEST_BINARY_DATA[32:33]) + writer.close() + self.assertEqual(upload.transmit_next_chunk.call_count, 5) + def test_rejects_invalid_kwargs(self): blob = mock.Mock() with self.assertRaises(ValueError): @@ -606,5 +845,6 @@ def test_write(self): None, NUM_RETRIES, chunk_size=chunk_size, + retry=None, ) upload.transmit_next_chunk.assert_called_with(transport) diff --git a/tests/unit/test_hmac_key.py b/tests/unit/test_hmac_key.py index 5761f4a96..60d0c135b 100644 --- a/tests/unit/test_hmac_key.py +++ b/tests/unit/test_hmac_key.py @@ -218,31 +218,29 @@ def test_path_w_access_id_w_explicit_project(self): expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) self.assertEqual(metadata.path, expected_path) - def test_exists_miss_no_project_set(self): + def test_exists_miss_w_defaults(self): from google.cloud.exceptions import NotFound access_id = "ACCESS-ID" - connection = mock.Mock(spec=["api_request"]) - connection.api_request.side_effect = NotFound("testing") - client = _Client(connection) + project = "PROJECT" + client = mock.Mock(spec=["_get_resource", "project"]) + client._get_resource.side_effect = NotFound("testing") + client.project = project metadata = self._make_one(client) metadata._properties["accessId"] = access_id - self.assertFalse(metadata.exists(timeout=42)) + self.assertFalse(metadata.exists()) - expected_path = "/projects/{}/hmacKeys/{}".format( - client.DEFAULT_PROJECT, access_id + expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) + expected_query_params = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, ) - expected_kwargs = { - "method": "GET", - "path": expected_path, - "query_params": {}, - "timeout": 42, - "retry": DEFAULT_RETRY, - } - connection.api_request.assert_called_once_with(**expected_kwargs) - def test_exists_hit_w_project_set(self): + def test_exists_hit_w_explicit_w_user_project(self): project = "PROJECT-ID" access_id = "ACCESS-ID" user_project = "billed-project" @@ -252,49 +250,47 @@ def test_exists_hit_w_project_set(self): "accessId": access_id, "serviceAccountEmail": email, } - connection = mock.Mock(spec=["api_request"]) - connection.api_request.return_value = resource - client = _Client(connection) + timeout = 42 + retry = mock.Mock(spec=[]) + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = resource metadata = self._make_one(client, user_project=user_project) metadata._properties["accessId"] = access_id metadata._properties["projectId"] = project - self.assertTrue(metadata.exists()) + self.assertTrue(metadata.exists(timeout=timeout, retry=retry)) expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) - expected_kwargs = { - "method": "GET", - "path": expected_path, - "query_params": {"userProject": user_project}, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - } - connection.api_request.assert_called_once_with(**expected_kwargs) + expected_query_params = {"userProject": user_project} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + ) - def test_reload_miss_no_project_set(self): + def test_reload_miss_w_defaults(self): from google.cloud.exceptions import NotFound access_id = "ACCESS-ID" - connection = mock.Mock(spec=["api_request"]) - connection.api_request.side_effect = NotFound("testing") - client = _Client(connection) + project = 
"PROJECT" + client = mock.Mock(spec=["_get_resource", "project"]) + client._get_resource.side_effect = NotFound("testing") + client.project = project metadata = self._make_one(client) metadata._properties["accessId"] = access_id with self.assertRaises(NotFound): - metadata.reload(timeout=42) + metadata.reload() - expected_path = "/projects/{}/hmacKeys/{}".format( - client.DEFAULT_PROJECT, access_id + expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) + expected_query_params = {} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, ) - expected_kwargs = { - "method": "GET", - "path": expected_path, - "query_params": {}, - "timeout": 42, - "retry": DEFAULT_RETRY, - } - connection.api_request.assert_called_once_with(**expected_kwargs) def test_reload_hit_w_project_set(self): project = "PROJECT-ID" @@ -306,55 +302,54 @@ def test_reload_hit_w_project_set(self): "accessId": access_id, "serviceAccountEmail": email, } - connection = mock.Mock(spec=["api_request"]) - connection.api_request.return_value = resource - client = _Client(connection) + timeout = 42 + retry = mock.Mock(spec=[]) + client = mock.Mock(spec=["_get_resource"]) + client._get_resource.return_value = resource metadata = self._make_one(client, user_project=user_project) metadata._properties["accessId"] = access_id metadata._properties["projectId"] = project - metadata.reload() + metadata.reload(timeout=timeout, retry=retry) self.assertEqual(metadata._properties, resource) expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) - expected_kwargs = { - "method": "GET", - "path": expected_path, - "query_params": {"userProject": user_project}, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - } - connection.api_request.assert_called_once_with(**expected_kwargs) + expected_query_params = {"userProject": user_project} + client._get_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + ) - def test_update_miss_no_project_set(self): + def test_update_miss_no_project_set_w_defaults(self): from google.cloud.exceptions import NotFound + project = "PROJECT" access_id = "ACCESS-ID" - connection = mock.Mock(spec=["api_request"]) - connection.api_request.side_effect = NotFound("testing") - client = _Client(connection) + client = mock.Mock(spec=["_put_resource", "project"]) + client._put_resource.side_effect = NotFound("testing") + client.project = project metadata = self._make_one(client) metadata._properties["accessId"] = access_id metadata.state = "INACTIVE" with self.assertRaises(NotFound): - metadata.update(timeout=42) + metadata.update() - expected_path = "/projects/{}/hmacKeys/{}".format( - client.DEFAULT_PROJECT, access_id + expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) + expected_data = {"state": "INACTIVE"} + expected_query_params = {} + client._put_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY_IF_ETAG_IN_JSON, ) - expected_kwargs = { - "method": "PUT", - "path": expected_path, - "data": {"state": "INACTIVE"}, - "query_params": {}, - "timeout": 42, - "retry": DEFAULT_RETRY_IF_ETAG_IN_JSON, - } - connection.api_request.assert_called_once_with(**expected_kwargs) - def test_update_hit_w_project_set(self): + def test_update_hit_w_project_set_w_timeout_w_retry(self): 
project = "PROJECT-ID" access_id = "ACCESS-ID" user_project = "billed-project" @@ -365,86 +360,90 @@ def test_update_hit_w_project_set(self): "serviceAccountEmail": email, "state": "ACTIVE", } - connection = mock.Mock(spec=["api_request"]) - connection.api_request.return_value = resource - client = _Client(connection) + client = mock.Mock(spec=["_put_resource"]) + client._put_resource.return_value = resource metadata = self._make_one(client, user_project=user_project) metadata._properties["accessId"] = access_id metadata._properties["projectId"] = project metadata.state = "ACTIVE" + timeout = 42 + retry = mock.Mock(spec=[]) - metadata.update() + metadata.update(timeout=42, retry=retry) self.assertEqual(metadata._properties, resource) expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) - expected_kwargs = { - "method": "PUT", - "path": expected_path, - "data": {"state": "ACTIVE"}, - "query_params": {"userProject": user_project}, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY_IF_ETAG_IN_JSON, - } - connection.api_request.assert_called_once_with(**expected_kwargs) + expected_data = {"state": "ACTIVE"} + expected_query_params = {"userProject": user_project} + client._put_resource.assert_called_once_with( + expected_path, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + ) def test_delete_not_inactive(self): - metadata = self._make_one() + client = mock.Mock(spec=["_delete_resource", "project"]) + client.project = "PROJECT" + metadata = self._make_one(client) + for state in ("ACTIVE", "DELETED"): metadata._properties["state"] = state with self.assertRaises(ValueError): metadata.delete() - def test_delete_miss_no_project_set(self): + client._delete_resource.assert_not_called() + + def test_delete_miss_no_project_set_w_defaults(self): from google.cloud.exceptions import NotFound access_id = "ACCESS-ID" - connection = mock.Mock(spec=["api_request"]) - connection.api_request.side_effect = NotFound("testing") - client = _Client(connection) + client = mock.Mock(spec=["_delete_resource", "project"]) + client._delete_resource.side_effect = NotFound("testing") + client.project = "PROJECT" metadata = self._make_one(client) metadata._properties["accessId"] = access_id metadata.state = "INACTIVE" with self.assertRaises(NotFound): - metadata.delete(timeout=42) - - expected_path = "/projects/{}/hmacKeys/{}".format( - client.DEFAULT_PROJECT, access_id + metadata.delete() + + expected_path = "/projects/{}/hmacKeys/{}".format(client.project, access_id) + expected_query_params = {} + client._delete_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=self._get_default_timeout(), + retry=DEFAULT_RETRY, ) - expected_kwargs = { - "method": "DELETE", - "path": expected_path, - "query_params": {}, - "timeout": 42, - "retry": DEFAULT_RETRY, - } - connection.api_request.assert_called_once_with(**expected_kwargs) - def test_delete_hit_w_project_set(self): + def test_delete_hit_w_project_set_w_explicit_timeout_retry(self): project = "PROJECT-ID" access_id = "ACCESS-ID" user_project = "billed-project" - connection = mock.Mock(spec=["api_request"]) - connection.api_request.return_value = {} - client = _Client(connection) + client = mock.Mock(spec=["_delete_resource", "project"]) + client.project = "CLIENT-PROJECT" + client._delete_resource.return_value = {} metadata = self._make_one(client, user_project=user_project) metadata._properties["accessId"] = access_id metadata._properties["projectId"] = 
project metadata.state = "INACTIVE" + timeout = 42 + retry = mock.Mock(spec=[]) - metadata.delete() + metadata.delete(timeout=timeout, retry=retry) expected_path = "/projects/{}/hmacKeys/{}".format(project, access_id) - expected_kwargs = { - "method": "DELETE", - "path": expected_path, - "query_params": {"userProject": user_project}, - "timeout": self._get_default_timeout(), - "retry": DEFAULT_RETRY, - } - connection.api_request.assert_called_once_with(**expected_kwargs) + expected_query_params = {"userProject": user_project} + client._delete_resource.assert_called_once_with( + expected_path, + query_params=expected_query_params, + timeout=timeout, + retry=retry, + ) class _Client(object): diff --git a/tests/unit/test_notification.py b/tests/unit/test_notification.py index 7ecabfa3a..04ffd68a1 100644 --- a/tests/unit/test_notification.py +++ b/tests/unit/test_notification.py @@ -231,7 +231,8 @@ def test_self_link(self): self.assertEqual(notification.self_link, self.SELF_LINK) def test_create_w_existing_notification_id(self): - client = self._make_client() + client = mock.Mock(spec=["_post_resource", "project"]) + client.project = self.BUCKET_PROJECT bucket = self._make_bucket(client) notification = self._make_one(bucket, self.TOPIC_NAME) notification._properties["id"] = self.NOTIFICATION_ID @@ -239,20 +240,23 @@ def test_create_w_existing_notification_id(self): with self.assertRaises(ValueError): notification.create() + client._post_resource.assert_not_called() + def test_create_w_defaults(self): from google.cloud.storage.notification import NONE_PAYLOAD_FORMAT - client = self._make_client() - bucket = self._make_bucket(client) - notification = self._make_one(bucket, self.TOPIC_NAME) - api_request = client._connection.api_request - api_request.return_value = { + api_response = { "topic": self.TOPIC_REF, "id": self.NOTIFICATION_ID, "etag": self.ETAG, "selfLink": self.SELF_LINK, "payload_format": NONE_PAYLOAD_FORMAT, } + client = mock.Mock(spec=["_post_resource", "project"]) + client.project = self.BUCKET_PROJECT + client._post_resource.return_value = api_response + bucket = self._make_bucket(client) + notification = self._make_one(bucket, self.TOPIC_NAME) notification.create() @@ -264,32 +268,22 @@ def test_create_w_defaults(self): self.assertIsNone(notification.blob_name_prefix) self.assertEqual(notification.payload_format, NONE_PAYLOAD_FORMAT) - data = {"topic": self.TOPIC_REF, "payload_format": NONE_PAYLOAD_FORMAT} - api_request.assert_called_once_with( - method="POST", - path=self.CREATE_PATH, - query_params={}, - data=data, + expected_data = { + "topic": self.TOPIC_REF, + "payload_format": NONE_PAYLOAD_FORMAT, + } + expected_query_params = {} + client._post_resource.assert_called_once_with( + self.CREATE_PATH, + expected_data, + query_params=expected_query_params, timeout=self._get_default_timeout(), retry=None, ) - def test_create_w_explicit_client(self): - USER_PROJECT = "user-project-123" - client = self._make_client() - alt_client = self._make_client() - bucket = self._make_bucket(client, user_project=USER_PROJECT) - notification = self._make_one( - bucket, - self.TOPIC_NAME, - topic_project=self.TOPIC_ALT_PROJECT, - custom_attributes=self.CUSTOM_ATTRIBUTES, - event_types=self.event_types(), - blob_name_prefix=self.BLOB_NAME_PREFIX, - payload_format=self.payload_format(), - ) - api_request = alt_client._connection.api_request - api_request.return_value = { + def test_create_w_explicit_client_w_timeout_w_retry(self): + user_project = "user-project-123" + api_response = { "topic": 
self.TOPIC_ALT_REF, "custom_attributes": self.CUSTOM_ATTRIBUTES, "event_types": self.event_types(), @@ -299,8 +293,23 @@ def test_create_w_explicit_client(self): "etag": self.ETAG, "selfLink": self.SELF_LINK, } + bucket = self._make_bucket(client=None, user_project=user_project) + notification = self._make_one( + bucket, + self.TOPIC_NAME, + topic_project=self.TOPIC_ALT_PROJECT, + custom_attributes=self.CUSTOM_ATTRIBUTES, + event_types=self.event_types(), + blob_name_prefix=self.BLOB_NAME_PREFIX, + payload_format=self.payload_format(), + ) + client = mock.Mock(spec=["_post_resource", "project"]) + client.project = self.BUCKET_PROJECT + client._post_resource.return_value = api_response + timeout = 42 + retry = mock.Mock(spec=[]) - notification.create(client=alt_client, timeout=42) + notification.create(client=client, timeout=timeout, retry=retry) self.assertEqual(notification.custom_attributes, self.CUSTOM_ATTRIBUTES) self.assertEqual(notification.event_types, self.event_types()) @@ -310,121 +319,135 @@ def test_create_w_explicit_client(self): self.assertEqual(notification.etag, self.ETAG) self.assertEqual(notification.self_link, self.SELF_LINK) - data = { + expected_data = { "topic": self.TOPIC_ALT_REF, "custom_attributes": self.CUSTOM_ATTRIBUTES, "event_types": self.event_types(), "object_name_prefix": self.BLOB_NAME_PREFIX, "payload_format": self.payload_format(), } - api_request.assert_called_once_with( - method="POST", - path=self.CREATE_PATH, - query_params={"userProject": USER_PROJECT}, - data=data, - timeout=42, - retry=None, + expected_query_params = {"userProject": user_project} + client._post_resource.assert_called_once_with( + self.CREATE_PATH, + expected_data, + query_params=expected_query_params, + timeout=timeout, + retry=retry, ) def test_exists_wo_notification_id(self): - client = self._make_client() + client = mock.Mock(spec=["_get_resource", "project"]) + client.project = self.BUCKET_PROJECT bucket = self._make_bucket(client) notification = self._make_one(bucket, self.TOPIC_NAME) with self.assertRaises(ValueError): notification.exists() - def test_exists_miss(self): + client._get_resource.assert_not_called() + + def test_exists_miss_w_defaults(self): from google.cloud.exceptions import NotFound - client = self._make_client() + client = mock.Mock(spec=["_get_resource", "project"]) + client._get_resource.side_effect = NotFound("testing") + client.project = self.BUCKET_PROJECT bucket = self._make_bucket(client) notification = self._make_one(bucket, self.TOPIC_NAME) notification._properties["id"] = self.NOTIFICATION_ID - api_request = client._connection.api_request - api_request.side_effect = NotFound("testing") - self.assertFalse(notification.exists(timeout=42)) + self.assertFalse(notification.exists()) - api_request.assert_called_once_with( - method="GET", - path=self.NOTIFICATION_PATH, - query_params={}, - timeout=42, + expected_query_params = {} + client._get_resource.assert_called_once_with( + self.NOTIFICATION_PATH, + query_params=expected_query_params, + timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, ) - def test_exists_hit(self): - USER_PROJECT = "user-project-123" - client = self._make_client() - bucket = self._make_bucket(client, user_project=USER_PROJECT) - notification = self._make_one(bucket, self.TOPIC_NAME) - notification._properties["id"] = self.NOTIFICATION_ID - api_request = client._connection.api_request - api_request.return_value = { + def test_exists_hit_w_explicit_w_user_project(self): + user_project = "user-project-123" + api_response = { 
"topic": self.TOPIC_REF, "id": self.NOTIFICATION_ID, "etag": self.ETAG, "selfLink": self.SELF_LINK, } + client = mock.Mock(spec=["_get_resource", "project"]) + client._get_resource.return_vale = api_response + client.project = self.BUCKET_PROJECT + bucket = self._make_bucket(client, user_project=user_project) + notification = self._make_one(bucket, self.TOPIC_NAME) + notification._properties["id"] = self.NOTIFICATION_ID + timeout = 42 + retry = mock.Mock(spec=[]) - self.assertTrue(notification.exists(client=client)) + self.assertTrue( + notification.exists(client=client, timeout=timeout, retry=retry) + ) - api_request.assert_called_once_with( - method="GET", - path=self.NOTIFICATION_PATH, - query_params={"userProject": USER_PROJECT}, - timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + expected_query_params = {"userProject": user_project} + client._get_resource.assert_called_once_with( + self.NOTIFICATION_PATH, + query_params=expected_query_params, + timeout=timeout, + retry=retry, ) def test_reload_wo_notification_id(self): - client = self._make_client() + client = mock.Mock(spec=["_get_resource", "project"]) + client.project = self.BUCKET_PROJECT bucket = self._make_bucket(client) notification = self._make_one(bucket, self.TOPIC_NAME) with self.assertRaises(ValueError): notification.reload() - def test_reload_miss(self): + client._get_resource.assert_not_called() + + def test_reload_miss_w_defaults(self): from google.cloud.exceptions import NotFound - client = self._make_client() + client = mock.Mock(spec=["_get_resource", "project"]) + client._get_resource.side_effect = NotFound("testing") + client.project = self.BUCKET_PROJECT bucket = self._make_bucket(client) notification = self._make_one(bucket, self.TOPIC_NAME) notification._properties["id"] = self.NOTIFICATION_ID - api_request = client._connection.api_request - api_request.side_effect = NotFound("testing") with self.assertRaises(NotFound): - notification.reload(timeout=42) + notification.reload() - api_request.assert_called_once_with( - method="GET", - path=self.NOTIFICATION_PATH, - query_params={}, - timeout=42, + expected_query_params = {} + client._get_resource.assert_called_once_with( + self.NOTIFICATION_PATH, + query_params=expected_query_params, + timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, ) - def test_reload_hit(self): + def test_reload_hit_w_explicit_w_user_project(self): from google.cloud.storage.notification import NONE_PAYLOAD_FORMAT - USER_PROJECT = "user-project-123" - client = self._make_client() - bucket = self._make_bucket(client, user_project=USER_PROJECT) - notification = self._make_one(bucket, self.TOPIC_NAME) - notification._properties["id"] = self.NOTIFICATION_ID - api_request = client._connection.api_request - api_request.return_value = { + user_project = "user-project-123" + api_response = { "topic": self.TOPIC_REF, "id": self.NOTIFICATION_ID, "etag": self.ETAG, "selfLink": self.SELF_LINK, "payload_format": NONE_PAYLOAD_FORMAT, } + client = mock.Mock(spec=["_get_resource", "project"]) + client._get_resource.return_value = api_response + client.project = self.BUCKET_PROJECT + bucket = self._make_bucket(client, user_project=user_project) + notification = self._make_one(bucket, self.TOPIC_NAME) + notification._properties["id"] = self.NOTIFICATION_ID + timeout = 42 + retry = mock.Mock(spec=[]) - notification.reload(client=client) + notification.reload(client=client, timeout=timeout, retry=retry) self.assertEqual(notification.etag, self.ETAG) self.assertEqual(notification.self_link, 
self.SELF_LINK) @@ -433,60 +456,64 @@ def test_reload_hit(self): self.assertIsNone(notification.blob_name_prefix) self.assertEqual(notification.payload_format, NONE_PAYLOAD_FORMAT) - api_request.assert_called_once_with( - method="GET", - path=self.NOTIFICATION_PATH, - query_params={"userProject": USER_PROJECT}, - timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + expected_query_params = {"userProject": user_project} + client._get_resource.assert_called_once_with( + self.NOTIFICATION_PATH, + query_params=expected_query_params, + timeout=timeout, + retry=retry, ) def test_delete_wo_notification_id(self): - client = self._make_client() + client = mock.Mock(spec=["_delete_resource", "project"]) + client.project = self.BUCKET_PROJECT bucket = self._make_bucket(client) notification = self._make_one(bucket, self.TOPIC_NAME) with self.assertRaises(ValueError): notification.delete() - def test_delete_miss(self): + client._delete_resource.assert_not_called() + + def test_delete_miss_w_defaults(self): from google.cloud.exceptions import NotFound - client = self._make_client() + client = mock.Mock(spec=["_delete_resource", "project"]) + client._delete_resource.side_effect = NotFound("testing") + client.project = self.BUCKET_PROJECT bucket = self._make_bucket(client) notification = self._make_one(bucket, self.TOPIC_NAME) notification._properties["id"] = self.NOTIFICATION_ID - api_request = client._connection.api_request - api_request.side_effect = NotFound("testing") with self.assertRaises(NotFound): - notification.delete(timeout=42) + notification.delete() - api_request.assert_called_once_with( - method="DELETE", - path=self.NOTIFICATION_PATH, + client._delete_resource.assert_called_once_with( + self.NOTIFICATION_PATH, query_params={}, - timeout=42, + timeout=self._get_default_timeout(), retry=DEFAULT_RETRY, ) - def test_delete_hit(self): - USER_PROJECT = "user-project-123" - client = self._make_client() - bucket = self._make_bucket(client, user_project=USER_PROJECT) + def test_delete_hit_w_explicit_client_timeout_retry(self): + user_project = "user-project-123" + client = mock.Mock(spec=["_delete_resource"]) + client._delete_resource.return_value = None + bucket_client = mock.Mock(spec=["project"]) + bucket_client.project = self.BUCKET_PROJECT + bucket = self._make_bucket(bucket_client, user_project=user_project) notification = self._make_one(bucket, self.TOPIC_NAME) notification._properties["id"] = self.NOTIFICATION_ID - api_request = client._connection.api_request - api_request.return_value = None + timeout = 42 + retry = mock.Mock(spec=[]) - notification.delete(client=client) + notification.delete(client=client, timeout=timeout, retry=retry) - api_request.assert_called_once_with( - method="DELETE", - path=self.NOTIFICATION_PATH, - query_params={"userProject": USER_PROJECT}, - timeout=self._get_default_timeout(), - retry=DEFAULT_RETRY, + client._delete_resource.assert_called_once_with( + self.NOTIFICATION_PATH, + query_params={"userProject": user_project}, + timeout=timeout, + retry=retry, ) diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index 582fa8097..3111584cb 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -19,6 +19,14 @@ import mock +try: + ConnectionError +except NameError: + _HAS_STDLIB_CONNECTION_ERROR = False +else: + _HAS_STDLIB_CONNECTION_ERROR = True + + class Test_should_retry(unittest.TestCase): def _call_fut(self, exc): from google.cloud.storage import retry @@ -56,9 +64,22 @@ def test_w_google_api_call_error_miss(self): 
self.assertFalse(self._call_fut(exc)) def test_w_requests_connection_error(self): + import requests + + exc = requests.ConnectionError() + self.assertTrue(self._call_fut(exc)) + + def test_miss_w_stdlib_error(self): exc = ValueError("testing") self.assertFalse(self._call_fut(exc)) + @unittest.skipUnless( + _HAS_STDLIB_CONNECTION_ERROR, "No builtin 'ConnectionError' in Python 2", + ) + def test_w_stdlib_connection_error(self): + exc = ConnectionError() + self.assertTrue(self._call_fut(exc)) + class TestConditionalRetryPolicy(unittest.TestCase): def _make_one(self, retry_policy, conditional_predicate, required_kwargs):