diff --git a/.flake8 b/.flake8
index 20fe9bda2..ed9316381 100644
--- a/.flake8
+++ b/.flake8
@@ -21,6 +21,8 @@ exclude =
# Exclude generated code.
**/proto/**
**/gapic/**
+ **/services/**
+ **/types/**
*_pb2.py
# Standard linting exemptions.
diff --git a/.gitignore b/.gitignore
index 3fb06e09c..b87e1ed58 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,6 +10,7 @@
dist
build
eggs
+.eggs
parts
bin
var
@@ -49,6 +50,7 @@ bigquery/docs/generated
# Virtual environment
env/
coverage.xml
+sponge_log.xml
# System test environment variables.
system_tests/local_test_setup
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
index 893925df9..45091eddd 100755
--- a/.kokoro/publish-docs.sh
+++ b/.kokoro/publish-docs.sh
@@ -13,8 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-#!/bin/bash
-
set -eo pipefail
# Disable buffering, so that the logs stream through.
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
index 73f9310b6..268407736 100755
--- a/.kokoro/release.sh
+++ b/.kokoro/release.sh
@@ -13,8 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-#!/bin/bash
-
set -eo pipefail
# Start the releasetool reporter
diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg
new file mode 100644
index 000000000..37042fc67
--- /dev/null
+++ b/.kokoro/samples/lint/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "lint"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-storage/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-storage/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/lint/continuous.cfg b/.kokoro/samples/lint/continuous.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/lint/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/periodic.cfg b/.kokoro/samples/lint/periodic.cfg
new file mode 100644
index 000000000..50fec9649
--- /dev/null
+++ b/.kokoro/samples/lint/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/lint/presubmit.cfg b/.kokoro/samples/lint/presubmit.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/lint/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
new file mode 100644
index 000000000..6287c9952
--- /dev/null
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.6"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-storage/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-storage/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.6/continuous.cfg
new file mode 100644
index 000000000..7218af149
--- /dev/null
+++ b/.kokoro/samples/python3.6/continuous.cfg
@@ -0,0 +1,7 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg
new file mode 100644
index 000000000..50fec9649
--- /dev/null
+++ b/.kokoro/samples/python3.6/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.6/presubmit.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.6/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
new file mode 100644
index 000000000..fb9bedb81
--- /dev/null
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.7"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-storage/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-storage/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/continuous.cfg b/.kokoro/samples/python3.7/continuous.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.7/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg
new file mode 100644
index 000000000..50fec9649
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.7/presubmit.cfg b/.kokoro/samples/python3.7/presubmit.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.7/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
new file mode 100644
index 000000000..52a03a568
--- /dev/null
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -0,0 +1,34 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.8"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-storage/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-storage/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/continuous.cfg b/.kokoro/samples/python3.8/continuous.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.8/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg
new file mode 100644
index 000000000..50fec9649
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.8/presubmit.cfg b/.kokoro/samples/python3.8/presubmit.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.8/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
new file mode 100755
index 000000000..511ce7901
--- /dev/null
+++ b/.kokoro/test-samples.sh
@@ -0,0 +1,104 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to that of the rightmost command to exit with a non-zero status
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-storage
+
+# Run periodic samples tests at latest release
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ LATEST_RELEASE=$(git describe --abbrev=0 --tags)
+ git checkout $LATEST_RELEASE
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use the secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+ gcloud auth activate-service-account \
+ --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+ --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+ --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+ cd "$ROOT"
+ # Navigate to the project folder.
+ file=$(dirname "$file")
+ cd "$file"
+
+ echo "------------------------------------------------------------"
+ echo "- testing $file"
+ echo "------------------------------------------------------------"
+
+ # Use nox to execute the tests for the project.
+ python3.6 -m nox -s "$RUN_TESTS_SESSION"
+ EXIT=$?
+
+ # If this is a periodic build, send the test log to the Build Cop Bot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop.
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop
+ $KOKORO_GFILE_DIR/linux_amd64/buildcop
+ fi
+
+ if [[ $EXIT -ne 0 ]]; then
+ RTN=1
+ echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+ else
+ echo -e "\n Testing completed.\n"
+ fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3addf68b8..0e117c6b1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,32 @@
[1]: https://pypi.org/project/google-cloud-storage/#history
+## [1.30.0](https://www.github.com/googleapis/python-storage/compare/v1.29.0...v1.30.0) (2020-07-24)
+
+
+### Features
+
+* add timeouts to Blob methods where missing ([#185](https://www.github.com/googleapis/python-storage/issues/185)) ([6eeb855](https://www.github.com/googleapis/python-storage/commit/6eeb855aa0e6a7835d1d4f6e72951e43af22ab57))
+* auto-populate standard headers for non-chunked downloads ([#204](https://www.github.com/googleapis/python-storage/issues/204)) ([d8432cd](https://www.github.com/googleapis/python-storage/commit/d8432cd65a4e9b38eebd1ade2ff00f2f44bb0ef6)), closes [#24](https://www.github.com/googleapis/python-storage/issues/24)
+* migrate to Service Account Credentials API ([#189](https://www.github.com/googleapis/python-storage/issues/189)) ([e4990d0](https://www.github.com/googleapis/python-storage/commit/e4990d06043dbd8d1a417f3a1a67fe8746071f1c))
+
+
+### Bug Fixes
+
+* add multiprocessing.rst to synthool excludes ([#186](https://www.github.com/googleapis/python-storage/issues/186)) ([4d76e38](https://www.github.com/googleapis/python-storage/commit/4d76e3882210ed2818a43256265f6df31348d410))
+
+
+### Documentation
+
+* fix indent in code blocks ([#171](https://www.github.com/googleapis/python-storage/issues/171)) ([62d1543](https://www.github.com/googleapis/python-storage/commit/62d1543e18040b286b23464562aa6eb998074c54)), closes [#170](https://www.github.com/googleapis/python-storage/issues/170)
+* remove doubled word in docstring ([#209](https://www.github.com/googleapis/python-storage/issues/209)) ([7a4e7a5](https://www.github.com/googleapis/python-storage/commit/7a4e7a5974abedb0b7b2e110cacbfcd0a40346b6))
+
+
+### Dependencies
+
+* prep for grmp-1.0.0 release ([#211](https://www.github.com/googleapis/python-storage/issues/211)) ([55bae9a](https://www.github.com/googleapis/python-storage/commit/55bae9a0e7c0db512c10c6b3b621cd2ef05c9729))
+
+
## [1.29.0](https://www.github.com/googleapis/python-storage/compare/v1.28.1...v1.29.0) (2020-06-09)
@@ -20,11 +46,11 @@
### Bug Fixes
-* **storage:** add documentaion of list_blobs with user project ([#147](https://www.github.com/googleapis/python-storage/issues/147)) ([792b21f](https://www.github.com/googleapis/python-storage/commit/792b21fd2263b518d56f79cab6a4a1bb06c6e4e7))
-* **storage:** add projection parameter to blob.reload method ([#146](https://www.github.com/googleapis/python-storage/issues/146)) ([ddad20b](https://www.github.com/googleapis/python-storage/commit/ddad20b3c3d2e6bf482e34dad85fa4b0ff90e1b1))
-* **storage:** add unused variables to method generation match ([#152](https://www.github.com/googleapis/python-storage/issues/152)) ([f6574bb](https://www.github.com/googleapis/python-storage/commit/f6574bb84c60c30989d05dba97b423579360cdb2))
-* **storage:** change the method names in snippets file ([#161](https://www.github.com/googleapis/python-storage/issues/161)) ([e516ed9](https://www.github.com/googleapis/python-storage/commit/e516ed9be518e30df4e201d3242f979c0b081086))
-* **storage:** fix upload object with bucket cmek enabled ([#158](https://www.github.com/googleapis/python-storage/issues/158)) ([5f27ffa](https://www.github.com/googleapis/python-storage/commit/5f27ffa3b1b55681453b594a0ef9e2811fc5f0c8))
+* add documentaion of list_blobs with user project ([#147](https://www.github.com/googleapis/python-storage/issues/147)) ([792b21f](https://www.github.com/googleapis/python-storage/commit/792b21fd2263b518d56f79cab6a4a1bb06c6e4e7))
+* add projection parameter to blob.reload method ([#146](https://www.github.com/googleapis/python-storage/issues/146)) ([ddad20b](https://www.github.com/googleapis/python-storage/commit/ddad20b3c3d2e6bf482e34dad85fa4b0ff90e1b1))
+* add unused variables to method generation match ([#152](https://www.github.com/googleapis/python-storage/issues/152)) ([f6574bb](https://www.github.com/googleapis/python-storage/commit/f6574bb84c60c30989d05dba97b423579360cdb2))
+* change the method names in snippets file ([#161](https://www.github.com/googleapis/python-storage/issues/161)) ([e516ed9](https://www.github.com/googleapis/python-storage/commit/e516ed9be518e30df4e201d3242f979c0b081086))
+* fix upload object with bucket cmek enabled ([#158](https://www.github.com/googleapis/python-storage/issues/158)) ([5f27ffa](https://www.github.com/googleapis/python-storage/commit/5f27ffa3b1b55681453b594a0ef9e2811fc5f0c8))
* set default POST policy scheme to "http" ([#172](https://www.github.com/googleapis/python-storage/issues/172)) ([90c020d](https://www.github.com/googleapis/python-storage/commit/90c020d69a69ebc396416e4086a2e0838932130c))
### [1.28.1](https://www.github.com/googleapis/python-storage/compare/v1.28.0...v1.28.1) (2020-04-28)
@@ -32,7 +58,7 @@
### Bug Fixes
-* **storage:** anonymous credentials for private bucket ([#107](https://www.github.com/googleapis/python-storage/issues/107)) ([6152ab4](https://www.github.com/googleapis/python-storage/commit/6152ab4067d39ba824f9b6a17b83859dd7236cec))
+* anonymous credentials for private bucket ([#107](https://www.github.com/googleapis/python-storage/issues/107)) ([6152ab4](https://www.github.com/googleapis/python-storage/commit/6152ab4067d39ba824f9b6a17b83859dd7236cec))
* add bucket name into POST policy conditions ([#118](https://www.github.com/googleapis/python-storage/issues/118)) ([311ecab](https://www.github.com/googleapis/python-storage/commit/311ecabf8acc3018cef0697dd29483693f7722b9))
## [1.28.0](https://www.github.com/googleapis/python-storage/compare/v1.27.0...v1.28.0) (2020-04-22)
@@ -40,13 +66,13 @@
### Features
-* **storage:** add arguments for *GenerationMatch uploading options ([#111](https://www.github.com/googleapis/python-storage/issues/111)) ([b11aa5f](https://www.github.com/googleapis/python-storage/commit/b11aa5f00753b094580847bc62c154ae0e584dbc))
+* add arguments for *GenerationMatch uploading options ([#111](https://www.github.com/googleapis/python-storage/issues/111)) ([b11aa5f](https://www.github.com/googleapis/python-storage/commit/b11aa5f00753b094580847bc62c154ae0e584dbc))
### Bug Fixes
-* **storage:** fix incorrect mtime by UTC offset ([#42](https://www.github.com/googleapis/python-storage/issues/42)) ([76bd652](https://www.github.com/googleapis/python-storage/commit/76bd652a3078d94e03e566b6a387fc488ab26910))
-* **storage:** remove expiration strict conversion ([#106](https://www.github.com/googleapis/python-storage/issues/106)) ([9550dad](https://www.github.com/googleapis/python-storage/commit/9550dad6e63e249110fc9dcda245061b76dacdcf)), closes [#105](https://www.github.com/googleapis/python-storage/issues/105)
+* fix incorrect mtime by UTC offset ([#42](https://www.github.com/googleapis/python-storage/issues/42)) ([76bd652](https://www.github.com/googleapis/python-storage/commit/76bd652a3078d94e03e566b6a387fc488ab26910))
+* remove expiration strict conversion ([#106](https://www.github.com/googleapis/python-storage/issues/106)) ([9550dad](https://www.github.com/googleapis/python-storage/commit/9550dad6e63e249110fc9dcda245061b76dacdcf)), closes [#105](https://www.github.com/googleapis/python-storage/issues/105)
## [1.27.0](https://www.github.com/googleapis/python-storage/compare/v1.26.0...v1.27.0) (2020-04-01)
@@ -54,35 +80,35 @@
### Features
* generate signed URLs for blobs/buckets using virtual hostname ([#58](https://www.github.com/googleapis/python-storage/issues/58)) ([23df542](https://www.github.com/googleapis/python-storage/commit/23df542d0669852b05139023d5ef1ae14a09f4c7))
-* **storage:** Add cname support for V4 signature ([#72](https://www.github.com/googleapis/python-storage/issues/72)) ([cc853af](https://www.github.com/googleapis/python-storage/commit/cc853af6bf8e44e5b16e8cdfb3a275629ffb1f27))
-* **storage:** add conformance tests for virtual hosted style signed URLs ([#83](https://www.github.com/googleapis/python-storage/issues/83)) ([5adc8b0](https://www.github.com/googleapis/python-storage/commit/5adc8b0e6ffe28185a4085cd1fc8c1b4998094aa))
-* **storage:** add get notification method ([#77](https://www.github.com/googleapis/python-storage/issues/77)) ([f602252](https://www.github.com/googleapis/python-storage/commit/f6022521bee0824e1b291211703afc5eae6c6891))
-* **storage:** improve v4 signature query parameters encoding ([#48](https://www.github.com/googleapis/python-storage/issues/48)) ([8df0b55](https://www.github.com/googleapis/python-storage/commit/8df0b554a1904787889309707f08c6b8683cad44))
+* Add cname support for V4 signature ([#72](https://www.github.com/googleapis/python-storage/issues/72)) ([cc853af](https://www.github.com/googleapis/python-storage/commit/cc853af6bf8e44e5b16e8cdfb3a275629ffb1f27))
+* add conformance tests for virtual hosted style signed URLs ([#83](https://www.github.com/googleapis/python-storage/issues/83)) ([5adc8b0](https://www.github.com/googleapis/python-storage/commit/5adc8b0e6ffe28185a4085cd1fc8c1b4998094aa))
+* add get notification method ([#77](https://www.github.com/googleapis/python-storage/issues/77)) ([f602252](https://www.github.com/googleapis/python-storage/commit/f6022521bee0824e1b291211703afc5eae6c6891))
+* improve v4 signature query parameters encoding ([#48](https://www.github.com/googleapis/python-storage/issues/48)) ([8df0b55](https://www.github.com/googleapis/python-storage/commit/8df0b554a1904787889309707f08c6b8683cad44))
### Bug Fixes
-* **storage:** fix blob metadata to None regression ([#60](https://www.github.com/googleapis/python-storage/issues/60)) ([a834d1b](https://www.github.com/googleapis/python-storage/commit/a834d1b54aa96152ced4d841c4e0c241acd2d8d8))
+* fix blob metadata to None regression ([#60](https://www.github.com/googleapis/python-storage/issues/60)) ([a834d1b](https://www.github.com/googleapis/python-storage/commit/a834d1b54aa96152ced4d841c4e0c241acd2d8d8))
* add classifer for Python 3.8 ([#63](https://www.github.com/googleapis/python-storage/issues/63)) ([1b9b6bc](https://www.github.com/googleapis/python-storage/commit/1b9b6bc2601ee336a8399266852fb850e368b30a))
* make v4 signing formatting consistent w/ spec ([#56](https://www.github.com/googleapis/python-storage/issues/56)) ([8712da8](https://www.github.com/googleapis/python-storage/commit/8712da84c93600a736e72a097c42a49b4724347d))
* use correct IAM object admin role ([#71](https://www.github.com/googleapis/python-storage/issues/71)) ([2e27edd](https://www.github.com/googleapis/python-storage/commit/2e27edd3fe65cd5e17c12bf11f2b58f611937d61))
-* **storage:** remove docstring of retrun in reload method ([#78](https://www.github.com/googleapis/python-storage/issues/78)) ([4abeb1c](https://www.github.com/googleapis/python-storage/commit/4abeb1c0810c4e5d716758536da9fc204fa4c2a9))
-* **storage:** use OrderedDict while encoding POST policy ([#95](https://www.github.com/googleapis/python-storage/issues/95)) ([df560e1](https://www.github.com/googleapis/python-storage/commit/df560e178369a6d03140e412a25af6ec7444f5a1))
+* remove docstring of retrun in reload method ([#78](https://www.github.com/googleapis/python-storage/issues/78)) ([4abeb1c](https://www.github.com/googleapis/python-storage/commit/4abeb1c0810c4e5d716758536da9fc204fa4c2a9))
+* use OrderedDict while encoding POST policy ([#95](https://www.github.com/googleapis/python-storage/issues/95)) ([df560e1](https://www.github.com/googleapis/python-storage/commit/df560e178369a6d03140e412a25af6ec7444f5a1))
## [1.26.0](https://www.github.com/googleapis/python-storage/compare/v1.25.0...v1.26.0) (2020-02-12)
### Features
-* **storage:** add support for signing URLs using token ([#9889](https://www.github.com/googleapis/google-cloud-python/issues/9889)) ([ad280bf](https://www.github.com/googleapis/python-storage/commit/ad280bf506d3d7a37c402d06eac07422a5fe80af))
+* add support for signing URLs using token ([#9889](https://www.github.com/googleapis/google-cloud-python/issues/9889)) ([ad280bf](https://www.github.com/googleapis/python-storage/commit/ad280bf506d3d7a37c402d06eac07422a5fe80af))
* add timeout parameter to public methods ([#44](https://www.github.com/googleapis/python-storage/issues/44)) ([63abf07](https://www.github.com/googleapis/python-storage/commit/63abf0778686df1caa001270dd22f9df0daf0c78))
### Bug Fixes
-* **storage:** fix documentation of max_result parameter in list_blob ([#43](https://www.github.com/googleapis/python-storage/issues/43)) ([ff15f19](https://www.github.com/googleapis/python-storage/commit/ff15f19d3a5830acdd540181dc6e9d07ca7d88ee))
-* **storage:** fix system test and change scope for iam access token ([#47](https://www.github.com/googleapis/python-storage/issues/47)) ([bc5375f](https://www.github.com/googleapis/python-storage/commit/bc5375f4c88f7e6ad1afbe7667c49d9a846e9757))
-* **tests:** remove low version error assertion from iam conditions system tests ([#53](https://www.github.com/googleapis/python-storage/issues/53)) ([8904aee](https://www.github.com/googleapis/python-storage/commit/8904aee9ad5dc01ab83e1460b6f186a739668eb7))
+* fix documentation of max_result parameter in list_blob ([#43](https://www.github.com/googleapis/python-storage/issues/43)) ([ff15f19](https://www.github.com/googleapis/python-storage/commit/ff15f19d3a5830acdd540181dc6e9d07ca7d88ee))
+* fix system test and change scope for iam access token ([#47](https://www.github.com/googleapis/python-storage/issues/47)) ([bc5375f](https://www.github.com/googleapis/python-storage/commit/bc5375f4c88f7e6ad1afbe7667c49d9a846e9757))
+* remove low version error assertion from iam conditions system tests ([#53](https://www.github.com/googleapis/python-storage/issues/53)) ([8904aee](https://www.github.com/googleapis/python-storage/commit/8904aee9ad5dc01ab83e1460b6f186a739668eb7))
## 1.25.0
diff --git a/MANIFEST.in b/MANIFEST.in
index 68855abc3..e9e29d120 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -20,3 +20,6 @@ recursive-include google *.json *.proto
recursive-include tests *
global-exclude *.py[co]
global-exclude __pycache__
+
+# Exclude scripts for samples readmegen
+prune scripts/readme-gen
\ No newline at end of file
diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html
index 228529efe..6316a537f 100644
--- a/docs/_templates/layout.html
+++ b/docs/_templates/layout.html
@@ -21,8 +21,8 @@
- On January 1, 2020 this library will no longer support Python 2 on the latest released version.
- Previously released library versions will continue to be available. For more information please
+ As of January 1, 2020 this library no longer supports Python 2 on the latest released version.
+ Library versions released prior to that date will continue to be available. For more information please
visit
Python 2 support on Google Cloud.
{% block body %} {% endblock %}
diff --git a/docs/conf.py b/docs/conf.py
index 1bb947c41..8d2f1e15d 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -43,7 +43,7 @@
# autodoc/autosummary flags
autoclass_content = "both"
-autodoc_default_flags = ["members"]
+autodoc_default_options = {"members": True}
autosummary_generate = True
diff --git a/google/cloud/storage/__init__.py b/google/cloud/storage/__init__.py
index 2b643fc80..2a9629dfb 100644
--- a/google/cloud/storage/__init__.py
+++ b/google/cloud/storage/__init__.py
@@ -19,6 +19,7 @@
.. literalinclude:: snippets.py
:start-after: [START storage_get_started]
:end-before: [END storage_get_started]
+ :dedent: 4
The main concepts with this API are:
diff --git a/google/cloud/storage/_signing.py b/google/cloud/storage/_signing.py
index 80c97c784..8e18bdaea 100644
--- a/google/cloud/storage/_signing.py
+++ b/google/cloud/storage/_signing.py
@@ -660,14 +660,14 @@ def _sign_message(message, access_token, service_account_email):
message = _helpers._to_bytes(message)
method = "POST"
- url = "https://iam.googleapis.com/v1/projects/-/serviceAccounts/{}:signBlob?alt=json".format(
+ url = "https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/{}:signBlob?alt=json".format(
service_account_email
)
headers = {
"Authorization": "Bearer " + access_token,
"Content-type": "application/json",
}
- body = json.dumps({"bytesToSign": base64.b64encode(message).decode("utf-8")})
+ body = json.dumps({"payload": base64.b64encode(message).decode("utf-8")})
request = requests.Request()
response = request(url=url, method=method, body=body, headers=headers)
@@ -678,7 +678,7 @@ def _sign_message(message, access_token, service_account_email):
)
data = json.loads(response.data.decode("utf-8"))
- return data["signature"]
+ return data["signedBlob"]
def _url_encode(query_params):
diff --git a/google/cloud/storage/acl.py b/google/cloud/storage/acl.py
index fb07faba9..765590f94 100644
--- a/google/cloud/storage/acl.py
+++ b/google/cloud/storage/acl.py
@@ -21,6 +21,7 @@
.. literalinclude:: snippets.py
:start-after: [START client_bucket_acl]
:end-before: [END client_bucket_acl]
+ :dedent: 4
Adding and removing permissions can be done with the following methods
@@ -52,6 +53,7 @@
.. literalinclude:: snippets.py
:start-after: [START acl_user_settings]
:end-before: [END acl_user_settings]
+ :dedent: 4
After that, you can save any changes you make with the
:func:`google.cloud.storage.acl.ACL.save` method:
@@ -59,6 +61,7 @@
.. literalinclude:: snippets.py
:start-after: [START acl_save]
:end-before: [END acl_save]
+ :dedent: 4
You can alternatively save any existing :class:`google.cloud.storage.acl.ACL`
object (whether it was created by a factory method or not) from a
@@ -67,6 +70,7 @@
.. literalinclude:: snippets.py
:start-after: [START acl_save_bucket]
:end-before: [END acl_save_bucket]
+ :dedent: 4
To get the list of ``entity`` and ``role`` for each unique pair, the
:class:`ACL` class is iterable:
@@ -74,6 +78,7 @@
.. literalinclude:: snippets.py
:start-after: [START acl_print]
:end-before: [END acl_print]
+ :dedent: 4
This list of tuples can be used as the ``entity`` and ``role`` fields
when sending metadata for ACLs to the API.
diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py
index ec6c6b08e..07a17867c 100644
--- a/google/cloud/storage/blob.py
+++ b/google/cloud/storage/blob.py
@@ -31,6 +31,7 @@
from io import BytesIO
import mimetypes
import os
+import re
import warnings
import six
@@ -187,7 +188,7 @@ def __init__(
self._bucket = bucket
self._acl = ObjectACL(self)
_raise_if_more_than_one_set(
- encryption_key=encryption_key, kms_key_name=kms_key_name,
+ encryption_key=encryption_key, kms_key_name=kms_key_name
)
self._encryption_key = encryption_key
@@ -783,6 +784,34 @@ def _get_download_url(
)
return _add_query_parameters(base_url, name_value_pairs)
+ def _extract_headers_from_download(self, response):
+ """Extract headers from a non-chunked request's http object.
+
+ This avoids the need to make a second request for commonly used
+ headers.
+
+ :type response:
+ :class requests.models.Response
+ :param response: The server response from downloading a non-chunked file
+ """
+ self.content_encoding = response.headers.get("Content-Encoding", None)
+ self.content_type = response.headers.get("Content-Type", None)
+ self.cache_control = response.headers.get("Cache-Control", None)
+ self.storage_class = response.headers.get("X-Goog-Storage-Class", None)
+ self.content_language = response.headers.get("Content-Language", None)
+ # 'X-Goog-Hash': 'crc32c=4gcgLQ==,md5=CS9tHYTtyFntzj7B9nkkJQ==',
+ x_goog_hash = response.headers.get("X-Goog-Hash", "")
+
+ digests = {}
+ for encoded_digest in x_goog_hash.split(","):
+ match = re.match(r"(crc32c|md5)=([\w\d]+)==", encoded_digest)
+ if match:
+ method, digest = match.groups()
+ digests[method] = digest
+
+ self.crc32c = digests.get("crc32c", None)
+ self.md5_hash = digests.get("md5", None)
+
def _do_download(
self,
transport,
@@ -792,6 +821,7 @@ def _do_download(
start=None,
end=None,
raw_download=False,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Perform a download without any error handling.
@@ -821,6 +851,14 @@ def _do_download(
:type raw_download: bool
:param raw_download:
(Optional) If true, download the object without any expansion.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
if self.chunk_size is None:
if raw_download:
@@ -831,8 +869,8 @@ def _do_download(
download = klass(
download_url, stream=file_obj, headers=headers, start=start, end=end
)
- download.consume(transport)
-
+ response = download.consume(transport, timeout=timeout)
+ self._extract_headers_from_download(response)
else:
if raw_download:
@@ -850,7 +888,7 @@ def _do_download(
)
while not download.finished:
- download.consume_next_chunk(transport)
+ download.consume_next_chunk(transport, timeout=timeout)
def download_to_file(
self,
@@ -863,6 +901,7 @@ def download_to_file(
if_generation_not_match=None,
if_metageneration_match=None,
if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Download the contents of this blob into a file-like object.
@@ -931,6 +970,14 @@ def download_to_file(
:param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
blob's current metageneration does not match the given value.
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:raises: :class:`google.cloud.exceptions.NotFound`
"""
client = self._require_client(client)
@@ -948,7 +995,14 @@ def download_to_file(
transport = self._get_transport(client)
try:
self._do_download(
- transport, file_obj, download_url, headers, start, end, raw_download
+ transport,
+ file_obj,
+ download_url,
+ headers,
+ start,
+ end,
+ raw_download,
+ timeout=timeout,
)
except resumable_media.InvalidResponse as exc:
_raise_from_invalid_response(exc)
@@ -964,6 +1018,7 @@ def download_to_filename(
if_generation_not_match=None,
if_metageneration_match=None,
if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Download the contents of this blob into a named file.
@@ -1008,6 +1063,14 @@ def download_to_filename(
:param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
blob's current metageneration does not match the given value.
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:raises: :class:`google.cloud.exceptions.NotFound`
"""
try:
@@ -1022,6 +1085,7 @@ def download_to_filename(
if_generation_not_match=if_generation_not_match,
if_metageneration_match=if_metageneration_match,
if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
)
except resumable_media.DataCorruption:
# Delete the corrupt downloaded file.
@@ -1046,6 +1110,7 @@ def download_as_string(
if_generation_not_match=None,
if_metageneration_match=None,
if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Download the contents of this blob as a bytes object.
@@ -1087,6 +1152,14 @@ def download_as_string(
:param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
blob's current metageneration does not match the given value.
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: bytes
:returns: The data stored in this blob.
@@ -1103,6 +1176,7 @@ def download_as_string(
if_generation_not_match=if_generation_not_match,
if_metageneration_match=if_metageneration_match,
if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
)
return string_buffer.getvalue()
@@ -1203,6 +1277,7 @@ def _do_multipart_upload(
if_generation_not_match,
if_metageneration_match,
if_metageneration_not_match,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Perform a multipart upload.
@@ -1256,6 +1331,14 @@ def _do_multipart_upload(
:param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
blob's current metageneration does not match the given value.
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: :class:`~requests.Response`
:returns: The "200 OK" response object returned after the multipart
upload request.
@@ -1318,7 +1401,9 @@ def _do_multipart_upload(
max_retries=num_retries
)
- response = upload.transmit(transport, data, object_metadata, content_type)
+ response = upload.transmit(
+ transport, data, object_metadata, content_type, timeout=timeout
+ )
return response
@@ -1336,6 +1421,7 @@ def _initiate_resumable_upload(
if_generation_not_match=None,
if_metageneration_match=None,
if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Initiate a resumable upload.
@@ -1402,6 +1488,14 @@ def _initiate_resumable_upload(
:param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
blob's current metageneration does not match the given value.
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: tuple
:returns:
Pair of
@@ -1472,6 +1566,7 @@ def _initiate_resumable_upload(
content_type,
total_bytes=size,
stream_final=False,
+ timeout=timeout,
)
return upload, transport
@@ -1488,6 +1583,7 @@ def _do_resumable_upload(
if_generation_not_match,
if_metageneration_match,
if_metageneration_not_match,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Perform a resumable upload.
@@ -1544,6 +1640,14 @@ def _do_resumable_upload(
:param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
blob's current metageneration does not match the given value.
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: :class:`~requests.Response`
:returns: The "200 OK" response object returned after the final chunk
is uploaded.
@@ -1559,10 +1663,11 @@ def _do_resumable_upload(
if_generation_not_match=if_generation_not_match,
if_metageneration_match=if_metageneration_match,
if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
)
while not upload.finished:
- response = upload.transmit_next_chunk(transport)
+ response = upload.transmit_next_chunk(transport, timeout=timeout)
return response
@@ -1578,6 +1683,7 @@ def _do_upload(
if_generation_not_match,
if_metageneration_match,
if_metageneration_not_match,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Determine an upload strategy and then perform the upload.
@@ -1635,6 +1741,14 @@ def _do_upload(
:param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
blob's current metageneration does not match the given value.
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: dict
:returns: The parsed JSON from the "200 OK" response. This will be the
**only** response in the multipart case and it will be the
@@ -1652,6 +1766,7 @@ def _do_upload(
if_generation_not_match,
if_metageneration_match,
if_metageneration_not_match,
+ timeout=timeout,
)
else:
response = self._do_resumable_upload(
@@ -1665,6 +1780,7 @@ def _do_upload(
if_generation_not_match,
if_metageneration_match,
if_metageneration_not_match,
+ timeout=timeout,
)
return response.json()
@@ -1682,6 +1798,7 @@ def upload_from_file(
if_generation_not_match=None,
if_metageneration_match=None,
if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Upload the contents of this blob from a file-like object.
@@ -1768,6 +1885,14 @@ def upload_from_file(
:param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
blob's current metageneration does not match the given value.
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:raises: :class:`~google.cloud.exceptions.GoogleCloudError`
if the upload response returns an error status.
@@ -1793,6 +1918,7 @@ def upload_from_file(
if_generation_not_match,
if_metageneration_match,
if_metageneration_not_match,
+ timeout=timeout,
)
self._set_properties(created_json)
except resumable_media.InvalidResponse as exc:
@@ -1808,6 +1934,7 @@ def upload_from_filename(
if_generation_not_match=None,
if_metageneration_match=None,
if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Upload this blob's contents from the content of a named file.
@@ -1866,6 +1993,14 @@ def upload_from_filename(
:type if_metageneration_not_match: long
:param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
blob's current metageneration does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
content_type = self._get_content_type(content_type, filename=filename)
@@ -1881,6 +2016,7 @@ def upload_from_filename(
if_generation_not_match=if_generation_not_match,
if_metageneration_match=if_metageneration_match,
if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
)
def upload_from_string(
@@ -1893,6 +2029,7 @@ def upload_from_string(
if_generation_not_match=None,
if_metageneration_match=None,
if_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Upload contents of this blob from the provided string.
@@ -1946,6 +2083,14 @@ def upload_from_string(
:type if_metageneration_not_match: long
:param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
blob's current metageneration does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
data = _to_bytes(data, encoding="utf-8")
string_buffer = BytesIO(data)
@@ -1959,10 +2104,16 @@ def upload_from_string(
if_generation_not_match=if_generation_not_match,
if_metageneration_match=if_metageneration_match,
if_metageneration_not_match=if_metageneration_not_match,
+ timeout=timeout,
)
def create_resumable_upload_session(
- self, content_type=None, size=None, origin=None, client=None
+ self,
+ content_type=None,
+ size=None,
+ origin=None,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Create a resumable upload session.
@@ -2020,6 +2171,14 @@ def create_resumable_upload_session(
:param client: (Optional) The client to use. If not passed, falls back
to the ``client`` stored on the blob's bucket.
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
+
:rtype: str
:returns: The resumable upload session URL. The upload can be
completed by making an HTTP PUT request with the
@@ -2048,6 +2207,7 @@ def create_resumable_upload_session(
predefined_acl=None,
extra_headers=extra_headers,
chunk_size=self._CHUNK_SIZE_MULTIPLE,
+ timeout=timeout,
)
return upload.resumable_url
@@ -2510,6 +2670,7 @@ def update_storage_class(
if_source_generation_not_match=None,
if_source_metageneration_match=None,
if_source_metageneration_not_match=None,
+ timeout=_DEFAULT_TIMEOUT,
):
"""Update blob's storage class via a rewrite-in-place. This helper will
wait for the rewrite to complete before returning, so it may take some
@@ -2592,6 +2753,14 @@ def update_storage_class(
conditional on whether the source
object's current metageneration
does not match the given value.
+
+ :type timeout: float or tuple
+ :param timeout:
+ (Optional) The number of seconds the transport should wait for the
+ server response. Depending on the retry strategy, a request may be
+ repeated several times using the same timeout each time.
+ Can also be passed as a tuple (connect_timeout, read_timeout).
+ See :meth:`requests.Session.request` documentation for details.
"""
if new_class not in self.STORAGE_CLASSES:
raise ValueError("Invalid storage class: %s" % (new_class,))
@@ -2610,6 +2779,7 @@ def update_storage_class(
if_source_generation_not_match=if_source_generation_not_match,
if_source_metageneration_match=if_source_metageneration_match,
if_source_metageneration_not_match=if_source_metageneration_not_match,
+ timeout=timeout,
)
while token is not None:
token, _, _ = self.rewrite(
@@ -2623,6 +2793,7 @@ def update_storage_class(
if_source_generation_not_match=if_source_generation_not_match,
if_source_metageneration_match=if_source_metageneration_match,
if_source_metageneration_not_match=if_source_metageneration_not_match,
+ timeout=timeout,
)
cache_control = _scalar_property("cacheControl")
diff --git a/google/cloud/storage/bucket.py b/google/cloud/storage/bucket.py
index 228c0e2aa..ad7eaf6df 100644
--- a/google/cloud/storage/bucket.py
+++ b/google/cloud/storage/bucket.py
@@ -566,7 +566,7 @@ def from_string(cls, uri, client=None):
>>> from google.cloud import storage
>>> from google.cloud.storage.bucket import Bucket
>>> client = storage.Client()
- >>> bucket = Bucket.from_string("gs://bucket",client)
+ >>> bucket = Bucket.from_string("gs://bucket", client)
"""
scheme, netloc, path, query, frag = urlsplit(uri)
@@ -911,7 +911,7 @@ def patch(
self._properties["labels"][removed_label] = None
# Call the superclass method.
- return super(Bucket, self).patch(
+ super(Bucket, self).patch(
client=client,
timeout=timeout,
if_metageneration_match=if_metageneration_match,
@@ -968,6 +968,7 @@ def get_blob(
.. literalinclude:: snippets.py
:start-after: [START get_blob]
:end-before: [END get_blob]
+ :dedent: 4
If :attr:`user_project` is set, bills the API request to that project.
@@ -1381,6 +1382,7 @@ def delete_blob(
.. literalinclude:: snippets.py
:start-after: [START delete_blob]
:end-before: [END delete_blob]
+ :dedent: 4
If :attr:`user_project` is set, bills the API request to that project.
@@ -1431,6 +1433,7 @@ def delete_blob(
.. literalinclude:: snippets.py
:start-after: [START delete_blobs]
:end-before: [END delete_blobs]
+ :dedent: 4
"""
client = self._require_client(client)
@@ -1455,7 +1458,17 @@ def delete_blob(
timeout=timeout,
)
- def delete_blobs(self, blobs, on_error=None, client=None, timeout=_DEFAULT_TIMEOUT):
+ def delete_blobs(
+ self,
+ blobs,
+ on_error=None,
+ client=None,
+ timeout=_DEFAULT_TIMEOUT,
+ if_generation_match=None,
+ if_generation_not_match=None,
+ if_metageneration_match=None,
+ if_metageneration_not_match=None,
+ ):
"""Deletes a list of blobs from the current bucket.
Uses :meth:`delete_blob` to delete each individual blob.
@@ -1484,15 +1497,74 @@ def delete_blobs(self, blobs, on_error=None, client=None, timeout=_DEFAULT_TIMEO
Can also be passed as a tuple (connect_timeout, read_timeout).
See :meth:`requests.Session.request` documentation for details.
+ :type if_generation_match: list of long
+ :param if_generation_match: (Optional) Make the operation conditional on whether
+ the blob's current generation matches the given value.
+ Setting to 0 makes the operation succeed only if there
+ are no live versions of the blob. The list must match
+ ``blobs`` item-to-item.
+
+ :type if_generation_not_match: list of long
+ :param if_generation_not_match: (Optional) Make the operation conditional on whether
+ the blob's current generation does not match the given
+ value. If no live blob exists, the precondition fails.
+ Setting to 0 makes the operation succeed only if there
+ is a live version of the blob. The list must match
+ ``blobs`` item-to-item.
+
+ :type if_metageneration_match: list of long
+ :param if_metageneration_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration matches the given value.
+ The list must match ``blobs`` item-to-item.
+
+ :type if_metageneration_not_match: list of long
+ :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the
+ blob's current metageneration does not match the given value.
+ The list must match ``blobs`` item-to-item.
+
:raises: :class:`~google.cloud.exceptions.NotFound` (if
`on_error` is not passed).
+
+ Example:
+ Delete blobs using generation match preconditions.
+
+ >>> from google.cloud import storage
+
+ >>> client = storage.Client()
+ >>> bucket = client.bucket("bucket-name")
+
+ >>> blobs = [bucket.blob("blob-name-1"), bucket.blob("blob-name-2")]
+ >>> if_generation_match = [None] * len(blobs)
+ >>> if_generation_match[0] = 123 # precondition for "blob-name-1"
+
+ >>> bucket.delete_blobs(blobs, if_generation_match=if_generation_match)
"""
+ _raise_if_len_differs(
+ len(blobs),
+ if_generation_match=if_generation_match,
+ if_generation_not_match=if_generation_not_match,
+ if_metageneration_match=if_metageneration_match,
+ if_metageneration_not_match=if_metageneration_not_match,
+ )
+ if_generation_match = iter(if_generation_match or [])
+ if_generation_not_match = iter(if_generation_not_match or [])
+ if_metageneration_match = iter(if_metageneration_match or [])
+ if_metageneration_not_match = iter(if_metageneration_not_match or [])
+
for blob in blobs:
try:
blob_name = blob
if not isinstance(blob_name, six.string_types):
blob_name = blob.name
- self.delete_blob(blob_name, client=client, timeout=timeout)
+ self.delete_blob(
+ blob_name,
+ client=client,
+ timeout=timeout,
+ if_generation_match=next(if_generation_match, None),
+ if_generation_not_match=next(if_generation_not_match, None),
+ if_metageneration_match=next(if_metageneration_match, None),
+ if_metageneration_not_match=next(if_metageneration_not_match, None),
+ )
except NotFound:
if on_error is not None:
on_error(blob)
@@ -2036,6 +2108,7 @@ def add_lifecycle_delete_rule(self, **kw):
.. literalinclude:: snippets.py
:start-after: [START add_lifecycle_delete_rule]
:end-before: [END add_lifecycle_delete_rule]
+ :dedent: 4
:type kw: dict
:params kw: arguments passed to :class:`LifecycleRuleConditions`.
@@ -2053,6 +2126,7 @@ def add_lifecycle_set_storage_class_rule(self, storage_class, **kw):
.. literalinclude:: snippets.py
:start-after: [START add_lifecycle_set_storage_class_rule]
:end-before: [END add_lifecycle_set_storage_class_rule]
+ :dedent: 4
:type storage_class: str, one of :attr:`STORAGE_CLASSES`.
:param storage_class: new storage class to assign to matching items.
@@ -2396,12 +2470,14 @@ def configure_website(self, main_page_suffix=None, not_found_page=None):
.. literalinclude:: snippets.py
:start-after: [START configure_website]
:end-before: [END configure_website]
+ :dedent: 4
You probably should also make the whole bucket public:
.. literalinclude:: snippets.py
:start-after: [START make_public]
:end-before: [END make_public]
+ :dedent: 4
This says: "Make the bucket public, and all the stuff already in
the bucket, and anything else I add to the bucket. Just make it
@@ -2735,6 +2811,7 @@ def generate_upload_policy(self, conditions, expiration=None, client=None):
.. literalinclude:: snippets.py
:start-after: [START policy_document]
:end-before: [END policy_document]
+ :dedent: 4
.. _policy documents:
https://cloud.google.com/storage/docs/xml-api\
@@ -2980,3 +3057,23 @@ def generate_signed_url(
headers=headers,
query_parameters=query_parameters,
)
+
+
+def _raise_if_len_differs(expected_len, **generation_match_args):
+ """
+ Raise an error if any generation match argument
+ is set and its length differs from the given value.
+
+ :type expected_len: int
+ :param expected_len: Expected argument length in case it's set.
+
+ :type generation_match_args: dict
+ :param generation_match_args: Lists whose lengths must be checked.
+
+ :raises: :exc:`ValueError` if any argument is set but has an unexpected length.
+ """
+ for name, value in generation_match_args.items():
+ if value is not None and len(value) != expected_len:
+ raise ValueError(
+ "'{}' length must be the same as 'blobs' length".format(name)
+ )
diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py
index 4b23de04e..118377b7c 100644
--- a/google/cloud/storage/client.py
+++ b/google/cloud/storage/client.py
@@ -329,6 +329,7 @@ def get_bucket(
.. literalinclude:: snippets.py
:start-after: [START get_bucket]
:end-before: [END get_bucket]
+ :dedent: 4
Get a bucket using a resource.
@@ -367,6 +368,7 @@ def lookup_bucket(
.. literalinclude:: snippets.py
:start-after: [START lookup_bucket]
:end-before: [END lookup_bucket]
+ :dedent: 4
:type bucket_name: str
:param bucket_name: The name of the bucket to get.
@@ -461,6 +463,7 @@ def create_bucket(
.. literalinclude:: snippets.py
:start-after: [START create_bucket]
:end-before: [END create_bucket]
+ :dedent: 4
Create a bucket using a resource.
@@ -542,7 +545,7 @@ def download_blob_to_file(self, blob_or_uri, file_obj, start=None, end=None):
(Optional) The last byte in a range to be downloaded.
Examples:
- Download a blob using using a blob resource.
+ Download a blob using a blob resource.
>>> from google.cloud import storage
>>> client = storage.Client()
@@ -702,6 +705,7 @@ def list_buckets(
.. literalinclude:: snippets.py
:start-after: [START list_buckets]
:end-before: [END list_buckets]
+ :dedent: 4
This implements "storage.buckets.list".
diff --git a/noxfile.py b/noxfile.py
index 058dcdd61..fd120fd6d 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -26,11 +26,12 @@
BLACK_VERSION = "black==19.10b0"
BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"]
-if os.path.exists("samples"):
- BLACK_PATHS.append("samples")
+DEFAULT_PYTHON_VERSION = "3.8"
+SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"]
+UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"]
-@nox.session(python="3.8")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
"""Run linters.
@@ -38,7 +39,9 @@ def lint(session):
serious code quality issues.
"""
session.install("flake8", BLACK_VERSION)
- session.run("black", "--check", *BLACK_PATHS)
+ session.run(
+ "black", "--check", *BLACK_PATHS,
+ )
session.run("flake8", "google", "tests")
@@ -53,10 +56,12 @@ def blacken(session):
check the state of the `gcp_ubuntu_config` we use for that Kokoro run.
"""
session.install(BLACK_VERSION)
- session.run("black", *BLACK_PATHS)
+ session.run(
+ "black", *BLACK_PATHS,
+ )
-@nox.session(python="3.8")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint_setup_py(session):
"""Verify that setup.py is valid (including RST check)."""
session.install("docutils", "pygments")
@@ -73,6 +78,7 @@ def default(session):
"py.test",
"--quiet",
"--cov=google.cloud.storage",
+ "--cov=google.cloud",
"--cov=tests.unit",
"--cov-append",
"--cov-config=.coveragerc",
@@ -83,13 +89,13 @@ def default(session):
)
-@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"])
+@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
def unit(session):
"""Run the unit test suite."""
default(session)
-@nox.session(python=["2.7", "3.8"])
+@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
def system(session):
"""Run the system test suite."""
system_test_path = os.path.join("tests", "system.py")
@@ -103,19 +109,21 @@ def system(session):
# Sanity check: only run tests if found.
if not system_test_exists and not system_test_folder_exists:
session.skip("System tests were not found")
- session.install("google-cloud-iam")
- session.install("google-cloud-pubsub")
- session.install("google-cloud-kms")
# Use pre-release gRPC for system tests.
session.install("--pre", "grpcio")
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
- session.install("mock", "pytest")
-
+ session.install(
+ "mock",
+ "pytest",
+ "google-cloud-testutils",
+ "google-cloud-iam",
+ "google-cloud-pubsub",
+ "google-cloud-kms",
+ )
session.install("-e", ".")
- session.install("-e", "test_utils")
# Run py.test against the system tests.
if system_test_exists:
@@ -124,7 +132,7 @@ def system(session):
session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
-@nox.session(python="3.8")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def cover(session):
"""Run the final coverage report.
@@ -132,12 +140,12 @@ def cover(session):
test runs (not system test runs), and then erases coverage data.
"""
session.install("coverage", "pytest-cov")
- session.run("coverage", "report", "--show-missing", "--fail-under=100")
+ session.run("coverage", "report", "--show-missing", "--fail-under=99")
session.run("coverage", "erase")
-@nox.session(python="3.8")
+@nox.session(python=DEFAULT_PYTHON_VERSION)
def docs(session):
"""Build the docs for this library."""
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
new file mode 100755
index 000000000..ff599eb2a
--- /dev/null
+++ b/scripts/decrypt-secrets.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+# Copyright 2015 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+ROOT=$( dirname "$DIR" )
+
+# Work from the project root.
+cd $ROOT
+
+# Use SECRET_MANAGER_PROJECT if set, falling back to cloud-devrel-kokoro-resources.
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
+
+gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+ > testing/test-env.sh
+gcloud secrets versions access latest \
+ --secret="python-docs-samples-service-account" \
+ > testing/service-account.json
+gcloud secrets versions access latest \
+ --secret="python-docs-samples-client-secrets" \
+ > testing/client-secrets.json
\ No newline at end of file
diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py
new file mode 100644
index 000000000..d309d6e97
--- /dev/null
+++ b/scripts/readme-gen/readme_gen.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+
+# Copyright 2016 Google Inc
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Generates READMEs using configuration defined in yaml."""
+
+import argparse
+import io
+import os
+import subprocess
+
+import jinja2
+import yaml
+
+
+jinja_env = jinja2.Environment(
+ trim_blocks=True,
+ loader=jinja2.FileSystemLoader(
+ os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates'))))
+
+README_TMPL = jinja_env.get_template('README.tmpl.rst')
+
+
+def get_help(file):
+ return subprocess.check_output(['python', file, '--help']).decode()
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('source')
+ parser.add_argument('--destination', default='README.rst')
+
+ args = parser.parse_args()
+
+ source = os.path.abspath(args.source)
+ root = os.path.dirname(source)
+ destination = os.path.join(root, args.destination)
+
+ jinja_env.globals['get_help'] = get_help
+
+ with io.open(source, 'r') as f:
+ config = yaml.load(f)
+
+ # This allows get_help to execute in the right directory.
+ os.chdir(root)
+
+ output = README_TMPL.render(config)
+
+ with io.open(destination, 'w') as f:
+ f.write(output)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst
new file mode 100644
index 000000000..4fd239765
--- /dev/null
+++ b/scripts/readme-gen/templates/README.tmpl.rst
@@ -0,0 +1,87 @@
+{# The following line is a lie. BUT! Once jinja2 is done with it, it will
+ become truth! #}
+.. This file is automatically generated. Do not edit this file directly.
+
+{{product.name}} Python Samples
+===============================================================================
+
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst
+
+
+This directory contains samples for {{product.name}}. {{product.description}}
+
+{{description}}
+
+.. _{{product.name}}: {{product.url}}
+
+{% if required_api_url %}
+To run the sample, you need to enable the API at: {{required_api_url}}
+{% endif %}
+
+{% if required_role %}
+To run the sample, you need to have the `{{required_role}}` role.
+{% endif %}
+
+{{other_required_steps}}
+
+{% if setup %}
+Setup
+-------------------------------------------------------------------------------
+
+{% for section in setup %}
+
+{% include section + '.tmpl.rst' %}
+
+{% endfor %}
+{% endif %}
+
+{% if samples %}
+Samples
+-------------------------------------------------------------------------------
+
+{% for sample in samples %}
+{{sample.name}}
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+
+{% if not sample.hide_cloudshell_button %}
+.. image:: https://gstatic.com/cloudssh/images/open-btn.png
+ :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst
+{% endif %}
+
+
+{{sample.description}}
+
+To run this sample:
+
+.. code-block:: bash
+
+ $ python {{sample.file}}
+{% if sample.show_help %}
+
+ {{get_help(sample.file)|indent}}
+{% endif %}
+
+
+{% endfor %}
+{% endif %}
+
+{% if cloud_client_library %}
+
+The client library
+-------------------------------------------------------------------------------
+
+This sample uses the `Google Cloud Client Library for Python`_.
+You can read the documentation for more details on API usage and use GitHub
+to `browse the source`_ and `report issues`_.
+
+.. _Google Cloud Client Library for Python:
+ https://googlecloudplatform.github.io/google-cloud-python/
+.. _browse the source:
+ https://github.com/GoogleCloudPlatform/google-cloud-python
+.. _report issues:
+ https://github.com/GoogleCloudPlatform/google-cloud-python/issues
+
+{% endif %}
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/scripts/readme-gen/templates/auth.tmpl.rst b/scripts/readme-gen/templates/auth.tmpl.rst
new file mode 100644
index 000000000..1446b94a5
--- /dev/null
+++ b/scripts/readme-gen/templates/auth.tmpl.rst
@@ -0,0 +1,9 @@
+Authentication
+++++++++++++++
+
+This sample requires you to have authentication set up. Refer to the
+`Authentication Getting Started Guide`_ for instructions on setting up
+credentials for applications.
+
+.. _Authentication Getting Started Guide:
+ https://cloud.google.com/docs/authentication/getting-started
diff --git a/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
new file mode 100644
index 000000000..11957ce27
--- /dev/null
+++ b/scripts/readme-gen/templates/auth_api_key.tmpl.rst
@@ -0,0 +1,14 @@
+Authentication
+++++++++++++++
+
+Authentication for this service is done via an `API Key`_. To obtain an API
+Key:
+
+1. Open the `Cloud Platform Console`_
+2. Make sure that billing is enabled for your project.
+3. From the **Credentials** page, create a new **API Key** or use an existing
+ one for your project.
+
+.. _API Key:
+ https://developers.google.com/api-client-library/python/guide/aaa_apikeys
+.. _Cloud Platform Console: https://console.cloud.google.com/project?_
diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst
new file mode 100644
index 000000000..a0406dba8
--- /dev/null
+++ b/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -0,0 +1,29 @@
+Install Dependencies
+++++++++++++++++++++
+
+#. Clone python-docs-samples and change directory to the sample directory you want to use.
+
+ .. code-block:: bash
+
+ $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git
+
+#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.
+
+ .. _Python Development Environment Setup Guide:
+ https://cloud.google.com/python/setup
+
+#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+
+ .. code-block:: bash
+
+ $ virtualenv env
+ $ source env/bin/activate
+
+#. Install the dependencies needed to run the samples.
+
+ .. code-block:: bash
+
+ $ pip install -r requirements.txt
+
+.. _pip: https://pip.pypa.io/
+.. _virtualenv: https://virtualenv.pypa.io/
diff --git a/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/scripts/readme-gen/templates/install_portaudio.tmpl.rst
new file mode 100644
index 000000000..5ea33d18c
--- /dev/null
+++ b/scripts/readme-gen/templates/install_portaudio.tmpl.rst
@@ -0,0 +1,35 @@
+Install PortAudio
++++++++++++++++++
+
+Install `PortAudio`_. This is required by the `PyAudio`_ library to stream
+audio from your computer's microphone. PyAudio depends on PortAudio for
+cross-platform compatibility, and is installed differently depending on the
+platform.
+
+* For Mac OS X, you can use `Homebrew`_::
+
+ brew install portaudio
+
+ **Note**: if you encounter an error when running `pip install` that indicates
+ it can't find `portaudio.h`, try running `pip install` with the following
+ flags::
+
+ pip install --global-option='build_ext' \
+ --global-option='-I/usr/local/include' \
+ --global-option='-L/usr/local/lib' \
+ pyaudio
+
+* For Debian / Ubuntu Linux::
+
+ apt-get install portaudio19-dev python-all-dev
+
+* Windows may work without having to install PortAudio explicitly (it will get
+ installed with PyAudio).
+
+For more details, see the `PyAudio installation`_ page.
+
+
+.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/
+.. _PortAudio: http://www.portaudio.com/
+.. _PyAudio installation:
+ https://people.csail.mit.edu/hubert/pyaudio/#downloads
+.. _Homebrew: http://brew.sh
diff --git a/setup.py b/setup.py
index 0c149d303..91cb1dcc8 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@
name = "google-cloud-storage"
description = "Google Cloud Storage API client library"
-version = "1.29.0"
+version = "1.30.0"
# Should be one of:
# 'Development Status :: 3 - Alpha'
# 'Development Status :: 4 - Beta'
@@ -31,7 +31,7 @@
dependencies = [
"google-auth >= 1.11.0, < 2.0dev",
"google-cloud-core >= 1.2.0, < 2.0dev",
- "google-resumable-media >= 0.5.0, < 0.6dev",
+ "google-resumable-media >= 0.6.0, < 2.0dev",
]
extras = {}
diff --git a/synth.metadata b/synth.metadata
index 574565647..d1fc5fa12 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -1,11 +1,17 @@
{
- "updateTime": "2020-03-31T12:14:08.930178Z",
"sources": [
+ {
+ "git": {
+ "name": ".",
+ "remote": "git@github.com:googleapis/python-storage",
+ "sha": "0709ad5121098af68faf2432d8960650d238d8cd"
+ }
+ },
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "a003d8655d3ebec2bbbd5fc3898e91e152265c67"
+ "sha": "303271797a360f8a439203413f13a160f2f5b3b4"
}
}
]
diff --git a/synth.py b/synth.py
index 0b3fff8e5..296a6311b 100644
--- a/synth.py
+++ b/synth.py
@@ -19,15 +19,21 @@
import synthtool as s
from synthtool import gcp
-AUTOSYNTH_MULTIPLE_PRS = True
-AUTOSYNTH_MULTIPLE_COMMITS = True
-
common = gcp.CommonTemplates()
# ----------------------------------------------------------------------------
# Add templated files
# ----------------------------------------------------------------------------
-templated_files = common.py_library(cov_level=99)
-s.move(templated_files, excludes=["noxfile.py"])
+templated_files = common.py_library(
+ cov_level=99,
+ system_test_external_dependencies=[
+ "google-cloud-iam",
+ "google-cloud-pubsub",
+ "google-cloud-kms",
+ ],
+)
+s.move(
+ templated_files, excludes=["docs/multiprocessing.rst"],
+)
s.shell.run(["nox", "-s", "blacken"], hide_output=False)
diff --git a/testing/.gitignore b/testing/.gitignore
new file mode 100644
index 000000000..b05fbd630
--- /dev/null
+++ b/testing/.gitignore
@@ -0,0 +1,3 @@
+test-env.sh
+service-account.json
+client-secrets.json
\ No newline at end of file
diff --git a/tests/system/test_system.py b/tests/system/test_system.py
index 2afc1e515..e135538a8 100644
--- a/tests/system/test_system.py
+++ b/tests/system/test_system.py
@@ -1905,6 +1905,7 @@ def test_blob_w_explicit_kms_key_name(self):
(listed,) = list(self.bucket.list_blobs())
self.assertTrue(listed.kms_key_name.startswith(kms_key_name))
+ @RetryErrors(unittest.TestCase.failureException)
def test_bucket_w_default_kms_key_name(self):
BLOB_NAME = "default-kms-key-name"
OVERRIDE_BLOB_NAME = "override-default-kms-key-name"
diff --git a/tests/unit/test__signing.py b/tests/unit/test__signing.py
index 5f9219b29..1a3c383de 100644
--- a/tests/unit/test__signing.py
+++ b/tests/unit/test__signing.py
@@ -678,7 +678,7 @@ def _call_fut(*args, **kwargs):
def test_sign_bytes(self):
signature = "DEADBEEF"
- data = {"signature": signature}
+ data = {"signedBlob": signature}
request = make_request(200, data)
with mock.patch("google.auth.transport.requests.Request", return_value=request):
returned_signature = self._call_fut(
diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py
index 001f8801f..54aeae671 100644
--- a/tests/unit/test_blob.py
+++ b/tests/unit/test_blob.py
@@ -936,7 +936,7 @@ def _mock_requests_response(status_code, headers, content=b""):
response.request = requests.Request("POST", "http://example.com").prepare()
return response
- def _do_download_helper_wo_chunks(self, w_range, raw_download):
+ def _do_download_helper_wo_chunks(self, w_range, raw_download, timeout=None):
blob_name = "blob-name"
client = mock.Mock()
bucket = _Bucket(client)
@@ -953,6 +953,13 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download):
else:
patch = mock.patch("google.cloud.storage.blob.Download")
+ if timeout is None:
+ expected_timeout = self._get_default_timeout()
+ timeout_kwarg = {}
+ else:
+ expected_timeout = timeout
+ timeout_kwarg = {"timeout": timeout}
+
with patch as patched:
if w_range:
blob._do_download(
@@ -963,6 +970,7 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download):
start=1,
end=3,
raw_download=raw_download,
+ **timeout_kwarg
)
else:
blob._do_download(
@@ -971,6 +979,7 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download):
download_url,
headers,
raw_download=raw_download,
+ **timeout_kwarg
)
if w_range:
@@ -981,7 +990,10 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download):
patched.assert_called_once_with(
download_url, stream=file_obj, headers=headers, start=None, end=None
)
- patched.return_value.consume.assert_called_once_with(transport)
+
+ patched.return_value.consume.assert_called_once_with(
+ transport, timeout=expected_timeout
+ )
def test__do_download_wo_chunks_wo_range_wo_raw(self):
self._do_download_helper_wo_chunks(w_range=False, raw_download=False)
@@ -995,7 +1007,12 @@ def test__do_download_wo_chunks_wo_range_w_raw(self):
def test__do_download_wo_chunks_w_range_w_raw(self):
self._do_download_helper_wo_chunks(w_range=True, raw_download=True)
- def _do_download_helper_w_chunks(self, w_range, raw_download):
+ def test__do_download_wo_chunks_w_custom_timeout(self):
+ self._do_download_helper_wo_chunks(
+ w_range=False, raw_download=False, timeout=9.58
+ )
+
+ def _do_download_helper_w_chunks(self, w_range, raw_download, timeout=None):
blob_name = "blob-name"
client = mock.Mock(_credentials=_make_credentials(), spec=["_credentials"])
bucket = _Bucket(client)
@@ -1010,7 +1027,7 @@ def _do_download_helper_w_chunks(self, w_range, raw_download):
download = mock.Mock(finished=False, spec=["finished", "consume_next_chunk"])
- def side_effect(_):
+ def side_effect(*args, **kwargs):
download.finished = True
download.consume_next_chunk.side_effect = side_effect
@@ -1020,6 +1037,13 @@ def side_effect(_):
else:
patch = mock.patch("google.cloud.storage.blob.ChunkedDownload")
+ if timeout is None:
+ expected_timeout = self._get_default_timeout()
+ timeout_kwarg = {}
+ else:
+ expected_timeout = timeout
+ timeout_kwarg = {"timeout": timeout}
+
with patch as patched:
patched.return_value = download
if w_range:
@@ -1031,6 +1055,7 @@ def side_effect(_):
start=1,
end=3,
raw_download=raw_download,
+ **timeout_kwarg
)
else:
blob._do_download(
@@ -1039,6 +1064,7 @@ def side_effect(_):
download_url,
headers,
raw_download=raw_download,
+ **timeout_kwarg
)
if w_range:
@@ -1049,7 +1075,9 @@ def side_effect(_):
patched.assert_called_once_with(
download_url, chunk_size, file_obj, headers=headers, start=0, end=None
)
- download.consume_next_chunk.assert_called_once_with(transport)
+ download.consume_next_chunk.assert_called_once_with(
+ transport, timeout=expected_timeout
+ )
def test__do_download_w_chunks_wo_range_wo_raw(self):
self._do_download_helper_w_chunks(w_range=False, raw_download=False)
@@ -1063,6 +1091,9 @@ def test__do_download_w_chunks_wo_range_w_raw(self):
def test__do_download_w_chunks_w_range_w_raw(self):
self._do_download_helper_w_chunks(w_range=True, raw_download=True)
+ def test__do_download_w_chunks_w_custom_timeout(self):
+ self._do_download_helper_w_chunks(w_range=True, raw_download=True, timeout=9.58)
+
def test_download_to_file_with_failure(self):
import requests
from google.resumable_media import InvalidResponse
@@ -1091,7 +1122,14 @@ def test_download_to_file_with_failure(self):
headers = {"accept-encoding": "gzip"}
blob._do_download.assert_called_once_with(
- client._http, file_obj, media_link, headers, None, None, False
+ client._http,
+ file_obj,
+ media_link,
+ headers,
+ None,
+ None,
+ False,
+ timeout=self._get_default_timeout(),
)
def test_download_to_file_wo_media_link(self):
@@ -1114,7 +1152,14 @@ def test_download_to_file_wo_media_link(self):
)
headers = {"accept-encoding": "gzip"}
blob._do_download.assert_called_once_with(
- client._http, file_obj, expected_url, headers, None, None, False
+ client._http,
+ file_obj,
+ expected_url,
+ headers,
+ None,
+ None,
+ False,
+ timeout=self._get_default_timeout(),
)
def test_download_to_file_w_generation_match(self):
@@ -1136,10 +1181,17 @@ def test_download_to_file_w_generation_match(self):
blob.download_to_file(file_obj, if_generation_not_match=GENERATION_NUMBER)
blob._do_download.assert_called_once_with(
- client._http, file_obj, EXPECTED_URL, HEADERS, None, None, False
+ client._http,
+ file_obj,
+ EXPECTED_URL,
+ HEADERS,
+ None,
+ None,
+ False,
+ timeout=self._get_default_timeout(),
)
- def _download_to_file_helper(self, use_chunks, raw_download):
+ def _download_to_file_helper(self, use_chunks, raw_download, timeout=None):
blob_name = "blob-name"
client = mock.Mock(spec=[u"_http"])
bucket = _Bucket(client)
@@ -1151,15 +1203,29 @@ def _download_to_file_helper(self, use_chunks, raw_download):
blob.chunk_size = 3
blob._do_download = mock.Mock()
+ if timeout is None:
+ expected_timeout = self._get_default_timeout()
+ timeout_kwarg = {}
+ else:
+ expected_timeout = timeout
+ timeout_kwarg = {"timeout": timeout}
+
file_obj = io.BytesIO()
if raw_download:
- blob.download_to_file(file_obj, raw_download=True)
+ blob.download_to_file(file_obj, raw_download=True, **timeout_kwarg)
else:
- blob.download_to_file(file_obj)
+ blob.download_to_file(file_obj, **timeout_kwarg)
headers = {"accept-encoding": "gzip"}
blob._do_download.assert_called_once_with(
- client._http, file_obj, media_link, headers, None, None, raw_download
+ client._http,
+ file_obj,
+ media_link,
+ headers,
+ None,
+ None,
+ raw_download,
+ timeout=expected_timeout,
)
def test_download_to_file_wo_chunks_wo_raw(self):
@@ -1174,7 +1240,12 @@ def test_download_to_file_wo_chunks_w_raw(self):
def test_download_to_file_w_chunks_w_raw(self):
self._download_to_file_helper(use_chunks=True, raw_download=True)
- def _download_to_filename_helper(self, updated, raw_download):
+ def test_download_to_file_w_custom_timeout(self):
+ self._download_to_file_helper(
+ use_chunks=False, raw_download=False, timeout=9.58
+ )
+
+ def _download_to_filename_helper(self, updated, raw_download, timeout=None):
import os
from google.cloud.storage._helpers import _convert_to_timestamp
from google.cloud._testing import _NamedTemporaryFile
@@ -1191,7 +1262,13 @@ def _download_to_filename_helper(self, updated, raw_download):
blob._do_download = mock.Mock()
with _NamedTemporaryFile() as temp:
- blob.download_to_filename(temp.name, raw_download=raw_download)
+ if timeout is None:
+ blob.download_to_filename(temp.name, raw_download=raw_download)
+ else:
+ blob.download_to_filename(
+ temp.name, raw_download=raw_download, timeout=timeout,
+ )
+
if updated is None:
self.assertIsNone(blob.updated)
else:
@@ -1202,9 +1279,18 @@ def _download_to_filename_helper(self, updated, raw_download):
updated_time = blob.updated.timestamp()
self.assertEqual(mtime, updated_time)
+ expected_timeout = self._get_default_timeout() if timeout is None else timeout
+
headers = {"accept-encoding": "gzip"}
blob._do_download.assert_called_once_with(
- client._http, mock.ANY, media_link, headers, None, None, raw_download
+ client._http,
+ mock.ANY,
+ media_link,
+ headers,
+ None,
+ None,
+ raw_download,
+ timeout=expected_timeout,
)
stream = blob._do_download.mock_calls[0].args[1]
self.assertEqual(stream.name, temp.name)
@@ -1228,7 +1314,14 @@ def test_download_to_filename_w_generation_match(self):
blob.download_to_filename(temp.name, if_generation_match=GENERATION_NUMBER)
blob._do_download.assert_called_once_with(
- client._http, mock.ANY, EXPECTED_LINK, HEADERS, None, None, False
+ client._http,
+ mock.ANY,
+ EXPECTED_LINK,
+ HEADERS,
+ None,
+ None,
+ False,
+ timeout=self._get_default_timeout(),
)
def test_download_to_filename_w_updated_wo_raw(self):
@@ -1245,6 +1338,11 @@ def test_download_to_filename_w_updated_w_raw(self):
def test_download_to_filename_wo_updated_w_raw(self):
self._download_to_filename_helper(updated=None, raw_download=True)
+ def test_download_to_filename_w_custom_timeout(self):
+ self._download_to_filename_helper(
+ updated=None, raw_download=False, timeout=9.58
+ )
+
def test_download_to_filename_corrupted(self):
from google.resumable_media import DataCorruption
@@ -1273,7 +1371,14 @@ def test_download_to_filename_corrupted(self):
headers = {"accept-encoding": "gzip"}
blob._do_download.assert_called_once_with(
- client._http, mock.ANY, media_link, headers, None, None, False
+ client._http,
+ mock.ANY,
+ media_link,
+ headers,
+ None,
+ None,
+ False,
+ timeout=self._get_default_timeout(),
)
stream = blob._do_download.mock_calls[0].args[1]
self.assertEqual(stream.name, filename)
@@ -1300,12 +1405,19 @@ def test_download_to_filename_w_key(self):
headers = {"accept-encoding": "gzip"}
headers.update(_get_encryption_headers(key))
blob._do_download.assert_called_once_with(
- client._http, mock.ANY, media_link, headers, None, None, False
+ client._http,
+ mock.ANY,
+ media_link,
+ headers,
+ None,
+ None,
+ False,
+ timeout=self._get_default_timeout(),
)
stream = blob._do_download.mock_calls[0].args[1]
self.assertEqual(stream.name, temp.name)
- def _download_as_string_helper(self, raw_download):
+ def _download_as_string_helper(self, raw_download, timeout=None):
blob_name = "blob-name"
client = mock.Mock(spec=["_http"])
bucket = _Bucket(client)
@@ -1314,16 +1426,62 @@ def _download_as_string_helper(self, raw_download):
blob = self._make_one(blob_name, bucket=bucket, properties=properties)
blob._do_download = mock.Mock()
- fetched = blob.download_as_string(raw_download=raw_download)
+ if timeout is None:
+ expected_timeout = self._get_default_timeout()
+ fetched = blob.download_as_string(raw_download=raw_download)
+ else:
+ expected_timeout = timeout
+ fetched = blob.download_as_string(
+ raw_download=raw_download, timeout=timeout
+ )
+
self.assertEqual(fetched, b"")
headers = {"accept-encoding": "gzip"}
blob._do_download.assert_called_once_with(
- client._http, mock.ANY, media_link, headers, None, None, raw_download
+ client._http,
+ mock.ANY,
+ media_link,
+ headers,
+ None,
+ None,
+ raw_download,
+ timeout=expected_timeout,
)
stream = blob._do_download.mock_calls[0].args[1]
self.assertIsInstance(stream, io.BytesIO)
+ def test_download_as_string_w_response_headers(self):
+ blob_name = "blob-name"
+ client = mock.Mock(spec=["_http"])
+ bucket = _Bucket(client)
+ media_link = "http://example.com/media/"
+ properties = {"mediaLink": media_link}
+ blob = self._make_one(blob_name, bucket=bucket, properties=properties)
+
+ response = self._mock_requests_response(
+ http_client.OK,
+ headers={
+ "Content-Type": "application/json",
+ "Content-Language": "ko-kr",
+ "Cache-Control": "max-age=1337;public",
+ "Content-Encoding": "gzip",
+ "X-Goog-Storage-Class": "STANDARD",
+ "X-Goog-Hash": "crc32c=4gcgLQ==,md5=CS9tHYTtyFntzj7B9nkkJQ==",
+ },
+ # { "x": 5 } gzipped
+ content=b"\x1f\x8b\x08\x00\xcfo\x17_\x02\xff\xabVP\xaaP\xb2R0U\xa8\x05\x00\xa1\xcaQ\x93\n\x00\x00\x00",
+ )
+ blob._extract_headers_from_download(response)
+
+ self.assertEqual(blob.content_type, "application/json")
+ self.assertEqual(blob.content_language, "ko-kr")
+ self.assertEqual(blob.content_encoding, "gzip")
+ self.assertEqual(blob.cache_control, "max-age=1337;public")
+ self.assertEqual(blob.storage_class, "STANDARD")
+ self.assertEqual(blob.md5_hash, "CS9tHYTtyFntzj7B9nkkJQ")
+ self.assertEqual(blob.crc32c, "4gcgLQ")
+
def test_download_as_string_w_generation_match(self):
GENERATION_NUMBER = 6
MEDIA_LINK = "http://example.com/media/"
@@ -1347,6 +1505,7 @@ def test_download_as_string_w_generation_match(self):
if_generation_not_match=None,
if_metageneration_match=None,
if_metageneration_not_match=None,
+ timeout=self._get_default_timeout(),
)
def test_download_as_string_wo_raw(self):
@@ -1355,6 +1514,9 @@ def test_download_as_string_wo_raw(self):
def test_download_as_string_w_raw(self):
self._download_as_string_helper(raw_download=True)
+ def test_download_as_string_w_custom_timeout(self):
+ self._download_as_string_helper(raw_download=False, timeout=9.58)
+
def test__get_content_type_explicit(self):
blob = self._make_one(u"blob-name", bucket=None)
@@ -1471,6 +1633,7 @@ def _do_multipart_success(
if_metageneration_match=None,
if_metageneration_not_match=None,
kms_key_name=None,
+ timeout=None,
):
from six.moves.urllib.parse import urlencode
@@ -1487,6 +1650,14 @@ def _do_multipart_success(
data = b"data here hear hier"
stream = io.BytesIO(data)
content_type = u"application/xml"
+
+ if timeout is None:
+ expected_timeout = self._get_default_timeout()
+ timeout_kwarg = {}
+ else:
+ expected_timeout = timeout
+ timeout_kwarg = {"timeout": timeout}
+
response = blob._do_multipart_upload(
client,
stream,
@@ -1498,6 +1669,7 @@ def _do_multipart_success(
if_generation_not_match,
if_metageneration_match,
if_metageneration_not_match,
+ **timeout_kwarg
)
# Check the mocks and the returned value.
@@ -1551,7 +1723,7 @@ def _do_multipart_success(
)
headers = {"content-type": b'multipart/related; boundary="==0=="'}
transport.request.assert_called_once_with(
- "POST", upload_url, data=payload, headers=headers, timeout=mock.ANY
+ "POST", upload_url, data=payload, headers=headers, timeout=expected_timeout
)
@mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==")
@@ -1598,6 +1770,10 @@ def test__do_multipart_upload_with_generation_match(self, mock_get_boundary):
mock_get_boundary, if_generation_match=4, if_metageneration_match=4
)
+ @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==")
+ def test__do_multipart_upload_with_custom_timeout(self, mock_get_boundary):
+ self._do_multipart_success(mock_get_boundary, timeout=9.58)
+
@mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==")
def test__do_multipart_upload_with_generation_not_match(self, mock_get_boundary):
self._do_multipart_success(
@@ -1635,6 +1811,7 @@ def _initiate_resumable_helper(
if_metageneration_not_match=None,
blob_chunk_size=786432,
kms_key_name=None,
+ timeout=None,
):
from six.moves.urllib.parse import urlencode
from google.resumable_media.requests import ResumableUpload
@@ -1665,6 +1842,14 @@ def _initiate_resumable_helper(
data = b"hello hallo halo hi-low"
stream = io.BytesIO(data)
content_type = u"text/plain"
+
+ if timeout is None:
+ expected_timeout = self._get_default_timeout()
+ timeout_kwarg = {}
+ else:
+ expected_timeout = timeout
+ timeout_kwarg = {"timeout": timeout}
+
upload, transport = blob._initiate_resumable_upload(
client,
stream,
@@ -1678,6 +1863,7 @@ def _initiate_resumable_helper(
if_generation_not_match=if_generation_not_match,
if_metageneration_match=if_metageneration_match,
if_metageneration_not_match=if_metageneration_not_match,
+ **timeout_kwarg
)
# Check the returned values.
@@ -1757,9 +1943,16 @@ def _initiate_resumable_helper(
if extra_headers is not None:
expected_headers.update(extra_headers)
transport.request.assert_called_once_with(
- "POST", upload_url, data=payload, headers=expected_headers, timeout=mock.ANY
+ "POST",
+ upload_url,
+ data=payload,
+ headers=expected_headers,
+ timeout=expected_timeout,
)
+ def test__initiate_resumable_upload_with_custom_timeout(self):
+ self._initiate_resumable_helper(timeout=9.58)
+
def test__initiate_resumable_upload_no_size(self):
self._initiate_resumable_helper()
@@ -1844,6 +2037,7 @@ def _do_resumable_upload_call0(
if_generation_not_match=None,
if_metageneration_match=None,
if_metageneration_not_match=None,
+ timeout=None,
):
# First mock transport.request() initiates the upload.
upload_url = (
@@ -1861,7 +2055,7 @@ def _do_resumable_upload_call0(
expected_headers["x-upload-content-length"] = str(size)
payload = json.dumps({"name": blob.name}).encode("utf-8")
return mock.call(
- "POST", upload_url, data=payload, headers=expected_headers, timeout=mock.ANY
+ "POST", upload_url, data=payload, headers=expected_headers, timeout=timeout
)
@staticmethod
@@ -1876,6 +2070,7 @@ def _do_resumable_upload_call1(
if_generation_not_match=None,
if_metageneration_match=None,
if_metageneration_not_match=None,
+ timeout=None,
):
# Second mock transport.request() sends the first chunk.
if size is None:
@@ -1893,7 +2088,7 @@ def _do_resumable_upload_call1(
resumable_url,
data=payload,
headers=expected_headers,
- timeout=mock.ANY,
+ timeout=timeout,
)
@staticmethod
@@ -1908,6 +2103,7 @@ def _do_resumable_upload_call2(
if_generation_not_match=None,
if_metageneration_match=None,
if_metageneration_not_match=None,
+ timeout=None,
):
# Third mock transport.request() sends the last chunk.
content_range = "bytes {:d}-{:d}/{:d}".format(
@@ -1923,7 +2119,7 @@ def _do_resumable_upload_call2(
resumable_url,
data=payload,
headers=expected_headers,
- timeout=mock.ANY,
+ timeout=timeout,
)
def _do_resumable_helper(
@@ -1935,6 +2131,7 @@ def _do_resumable_helper(
if_generation_not_match=None,
if_metageneration_match=None,
if_metageneration_not_match=None,
+ timeout=None,
):
bucket = _Bucket(name="yesterday")
blob = self._make_one(u"blob-name", bucket=bucket)
@@ -1962,6 +2159,14 @@ def _do_resumable_helper(
client._connection.API_BASE_URL = "https://storage.googleapis.com"
stream = io.BytesIO(data)
content_type = u"text/html"
+
+ if timeout is None:
+ expected_timeout = self._get_default_timeout()
+ timeout_kwarg = {}
+ else:
+ expected_timeout = timeout
+ timeout_kwarg = {"timeout": timeout}
+
response = blob._do_resumable_upload(
client,
stream,
@@ -1973,6 +2178,7 @@ def _do_resumable_helper(
if_generation_not_match,
if_metageneration_match,
if_metageneration_not_match,
+ **timeout_kwarg
)
# Check the returned values.
@@ -1989,6 +2195,7 @@ def _do_resumable_helper(
if_generation_not_match=if_generation_not_match,
if_metageneration_match=if_metageneration_match,
if_metageneration_not_match=if_metageneration_not_match,
+ timeout=expected_timeout,
)
call1 = self._do_resumable_upload_call1(
blob,
@@ -2001,6 +2208,7 @@ def _do_resumable_helper(
if_generation_not_match=if_generation_not_match,
if_metageneration_match=if_metageneration_match,
if_metageneration_not_match=if_metageneration_not_match,
+ timeout=expected_timeout,
)
call2 = self._do_resumable_upload_call2(
blob,
@@ -2013,9 +2221,13 @@ def _do_resumable_helper(
if_generation_not_match=if_generation_not_match,
if_metageneration_match=if_metageneration_match,
if_metageneration_not_match=if_metageneration_not_match,
+ timeout=expected_timeout,
)
self.assertEqual(transport.request.mock_calls, [call0, call1, call2])
+ def test__do_resumable_upload_with_custom_timeout(self):
+ self._do_resumable_helper(timeout=9.58)
+
def test__do_resumable_upload_no_size(self):
self._do_resumable_helper()
@@ -2038,6 +2250,7 @@ def _do_upload_helper(
if_metageneration_match=None,
if_metageneration_not_match=None,
size=None,
+ timeout=None,
):
from google.cloud.storage.blob import _MAX_MULTIPART_SIZE
@@ -2061,6 +2274,14 @@ def _do_upload_helper(
content_type = u"video/mp4"
if size is None:
size = 12345654321
+
+ if timeout is None:
+ expected_timeout = self._get_default_timeout()
+ timeout_kwarg = {}
+ else:
+ expected_timeout = timeout
+ timeout_kwarg = {"timeout": timeout}
+
# Make the request and check the mocks.
created_json = blob._do_upload(
client,
@@ -2073,6 +2294,7 @@ def _do_upload_helper(
if_generation_not_match,
if_metageneration_match,
if_metageneration_not_match,
+ **timeout_kwarg
)
self.assertIs(created_json, mock.sentinel.json)
response.json.assert_called_once_with()
@@ -2088,6 +2310,7 @@ def _do_upload_helper(
if_generation_not_match,
if_metageneration_match,
if_metageneration_not_match,
+ timeout=expected_timeout,
)
blob._do_resumable_upload.assert_not_called()
else:
@@ -2103,6 +2326,7 @@ def _do_upload_helper(
if_generation_not_match,
if_metageneration_match,
if_metageneration_not_match,
+ timeout=expected_timeout,
)
def test__do_upload_uses_multipart(self):
@@ -2110,12 +2334,25 @@ def test__do_upload_uses_multipart(self):
self._do_upload_helper(size=_MAX_MULTIPART_SIZE)
+ def test__do_upload_uses_multipart_w_custom_timeout(self):
+ from google.cloud.storage.blob import _MAX_MULTIPART_SIZE
+
+ self._do_upload_helper(size=_MAX_MULTIPART_SIZE, timeout=9.58)
+
def test__do_upload_uses_resumable(self):
from google.cloud.storage.blob import _MAX_MULTIPART_SIZE
chunk_size = 256 * 1024 # 256KB
self._do_upload_helper(chunk_size=chunk_size, size=_MAX_MULTIPART_SIZE + 1)
+ def test__do_upload_uses_resumable_w_custom_timeout(self):
+ from google.cloud.storage.blob import _MAX_MULTIPART_SIZE
+
+ chunk_size = 256 * 1024 # 256KB
+ self._do_upload_helper(
+ chunk_size=chunk_size, size=_MAX_MULTIPART_SIZE + 1, timeout=9.58
+ )
+
def test__do_upload_with_retry(self):
self._do_upload_helper(num_retries=20)
@@ -2150,6 +2387,8 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs):
new_updated = datetime.datetime(2017, 1, 1, 9, 9, 9, 81000, tzinfo=UTC)
self.assertEqual(blob.updated, new_updated)
+ expected_timeout = kwargs.get("timeout", self._get_default_timeout())
+
# Check the mock.
num_retries = kwargs.get("num_retries")
blob._do_upload.assert_called_once_with(
@@ -2163,6 +2402,7 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs):
if_generation_not_match,
if_metageneration_match,
if_metageneration_not_match,
+ timeout=expected_timeout,
)
return stream
@@ -2183,6 +2423,9 @@ def test_upload_from_file_with_rewind(self):
stream = self._upload_from_file_helper(rewind=True)
assert stream.tell() == 0
+ def test_upload_from_file_with_custom_timeout(self):
+ self._upload_from_file_helper(timeout=9.58)
+
def test_upload_from_file_failure(self):
import requests
@@ -2201,7 +2444,9 @@ def test_upload_from_file_failure(self):
self.assertIn(message, exc_info.exception.message)
self.assertEqual(exc_info.exception.errors, [])
- def _do_upload_mock_call_helper(self, blob, client, content_type, size):
+ def _do_upload_mock_call_helper(
+ self, blob, client, content_type, size, timeout=None
+ ):
self.assertEqual(blob._do_upload.call_count, 1)
mock_call = blob._do_upload.mock_calls[0]
call_name, pos_args, kwargs = mock_call
@@ -2216,7 +2461,9 @@ def _do_upload_mock_call_helper(self, blob, client, content_type, size):
self.assertIsNone(pos_args[7]) # if_generation_not_match
self.assertIsNone(pos_args[8]) # if_metageneration_match
self.assertIsNone(pos_args[9]) # if_metageneration_not_match
- self.assertEqual(kwargs, {})
+
+ expected_timeout = self._get_default_timeout() if timeout is None else timeout
+ self.assertEqual(kwargs, {"timeout": expected_timeout})
return pos_args[1]
@@ -2251,6 +2498,32 @@ def test_upload_from_filename(self):
self.assertEqual(stream.mode, "rb")
self.assertEqual(stream.name, temp.name)
+ def test_upload_from_filename_w_custom_timeout(self):
+ from google.cloud._testing import _NamedTemporaryFile
+
+ blob = self._make_one("blob-name", bucket=None)
+ # Mock low-level upload helper on blob (it is tested elsewhere).
+ created_json = {"metadata": {"mint": "ice-cream"}}
+ blob._do_upload = mock.Mock(return_value=created_json, spec=[])
+ # Make sure `metadata` is empty before the request.
+ self.assertIsNone(blob.metadata)
+
+ data = b"soooo much data"
+ content_type = u"image/svg+xml"
+ client = mock.sentinel.client
+ with _NamedTemporaryFile() as temp:
+ with open(temp.name, "wb") as file_obj:
+ file_obj.write(data)
+
+ blob.upload_from_filename(
+ temp.name, content_type=content_type, client=client, timeout=9.58
+ )
+
+ # Check the mock.
+ self._do_upload_mock_call_helper(
+ blob, client, content_type, len(data), timeout=9.58
+ )
+
def _upload_from_string_helper(self, data, **kwargs):
from google.cloud._helpers import _to_bytes
@@ -2272,11 +2545,19 @@ def _upload_from_string_helper(self, data, **kwargs):
# Check the mock.
payload = _to_bytes(data, encoding="utf-8")
stream = self._do_upload_mock_call_helper(
- blob, client, "text/plain", len(payload)
+ blob,
+ client,
+ "text/plain",
+ len(payload),
+ kwargs.get("timeout", self._get_default_timeout()),
)
self.assertIsInstance(stream, io.BytesIO)
self.assertEqual(stream.getvalue(), payload)
+ def test_upload_from_string_w_custom_timeout(self):
+ data = b"XB]jb\xb8tad\xe0"
+ self._upload_from_string_helper(data, timeout=9.58)
+
def test_upload_from_string_w_bytes(self):
data = b"XB]jb\xb8tad\xe0"
self._upload_from_string_helper(data)
@@ -2285,7 +2566,9 @@ def test_upload_from_string_w_text(self):
data = u"\N{snowman} \N{sailboat}"
self._upload_from_string_helper(data)
- def _create_resumable_upload_session_helper(self, origin=None, side_effect=None):
+ def _create_resumable_upload_session_helper(
+ self, origin=None, side_effect=None, timeout=None
+ ):
bucket = _Bucket(name="alex-trebek")
blob = self._make_one("blob-name", bucket=bucket)
chunk_size = 99 * blob._CHUNK_SIZE_MULTIPLE
@@ -2303,8 +2586,20 @@ def _create_resumable_upload_session_helper(self, origin=None, side_effect=None)
size = 10000
client = mock.Mock(_http=transport, _connection=_Connection, spec=[u"_http"])
client._connection.API_BASE_URL = "https://storage.googleapis.com"
+
+ if timeout is None:
+ expected_timeout = self._get_default_timeout()
+ timeout_kwarg = {}
+ else:
+ expected_timeout = timeout
+ timeout_kwarg = {"timeout": timeout}
+
new_url = blob.create_resumable_upload_session(
- content_type=content_type, size=size, origin=origin, client=client
+ content_type=content_type,
+ size=size,
+ origin=origin,
+ client=client,
+ **timeout_kwarg
)
# Check the returned value and (lack of) side-effect.
@@ -2326,12 +2621,19 @@ def _create_resumable_upload_session_helper(self, origin=None, side_effect=None)
if origin is not None:
expected_headers["Origin"] = origin
transport.request.assert_called_once_with(
- "POST", upload_url, data=payload, headers=expected_headers, timeout=mock.ANY
+ "POST",
+ upload_url,
+ data=payload,
+ headers=expected_headers,
+ timeout=expected_timeout,
)
def test_create_resumable_upload_session(self):
self._create_resumable_upload_session_helper()
+ def test_create_resumable_upload_session_with_custom_timeout(self):
+ self._create_resumable_upload_session_helper(timeout=9.58)
+
def test_create_resumable_upload_session_with_origin(self):
self._create_resumable_upload_session_helper(origin="http://google.com")
@@ -2857,7 +3159,7 @@ def test_compose_w_generation_match_bad_length(self):
with self.assertRaises(ValueError):
destination.compose(
- sources=[source_1, source_2], if_generation_match=GENERATION_NUMBERS,
+ sources=[source_1, source_2], if_generation_match=GENERATION_NUMBERS
)
with self.assertRaises(ValueError):
destination.compose(
@@ -2880,7 +3182,7 @@ def test_compose_w_generation_match_nones(self):
destination = self._make_one(DESTINATION, bucket=bucket)
destination.compose(
- sources=[source_1, source_2], if_generation_match=GENERATION_NUMBERS,
+ sources=[source_1, source_2], if_generation_match=GENERATION_NUMBERS
)
kw = connection._requested
@@ -2896,7 +3198,7 @@ def test_compose_w_generation_match_nones(self):
{
"name": source_1.name,
"objectPreconditions": {
- "ifGenerationMatch": GENERATION_NUMBERS[0],
+ "ifGenerationMatch": GENERATION_NUMBERS[0]
},
},
{"name": source_2.name},
@@ -3252,6 +3554,41 @@ def test_update_storage_class_large_file(self):
self.assertEqual(blob.storage_class, "NEARLINE")
+ def test_update_storage_class_with_custom_timeout(self):
+ BLOB_NAME = "blob-name"
+ STORAGE_CLASS = u"NEARLINE"
+ TOKEN = "TOKEN"
+ INCOMPLETE_RESPONSE = {
+ "totalBytesRewritten": 42,
+ "objectSize": 84,
+ "done": False,
+ "rewriteToken": TOKEN,
+ "resource": {"storageClass": STORAGE_CLASS},
+ }
+ COMPLETE_RESPONSE = {
+ "totalBytesRewritten": 84,
+ "objectSize": 84,
+ "done": True,
+ "resource": {"storageClass": STORAGE_CLASS},
+ }
+ response_1 = ({"status": http_client.OK}, INCOMPLETE_RESPONSE)
+ response_2 = ({"status": http_client.OK}, COMPLETE_RESPONSE)
+ connection = _Connection(response_1, response_2)
+ client = _Client(connection)
+ bucket = _Bucket(client=client)
+ blob = self._make_one(BLOB_NAME, bucket=bucket)
+
+ blob.update_storage_class("NEARLINE", timeout=9.58)
+
+ self.assertEqual(blob.storage_class, "NEARLINE")
+
+ kw = connection._requested
+ self.assertEqual(len(kw), 2)
+
+ for kw_item in kw:
+ self.assertIn("timeout", kw_item)
+ self.assertEqual(kw_item["timeout"], 9.58)
+
def test_update_storage_class_wo_encryption_key(self):
BLOB_NAME = "blob-name"
STORAGE_CLASS = u"NEARLINE"
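The timeout-related test changes above all exercise the same public surface: a per-call ``timeout`` argument that is forwarded to the underlying transport. A hedged usage sketch (bucket and file names are hypothetical; 9.58 mirrors the value used in the tests):

    from google.cloud import storage

    client = storage.Client()                            # assumes default credentials
    blob = client.bucket("my-bucket").blob("data.bin")   # hypothetical names

    with open("data.bin", "wb") as fh:
        blob.download_to_file(fh, timeout=9.58)          # per-call transport timeout

    blob.upload_from_filename("data.bin", timeout=9.58)
    blob.update_storage_class("NEARLINE", timeout=9.58)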
diff --git a/tests/unit/test_bucket.py b/tests/unit/test_bucket.py
index 27bd94f1a..3c5f2e68d 100644
--- a/tests/unit/test_bucket.py
+++ b/tests/unit/test_bucket.py
@@ -1146,6 +1146,82 @@ def test_delete_blobs_hit_w_user_project(self):
self.assertEqual(kw[0]["query_params"], {"userProject": USER_PROJECT})
self.assertEqual(kw[0]["timeout"], 42)
+ def test_delete_blobs_w_generation_match(self):
+ NAME = "name"
+ BLOB_NAME = "blob-name"
+ BLOB_NAME2 = "blob-name2"
+ GENERATION_NUMBER = 6
+ GENERATION_NUMBER2 = 9
+
+ connection = _Connection({}, {})
+ client = _Client(connection)
+ bucket = self._make_one(client=client, name=NAME)
+ bucket.delete_blobs(
+ [BLOB_NAME, BLOB_NAME2],
+ timeout=42,
+ if_generation_match=[GENERATION_NUMBER, GENERATION_NUMBER2],
+ )
+ kw = connection._requested
+ self.assertEqual(len(kw), 2)
+
+ self.assertEqual(kw[0]["method"], "DELETE")
+ self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME))
+ self.assertEqual(kw[0]["timeout"], 42)
+ self.assertEqual(
+ kw[0]["query_params"], {"ifGenerationMatch": GENERATION_NUMBER}
+ )
+ self.assertEqual(kw[1]["method"], "DELETE")
+ self.assertEqual(kw[1]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME2))
+ self.assertEqual(kw[1]["timeout"], 42)
+ self.assertEqual(
+ kw[1]["query_params"], {"ifGenerationMatch": GENERATION_NUMBER2}
+ )
+
+ def test_delete_blobs_w_generation_match_wrong_len(self):
+ NAME = "name"
+ BLOB_NAME = "blob-name"
+ BLOB_NAME2 = "blob-name2"
+ GENERATION_NUMBER = 6
+
+ connection = _Connection()
+ client = _Client(connection)
+ bucket = self._make_one(client=client, name=NAME)
+ with self.assertRaises(ValueError):
+ bucket.delete_blobs(
+ [BLOB_NAME, BLOB_NAME2],
+ timeout=42,
+ if_generation_not_match=[GENERATION_NUMBER],
+ )
+
+ def test_delete_blobs_w_generation_match_none(self):
+ NAME = "name"
+ BLOB_NAME = "blob-name"
+ BLOB_NAME2 = "blob-name2"
+ GENERATION_NUMBER = 6
+ GENERATION_NUMBER2 = None
+
+ connection = _Connection({}, {})
+ client = _Client(connection)
+ bucket = self._make_one(client=client, name=NAME)
+ bucket.delete_blobs(
+ [BLOB_NAME, BLOB_NAME2],
+ timeout=42,
+ if_generation_match=[GENERATION_NUMBER, GENERATION_NUMBER2],
+ )
+ kw = connection._requested
+ self.assertEqual(len(kw), 2)
+
+ self.assertEqual(kw[0]["method"], "DELETE")
+ self.assertEqual(kw[0]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME))
+ self.assertEqual(kw[0]["timeout"], 42)
+ self.assertEqual(
+ kw[0]["query_params"], {"ifGenerationMatch": GENERATION_NUMBER}
+ )
+ self.assertEqual(kw[1]["method"], "DELETE")
+ self.assertEqual(kw[1]["path"], "/b/%s/o/%s" % (NAME, BLOB_NAME2))
+ self.assertEqual(kw[1]["timeout"], 42)
+ self.assertEqual(kw[1]["query_params"], {})
+
def test_delete_blobs_miss_no_on_error(self):
from google.cloud.exceptions import NotFound
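``test_delete_blobs_w_generation_match_none`` above also pins down that a ``None`` entry skips the precondition for that blob only. A hedged sketch of the equivalent call, reusing the hypothetical ``bucket`` from the earlier example:

    # Precondition the first delete on generation 6; no precondition on the second.
    bucket.delete_blobs(
        ["blob-name", "blob-name2"],
        if_generation_match=[6, None],
    )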
diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py
index 0ce3cad3c..600e11943 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -1844,7 +1844,7 @@ def test_conformance_post_policy(test_data):
in_data = test_data["policyInput"]
timestamp = datetime.datetime.strptime(in_data["timestamp"], "%Y-%m-%dT%H:%M:%SZ")
- client = Client(credentials=_DUMMY_CREDENTIALS)
+ client = Client(credentials=_DUMMY_CREDENTIALS, project="PROJECT")
# mocking time functions
with mock.patch("google.cloud.storage._signing.NOW", return_value=timestamp):