- On January 1, 2020 this library will no longer support Python 2 on the latest released version. - Previously released library versions will continue to be available. For more information please + As of January 1, 2020 this library no longer supports Python 2 on the latest released version. + Library versions released prior to that date will continue to be available. For more information please visit Python 2 support on Google Cloud.
{% block body %} {% endblock %} diff --git a/docs/conf.py b/docs/conf.py index 1bb947c41..8d2f1e15d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -43,7 +43,7 @@ # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index ec6c6b08e..d4b0956fe 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -187,7 +187,7 @@ def __init__( self._bucket = bucket self._acl = ObjectACL(self) _raise_if_more_than_one_set( - encryption_key=encryption_key, kms_key_name=kms_key_name, + encryption_key=encryption_key, kms_key_name=kms_key_name ) self._encryption_key = encryption_key diff --git a/noxfile.py b/noxfile.py index 058dcdd61..fd120fd6d 100644 --- a/noxfile.py +++ b/noxfile.py @@ -26,11 +26,12 @@ BLACK_VERSION = "black==19.10b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] -if os.path.exists("samples"): - BLACK_PATHS.append("samples") +DEFAULT_PYTHON_VERSION = "3.8" +SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] -@nox.session(python="3.8") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): """Run linters. @@ -38,7 +39,9 @@ def lint(session): serious code quality issues. """ session.install("flake8", BLACK_VERSION) - session.run("black", "--check", *BLACK_PATHS) + session.run( + "black", "--check", *BLACK_PATHS, + ) session.run("flake8", "google", "tests") @@ -53,10 +56,12 @@ def blacken(session): check the state of the `gcp_ubuntu_config` we use for that Kokoro run. 
""" session.install(BLACK_VERSION) - session.run("black", *BLACK_PATHS) + session.run( + "black", *BLACK_PATHS, + ) -@nox.session(python="3.8") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.install("docutils", "pygments") @@ -73,6 +78,7 @@ def default(session): "py.test", "--quiet", "--cov=google.cloud.storage", + "--cov=google.cloud", "--cov=tests.unit", "--cov-append", "--cov-config=.coveragerc", @@ -83,13 +89,13 @@ def default(session): ) -@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"]) +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) def unit(session): """Run the unit test suite.""" default(session) -@nox.session(python=["2.7", "3.8"]) +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") @@ -103,19 +109,21 @@ def system(session): # Sanity check: only run tests if found. if not system_test_exists and not system_test_folder_exists: session.skip("System tests were not found") - session.install("google-cloud-iam") - session.install("google-cloud-pubsub") - session.install("google-cloud-kms") # Use pre-release gRPC for system tests. session.install("--pre", "grpcio") # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install("mock", "pytest") - + session.install( + "mock", + "pytest", + "google-cloud-testutils", + "google-cloud-iam", + "google-cloud-pubsub", + "google-cloud-kms", + ) session.install("-e", ".") - session.install("-e", "test_utils") # Run py.test against the system tests. if system_test_exists: @@ -124,7 +132,7 @@ def system(session): session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) -@nox.session(python="3.8") +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. 
@@ -132,12 +140,12 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") + session.run("coverage", "report", "--show-missing", "--fail-under=99") session.run("coverage", "erase") -@nox.session(python="3.8") +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh new file mode 100755 index 000000000..ff599eb2a --- /dev/null +++ b/scripts/decrypt-secrets.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. 
+PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + > testing/client-secrets.json \ No newline at end of file diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py new file mode 100644 index 000000000..d309d6e97 --- /dev/null +++ b/scripts/readme-gen/readme_gen.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + +README_TMPL = jinja_env.get_template('README.tmpl.rst') + + +def get_help(file): + return subprocess.check_output(['python', file, '--help']).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('source') + parser.add_argument('--destination', default='README.rst') + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals['get_help'] = get_help + + with io.open(source, 'r') as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, 'w') as f: + f.write(output) + + +if __name__ == '__main__': + main() diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 000000000..4fd239765 --- /dev/null +++ b/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. 
_{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. 
_Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/scripts/readme-gen/templates/auth.tmpl.rst b/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 000000000..1446b94a5 --- /dev/null +++ b/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 000000000..11957ce27 --- /dev/null +++ b/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 000000000..a0406dba8 --- /dev/null +++ b/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. 
You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. _virtualenv: https://virtualenv.pypa.io/ diff --git a/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 000000000..5ea33d18c --- /dev/null +++ b/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. 
_Homebrew: http://brew.sh diff --git a/synth.metadata b/synth.metadata index 574565647..d1fc5fa12 100644 --- a/synth.metadata +++ b/synth.metadata @@ -1,11 +1,17 @@ { - "updateTime": "2020-03-31T12:14:08.930178Z", "sources": [ + { + "git": { + "name": ".", + "remote": "git@github.com:googleapis/python-storage", + "sha": "0709ad5121098af68faf2432d8960650d238d8cd" + } + }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "a003d8655d3ebec2bbbd5fc3898e91e152265c67" + "sha": "303271797a360f8a439203413f13a160f2f5b3b4" } } ] diff --git a/synth.py b/synth.py index 1ca1dcc4e..296a6311b 100644 --- a/synth.py +++ b/synth.py @@ -19,15 +19,21 @@ import synthtool as s from synthtool import gcp -AUTOSYNTH_MULTIPLE_PRS = True -AUTOSYNTH_MULTIPLE_COMMITS = True - common = gcp.CommonTemplates() # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(cov_level=99) -s.move(templated_files, excludes=["noxfile.py, docs/multiprocessing.rst"]) +templated_files = common.py_library( + cov_level=99, + system_test_external_dependencies=[ + "google-cloud-iam", + "google-cloud-pubsub", + "google-cloud-kms", + ], +) +s.move( + templated_files, excludes=["docs/multiprocessing.rst"], +) s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/testing/.gitignore b/testing/.gitignore new file mode 100644 index 000000000..b05fbd630 --- /dev/null +++ b/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 001f8801f..017e86216 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -2857,7 +2857,7 @@ def test_compose_w_generation_match_bad_length(self): with self.assertRaises(ValueError): destination.compose( - sources=[source_1, 
source_2], if_generation_match=GENERATION_NUMBERS, + sources=[source_1, source_2], if_generation_match=GENERATION_NUMBERS ) with self.assertRaises(ValueError): destination.compose( @@ -2880,7 +2880,7 @@ def test_compose_w_generation_match_nones(self): destination = self._make_one(DESTINATION, bucket=bucket) destination.compose( - sources=[source_1, source_2], if_generation_match=GENERATION_NUMBERS, + sources=[source_1, source_2], if_generation_match=GENERATION_NUMBERS ) kw = connection._requested @@ -2896,7 +2896,7 @@ def test_compose_w_generation_match_nones(self): { "name": source_1.name, "objectPreconditions": { - "ifGenerationMatch": GENERATION_NUMBERS[0], + "ifGenerationMatch": GENERATION_NUMBERS[0] }, }, {"name": source_2.name}, From 008224b1a8bb898450332f19fff2c7f8c0313ccb Mon Sep 17 00:00:00 2001 From: Gurov Ilya Date: Wed, 1 Jul 2020 21:01:15 +0300 Subject: [PATCH 07/13] test: add retry for default kms system test (#198) --- tests/system/test_system.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 2afc1e515..e135538a8 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -1905,6 +1905,7 @@ def test_blob_w_explicit_kms_key_name(self): (listed,) = list(self.bucket.list_blobs()) self.assertTrue(listed.kms_key_name.startswith(kms_key_name)) + @RetryErrors(unittest.TestCase.failureException) def test_bucket_w_default_kms_key_name(self): BLOB_NAME = "default-kms-key-name" OVERRIDE_BLOB_NAME = "override-default-kms-key-name" From 844ce4f8b43fa17d5849190571229cf5687282a3 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 21 Jul 2020 12:18:33 -0400 Subject: [PATCH 08/13] tests: unit tests pass w/o environ vars (#207) Closes: #206 --- tests/unit/test_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 0ce3cad3c..600e11943 100644 --- a/tests/unit/test_client.py +++ 
b/tests/unit/test_client.py @@ -1844,7 +1844,7 @@ def test_conformance_post_policy(test_data): in_data = test_data["policyInput"] timestamp = datetime.datetime.strptime(in_data["timestamp"], "%Y-%m-%dT%H:%M:%SZ") - client = Client(credentials=_DUMMY_CREDENTIALS) + client = Client(credentials=_DUMMY_CREDENTIALS, project="PROJECT") # mocking time functions with mock.patch("google.cloud.storage._signing.NOW", return_value=timestamp): From 7a4e7a5974abedb0b7b2e110cacbfcd0a40346b6 Mon Sep 17 00:00:00 2001 From: Chie Hayashida Date: Wed, 22 Jul 2020 02:22:41 +0900 Subject: [PATCH 09/13] docs: remove doubled word in docstring (#209) Co-authored-by: Tres Seaver --- google/cloud/storage/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/google/cloud/storage/client.py b/google/cloud/storage/client.py index 2fb7fb75c..118377b7c 100644 --- a/google/cloud/storage/client.py +++ b/google/cloud/storage/client.py @@ -545,7 +545,7 @@ def download_blob_to_file(self, blob_or_uri, file_obj, start=None, end=None): (Optional) The last byte in a range to be downloaded. Examples: - Download a blob using using a blob resource. + Download a blob using a blob resource. 
>>> from google.cloud import storage >>> client = storage.Client() From 6eeb855aa0e6a7835d1d4f6e72951e43af22ab57 Mon Sep 17 00:00:00 2001 From: Peter Lamut Date: Tue, 21 Jul 2020 19:58:46 +0200 Subject: [PATCH 10/13] feat: add timeouts to Blob methods where missing (#185) * feat: add timeouts to Blob methods where missing * Require google-resumable-media version 0.6.0+ --- google/cloud/storage/blob.py | 154 ++++++++++++++- setup.py | 2 +- tests/unit/test_blob.py | 370 ++++++++++++++++++++++++++++++++--- 3 files changed, 487 insertions(+), 39 deletions(-) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index d4b0956fe..efad9ae39 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -792,6 +792,7 @@ def _do_download( start=None, end=None, raw_download=False, + timeout=_DEFAULT_TIMEOUT, ): """Perform a download without any error handling. @@ -821,6 +822,14 @@ def _do_download( :type raw_download: bool :param raw_download: (Optional) If true, download the object without any expansion. + + :type timeout: float or tuple + :param timeout: + (Optional) The number of seconds the transport should wait for the + server response. Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. 
""" if self.chunk_size is None: if raw_download: @@ -831,7 +840,7 @@ def _do_download( download = klass( download_url, stream=file_obj, headers=headers, start=start, end=end ) - download.consume(transport) + download.consume(transport, timeout=timeout) else: @@ -850,7 +859,7 @@ def _do_download( ) while not download.finished: - download.consume_next_chunk(transport) + download.consume_next_chunk(transport, timeout=timeout) def download_to_file( self, @@ -863,6 +872,7 @@ def download_to_file( if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, ): """Download the contents of this blob into a file-like object. @@ -931,6 +941,14 @@ def download_to_file( :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the blob's current metageneration does not match the given value. + :type timeout: float or tuple + :param timeout: + (Optional) The number of seconds the transport should wait for the + server response. Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + :raises: :class:`google.cloud.exceptions.NotFound` """ client = self._require_client(client) @@ -948,7 +966,14 @@ def download_to_file( transport = self._get_transport(client) try: self._do_download( - transport, file_obj, download_url, headers, start, end, raw_download + transport, + file_obj, + download_url, + headers, + start, + end, + raw_download, + timeout=timeout, ) except resumable_media.InvalidResponse as exc: _raise_from_invalid_response(exc) @@ -964,6 +989,7 @@ def download_to_filename( if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, ): """Download the contents of this blob into a named file. 
@@ -1008,6 +1034,14 @@ def download_to_filename( :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the blob's current metageneration does not match the given value. + :type timeout: float or tuple + :param timeout: + (Optional) The number of seconds the transport should wait for the + server response. Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + :raises: :class:`google.cloud.exceptions.NotFound` """ try: @@ -1022,6 +1056,7 @@ def download_to_filename( if_generation_not_match=if_generation_not_match, if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, + timeout=timeout, ) except resumable_media.DataCorruption: # Delete the corrupt downloaded file. @@ -1046,6 +1081,7 @@ def download_as_string( if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, ): """Download the contents of this blob as a bytes object. @@ -1087,6 +1123,14 @@ def download_as_string( :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the blob's current metageneration does not match the given value. + :type timeout: float or tuple + :param timeout: + (Optional) The number of seconds the transport should wait for the + server response. Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + :rtype: bytes :returns: The data stored in this blob. 
@@ -1103,6 +1147,7 @@ def download_as_string( if_generation_not_match=if_generation_not_match, if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, + timeout=timeout, ) return string_buffer.getvalue() @@ -1203,6 +1248,7 @@ def _do_multipart_upload( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + timeout=_DEFAULT_TIMEOUT, ): """Perform a multipart upload. @@ -1256,6 +1302,14 @@ def _do_multipart_upload( :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the blob's current metageneration does not match the given value. + :type timeout: float or tuple + :param timeout: + (Optional) The number of seconds the transport should wait for the + server response. Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + :rtype: :class:`~requests.Response` :returns: The "200 OK" response object returned after the multipart upload request. @@ -1318,7 +1372,9 @@ def _do_multipart_upload( max_retries=num_retries ) - response = upload.transmit(transport, data, object_metadata, content_type) + response = upload.transmit( + transport, data, object_metadata, content_type, timeout=timeout + ) return response @@ -1336,6 +1392,7 @@ def _initiate_resumable_upload( if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, ): """Initiate a resumable upload. @@ -1402,6 +1459,14 @@ def _initiate_resumable_upload( :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the blob's current metageneration does not match the given value. + :type timeout: float or tuple + :param timeout: + (Optional) The number of seconds the transport should wait for the + server response. 
Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + :rtype: tuple :returns: Pair of @@ -1472,6 +1537,7 @@ def _initiate_resumable_upload( content_type, total_bytes=size, stream_final=False, + timeout=timeout, ) return upload, transport @@ -1488,6 +1554,7 @@ def _do_resumable_upload( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + timeout=_DEFAULT_TIMEOUT, ): """Perform a resumable upload. @@ -1544,6 +1611,14 @@ def _do_resumable_upload( :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the blob's current metageneration does not match the given value. + :type timeout: float or tuple + :param timeout: + (Optional) The number of seconds the transport should wait for the + server response. Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + :rtype: :class:`~requests.Response` :returns: The "200 OK" response object returned after the final chunk is uploaded. @@ -1559,10 +1634,11 @@ def _do_resumable_upload( if_generation_not_match=if_generation_not_match, if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, + timeout=timeout, ) while not upload.finished: - response = upload.transmit_next_chunk(transport) + response = upload.transmit_next_chunk(transport, timeout=timeout) return response @@ -1578,6 +1654,7 @@ def _do_upload( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + timeout=_DEFAULT_TIMEOUT, ): """Determine an upload strategy and then perform the upload. 
@@ -1635,6 +1712,14 @@ def _do_upload( :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the blob's current metageneration does not match the given value. + :type timeout: float or tuple + :param timeout: + (Optional) The number of seconds the transport should wait for the + server response. Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + :rtype: dict :returns: The parsed JSON from the "200 OK" response. This will be the **only** response in the multipart case and it will be the @@ -1652,6 +1737,7 @@ def _do_upload( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + timeout=timeout, ) else: response = self._do_resumable_upload( @@ -1665,6 +1751,7 @@ def _do_upload( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + timeout=timeout, ) return response.json() @@ -1682,6 +1769,7 @@ def upload_from_file( if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, ): """Upload the contents of this blob from a file-like object. @@ -1768,6 +1856,14 @@ def upload_from_file( :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the blob's current metageneration does not match the given value. + :type timeout: float or tuple + :param timeout: + (Optional) The number of seconds the transport should wait for the + server response. Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + :raises: :class:`~google.cloud.exceptions.GoogleCloudError` if the upload response returns an error status. 
@@ -1793,6 +1889,7 @@ def upload_from_file( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + timeout=timeout, ) self._set_properties(created_json) except resumable_media.InvalidResponse as exc: @@ -1808,6 +1905,7 @@ def upload_from_filename( if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, ): """Upload this blob's contents from the content of a named file. @@ -1866,6 +1964,14 @@ def upload_from_filename( :type if_metageneration_not_match: long :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the blob's current metageneration does not match the given value. + + :type timeout: float or tuple + :param timeout: + (Optional) The number of seconds the transport should wait for the + server response. Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. """ content_type = self._get_content_type(content_type, filename=filename) @@ -1881,6 +1987,7 @@ def upload_from_filename( if_generation_not_match=if_generation_not_match, if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, + timeout=timeout, ) def upload_from_string( @@ -1893,6 +2000,7 @@ def upload_from_string( if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, ): """Upload contents of this blob from the provided string. @@ -1946,6 +2054,14 @@ def upload_from_string( :type if_metageneration_not_match: long :param if_metageneration_not_match: (Optional) Make the operation conditional on whether the blob's current metageneration does not match the given value. 
+ + :type timeout: float or tuple + :param timeout: + (Optional) The number of seconds the transport should wait for the + server response. Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. """ data = _to_bytes(data, encoding="utf-8") string_buffer = BytesIO(data) @@ -1959,10 +2075,16 @@ def upload_from_string( if_generation_not_match=if_generation_not_match, if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, + timeout=timeout, ) def create_resumable_upload_session( - self, content_type=None, size=None, origin=None, client=None + self, + content_type=None, + size=None, + origin=None, + client=None, + timeout=_DEFAULT_TIMEOUT, ): """Create a resumable upload session. @@ -2020,6 +2142,14 @@ def create_resumable_upload_session( :param client: (Optional) The client to use. If not passed, falls back to the ``client`` stored on the blob's bucket. + :type timeout: float or tuple + :param timeout: + (Optional) The number of seconds the transport should wait for the + server response. Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. + :rtype: str :returns: The resumable upload session URL. 
The upload can be completed by making an HTTP PUT request with the @@ -2048,6 +2178,7 @@ def create_resumable_upload_session( predefined_acl=None, extra_headers=extra_headers, chunk_size=self._CHUNK_SIZE_MULTIPLE, + timeout=timeout, ) return upload.resumable_url @@ -2510,6 +2641,7 @@ def update_storage_class( if_source_generation_not_match=None, if_source_metageneration_match=None, if_source_metageneration_not_match=None, + timeout=_DEFAULT_TIMEOUT, ): """Update blob's storage class via a rewrite-in-place. This helper will wait for the rewrite to complete before returning, so it may take some @@ -2592,6 +2724,14 @@ def update_storage_class( conditional on whether the source object's current metageneration does not match the given value. + + :type timeout: float or tuple + :param timeout: + (Optional) The number of seconds the transport should wait for the + server response. Depending on the retry strategy, a request may be + repeated several times using the same timeout each time. + Can also be passed as a tuple (connect_timeout, read_timeout). + See :meth:`requests.Session.request` documentation for details. 
""" if new_class not in self.STORAGE_CLASSES: raise ValueError("Invalid storage class: %s" % (new_class,)) @@ -2610,6 +2750,7 @@ def update_storage_class( if_source_generation_not_match=if_source_generation_not_match, if_source_metageneration_match=if_source_metageneration_match, if_source_metageneration_not_match=if_source_metageneration_not_match, + timeout=timeout, ) while token is not None: token, _, _ = self.rewrite( @@ -2623,6 +2764,7 @@ def update_storage_class( if_source_generation_not_match=if_source_generation_not_match, if_source_metageneration_match=if_source_metageneration_match, if_source_metageneration_not_match=if_source_metageneration_not_match, + timeout=timeout, ) cache_control = _scalar_property("cacheControl") diff --git a/setup.py b/setup.py index 0c149d303..b2ded72b6 100644 --- a/setup.py +++ b/setup.py @@ -31,7 +31,7 @@ dependencies = [ "google-auth >= 1.11.0, < 2.0dev", "google-cloud-core >= 1.2.0, < 2.0dev", - "google-resumable-media >= 0.5.0, < 0.6dev", + "google-resumable-media >= 0.6.0, < 0.7dev", ] extras = {} diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 017e86216..4635b050e 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -936,7 +936,7 @@ def _mock_requests_response(status_code, headers, content=b""): response.request = requests.Request("POST", "http://example.com").prepare() return response - def _do_download_helper_wo_chunks(self, w_range, raw_download): + def _do_download_helper_wo_chunks(self, w_range, raw_download, timeout=None): blob_name = "blob-name" client = mock.Mock() bucket = _Bucket(client) @@ -953,6 +953,13 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download): else: patch = mock.patch("google.cloud.storage.blob.Download") + if timeout is None: + expected_timeout = self._get_default_timeout() + timeout_kwarg = {} + else: + expected_timeout = timeout + timeout_kwarg = {"timeout": timeout} + with patch as patched: if w_range: blob._do_download( @@ -963,6 +970,7 @@ 
def _do_download_helper_wo_chunks(self, w_range, raw_download): start=1, end=3, raw_download=raw_download, + **timeout_kwarg ) else: blob._do_download( @@ -971,6 +979,7 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download): download_url, headers, raw_download=raw_download, + **timeout_kwarg ) if w_range: @@ -981,7 +990,10 @@ def _do_download_helper_wo_chunks(self, w_range, raw_download): patched.assert_called_once_with( download_url, stream=file_obj, headers=headers, start=None, end=None ) - patched.return_value.consume.assert_called_once_with(transport) + + patched.return_value.consume.assert_called_once_with( + transport, timeout=expected_timeout + ) def test__do_download_wo_chunks_wo_range_wo_raw(self): self._do_download_helper_wo_chunks(w_range=False, raw_download=False) @@ -995,7 +1007,12 @@ def test__do_download_wo_chunks_wo_range_w_raw(self): def test__do_download_wo_chunks_w_range_w_raw(self): self._do_download_helper_wo_chunks(w_range=True, raw_download=True) - def _do_download_helper_w_chunks(self, w_range, raw_download): + def test__do_download_wo_chunks_w_custom_timeout(self): + self._do_download_helper_wo_chunks( + w_range=False, raw_download=False, timeout=9.58 + ) + + def _do_download_helper_w_chunks(self, w_range, raw_download, timeout=None): blob_name = "blob-name" client = mock.Mock(_credentials=_make_credentials(), spec=["_credentials"]) bucket = _Bucket(client) @@ -1010,7 +1027,7 @@ def _do_download_helper_w_chunks(self, w_range, raw_download): download = mock.Mock(finished=False, spec=["finished", "consume_next_chunk"]) - def side_effect(_): + def side_effect(*args, **kwargs): download.finished = True download.consume_next_chunk.side_effect = side_effect @@ -1020,6 +1037,13 @@ def side_effect(_): else: patch = mock.patch("google.cloud.storage.blob.ChunkedDownload") + if timeout is None: + expected_timeout = self._get_default_timeout() + timeout_kwarg = {} + else: + expected_timeout = timeout + timeout_kwarg = {"timeout": timeout} + 
with patch as patched: patched.return_value = download if w_range: @@ -1031,6 +1055,7 @@ def side_effect(_): start=1, end=3, raw_download=raw_download, + **timeout_kwarg ) else: blob._do_download( @@ -1039,6 +1064,7 @@ def side_effect(_): download_url, headers, raw_download=raw_download, + **timeout_kwarg ) if w_range: @@ -1049,7 +1075,9 @@ def side_effect(_): patched.assert_called_once_with( download_url, chunk_size, file_obj, headers=headers, start=0, end=None ) - download.consume_next_chunk.assert_called_once_with(transport) + download.consume_next_chunk.assert_called_once_with( + transport, timeout=expected_timeout + ) def test__do_download_w_chunks_wo_range_wo_raw(self): self._do_download_helper_w_chunks(w_range=False, raw_download=False) @@ -1063,6 +1091,9 @@ def test__do_download_w_chunks_wo_range_w_raw(self): def test__do_download_w_chunks_w_range_w_raw(self): self._do_download_helper_w_chunks(w_range=True, raw_download=True) + def test__do_download_w_chunks_w_custom_timeout(self): + self._do_download_helper_w_chunks(w_range=True, raw_download=True, timeout=9.58) + def test_download_to_file_with_failure(self): import requests from google.resumable_media import InvalidResponse @@ -1091,7 +1122,14 @@ def test_download_to_file_with_failure(self): headers = {"accept-encoding": "gzip"} blob._do_download.assert_called_once_with( - client._http, file_obj, media_link, headers, None, None, False + client._http, + file_obj, + media_link, + headers, + None, + None, + False, + timeout=self._get_default_timeout(), ) def test_download_to_file_wo_media_link(self): @@ -1114,7 +1152,14 @@ def test_download_to_file_wo_media_link(self): ) headers = {"accept-encoding": "gzip"} blob._do_download.assert_called_once_with( - client._http, file_obj, expected_url, headers, None, None, False + client._http, + file_obj, + expected_url, + headers, + None, + None, + False, + timeout=self._get_default_timeout(), ) def test_download_to_file_w_generation_match(self): @@ -1136,10 +1181,17 
@@ def test_download_to_file_w_generation_match(self): blob.download_to_file(file_obj, if_generation_not_match=GENERATION_NUMBER) blob._do_download.assert_called_once_with( - client._http, file_obj, EXPECTED_URL, HEADERS, None, None, False + client._http, + file_obj, + EXPECTED_URL, + HEADERS, + None, + None, + False, + timeout=self._get_default_timeout(), ) - def _download_to_file_helper(self, use_chunks, raw_download): + def _download_to_file_helper(self, use_chunks, raw_download, timeout=None): blob_name = "blob-name" client = mock.Mock(spec=[u"_http"]) bucket = _Bucket(client) @@ -1151,15 +1203,29 @@ def _download_to_file_helper(self, use_chunks, raw_download): blob.chunk_size = 3 blob._do_download = mock.Mock() + if timeout is None: + expected_timeout = self._get_default_timeout() + timeout_kwarg = {} + else: + expected_timeout = timeout + timeout_kwarg = {"timeout": timeout} + file_obj = io.BytesIO() if raw_download: - blob.download_to_file(file_obj, raw_download=True) + blob.download_to_file(file_obj, raw_download=True, **timeout_kwarg) else: - blob.download_to_file(file_obj) + blob.download_to_file(file_obj, **timeout_kwarg) headers = {"accept-encoding": "gzip"} blob._do_download.assert_called_once_with( - client._http, file_obj, media_link, headers, None, None, raw_download + client._http, + file_obj, + media_link, + headers, + None, + None, + raw_download, + timeout=expected_timeout, ) def test_download_to_file_wo_chunks_wo_raw(self): @@ -1174,7 +1240,12 @@ def test_download_to_file_wo_chunks_w_raw(self): def test_download_to_file_w_chunks_w_raw(self): self._download_to_file_helper(use_chunks=True, raw_download=True) - def _download_to_filename_helper(self, updated, raw_download): + def test_download_to_file_w_custom_timeout(self): + self._download_to_file_helper( + use_chunks=False, raw_download=False, timeout=9.58 + ) + + def _download_to_filename_helper(self, updated, raw_download, timeout=None): import os from google.cloud.storage._helpers import 
_convert_to_timestamp from google.cloud._testing import _NamedTemporaryFile @@ -1191,7 +1262,13 @@ def _download_to_filename_helper(self, updated, raw_download): blob._do_download = mock.Mock() with _NamedTemporaryFile() as temp: - blob.download_to_filename(temp.name, raw_download=raw_download) + if timeout is None: + blob.download_to_filename(temp.name, raw_download=raw_download) + else: + blob.download_to_filename( + temp.name, raw_download=raw_download, timeout=timeout, + ) + if updated is None: self.assertIsNone(blob.updated) else: @@ -1202,9 +1279,18 @@ def _download_to_filename_helper(self, updated, raw_download): updated_time = blob.updated.timestamp() self.assertEqual(mtime, updated_time) + expected_timeout = self._get_default_timeout() if timeout is None else timeout + headers = {"accept-encoding": "gzip"} blob._do_download.assert_called_once_with( - client._http, mock.ANY, media_link, headers, None, None, raw_download + client._http, + mock.ANY, + media_link, + headers, + None, + None, + raw_download, + timeout=expected_timeout, ) stream = blob._do_download.mock_calls[0].args[1] self.assertEqual(stream.name, temp.name) @@ -1228,7 +1314,14 @@ def test_download_to_filename_w_generation_match(self): blob.download_to_filename(temp.name, if_generation_match=GENERATION_NUMBER) blob._do_download.assert_called_once_with( - client._http, mock.ANY, EXPECTED_LINK, HEADERS, None, None, False + client._http, + mock.ANY, + EXPECTED_LINK, + HEADERS, + None, + None, + False, + timeout=self._get_default_timeout(), ) def test_download_to_filename_w_updated_wo_raw(self): @@ -1245,6 +1338,11 @@ def test_download_to_filename_w_updated_w_raw(self): def test_download_to_filename_wo_updated_w_raw(self): self._download_to_filename_helper(updated=None, raw_download=True) + def test_download_to_filename_w_custom_timeout(self): + self._download_to_filename_helper( + updated=None, raw_download=False, timeout=9.58 + ) + def test_download_to_filename_corrupted(self): from 
google.resumable_media import DataCorruption @@ -1273,7 +1371,14 @@ def test_download_to_filename_corrupted(self): headers = {"accept-encoding": "gzip"} blob._do_download.assert_called_once_with( - client._http, mock.ANY, media_link, headers, None, None, False + client._http, + mock.ANY, + media_link, + headers, + None, + None, + False, + timeout=self._get_default_timeout(), ) stream = blob._do_download.mock_calls[0].args[1] self.assertEqual(stream.name, filename) @@ -1300,12 +1405,19 @@ def test_download_to_filename_w_key(self): headers = {"accept-encoding": "gzip"} headers.update(_get_encryption_headers(key)) blob._do_download.assert_called_once_with( - client._http, mock.ANY, media_link, headers, None, None, False + client._http, + mock.ANY, + media_link, + headers, + None, + None, + False, + timeout=self._get_default_timeout(), ) stream = blob._do_download.mock_calls[0].args[1] self.assertEqual(stream.name, temp.name) - def _download_as_string_helper(self, raw_download): + def _download_as_string_helper(self, raw_download, timeout=None): blob_name = "blob-name" client = mock.Mock(spec=["_http"]) bucket = _Bucket(client) @@ -1314,12 +1426,27 @@ def _download_as_string_helper(self, raw_download): blob = self._make_one(blob_name, bucket=bucket, properties=properties) blob._do_download = mock.Mock() - fetched = blob.download_as_string(raw_download=raw_download) + if timeout is None: + expected_timeout = self._get_default_timeout() + fetched = blob.download_as_string(raw_download=raw_download) + else: + expected_timeout = timeout + fetched = blob.download_as_string( + raw_download=raw_download, timeout=timeout + ) + self.assertEqual(fetched, b"") headers = {"accept-encoding": "gzip"} blob._do_download.assert_called_once_with( - client._http, mock.ANY, media_link, headers, None, None, raw_download + client._http, + mock.ANY, + media_link, + headers, + None, + None, + raw_download, + timeout=expected_timeout, ) stream = blob._do_download.mock_calls[0].args[1] 
self.assertIsInstance(stream, io.BytesIO) @@ -1347,6 +1474,7 @@ def test_download_as_string_w_generation_match(self): if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=self._get_default_timeout(), ) def test_download_as_string_wo_raw(self): @@ -1355,6 +1483,9 @@ def test_download_as_string_wo_raw(self): def test_download_as_string_w_raw(self): self._download_as_string_helper(raw_download=True) + def test_download_as_string_w_custom_timeout(self): + self._download_as_string_helper(raw_download=False, timeout=9.58) + def test__get_content_type_explicit(self): blob = self._make_one(u"blob-name", bucket=None) @@ -1471,6 +1602,7 @@ def _do_multipart_success( if_metageneration_match=None, if_metageneration_not_match=None, kms_key_name=None, + timeout=None, ): from six.moves.urllib.parse import urlencode @@ -1487,6 +1619,14 @@ def _do_multipart_success( data = b"data here hear hier" stream = io.BytesIO(data) content_type = u"application/xml" + + if timeout is None: + expected_timeout = self._get_default_timeout() + timeout_kwarg = {} + else: + expected_timeout = timeout + timeout_kwarg = {"timeout": timeout} + response = blob._do_multipart_upload( client, stream, @@ -1498,6 +1638,7 @@ def _do_multipart_success( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + **timeout_kwarg ) # Check the mocks and the returned value. 
@@ -1551,7 +1692,7 @@ def _do_multipart_success( ) headers = {"content-type": b'multipart/related; boundary="==0=="'} transport.request.assert_called_once_with( - "POST", upload_url, data=payload, headers=headers, timeout=mock.ANY + "POST", upload_url, data=payload, headers=headers, timeout=expected_timeout ) @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") @@ -1598,6 +1739,10 @@ def test__do_multipart_upload_with_generation_match(self, mock_get_boundary): mock_get_boundary, if_generation_match=4, if_metageneration_match=4 ) + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") + def test__do_multipart_upload_with_custom_timeout(self, mock_get_boundary): + self._do_multipart_success(mock_get_boundary, timeout=9.58) + @mock.patch(u"google.resumable_media._upload.get_boundary", return_value=b"==0==") def test__do_multipart_upload_with_generation_not_match(self, mock_get_boundary): self._do_multipart_success( @@ -1635,6 +1780,7 @@ def _initiate_resumable_helper( if_metageneration_not_match=None, blob_chunk_size=786432, kms_key_name=None, + timeout=None, ): from six.moves.urllib.parse import urlencode from google.resumable_media.requests import ResumableUpload @@ -1665,6 +1811,14 @@ def _initiate_resumable_helper( data = b"hello hallo halo hi-low" stream = io.BytesIO(data) content_type = u"text/plain" + + if timeout is None: + expected_timeout = self._get_default_timeout() + timeout_kwarg = {} + else: + expected_timeout = timeout + timeout_kwarg = {"timeout": timeout} + upload, transport = blob._initiate_resumable_upload( client, stream, @@ -1678,6 +1832,7 @@ def _initiate_resumable_helper( if_generation_not_match=if_generation_not_match, if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, + **timeout_kwarg ) # Check the returned values. 
@@ -1757,9 +1912,16 @@ def _initiate_resumable_helper( if extra_headers is not None: expected_headers.update(extra_headers) transport.request.assert_called_once_with( - "POST", upload_url, data=payload, headers=expected_headers, timeout=mock.ANY + "POST", + upload_url, + data=payload, + headers=expected_headers, + timeout=expected_timeout, ) + def test__initiate_resumable_upload_with_custom_timeout(self): + self._initiate_resumable_helper(timeout=9.58) + def test__initiate_resumable_upload_no_size(self): self._initiate_resumable_helper() @@ -1844,6 +2006,7 @@ def _do_resumable_upload_call0( if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=None, ): # First mock transport.request() does initiates upload. upload_url = ( @@ -1861,7 +2024,7 @@ def _do_resumable_upload_call0( expected_headers["x-upload-content-length"] = str(size) payload = json.dumps({"name": blob.name}).encode("utf-8") return mock.call( - "POST", upload_url, data=payload, headers=expected_headers, timeout=mock.ANY + "POST", upload_url, data=payload, headers=expected_headers, timeout=timeout ) @staticmethod @@ -1876,6 +2039,7 @@ def _do_resumable_upload_call1( if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=None, ): # Second mock transport.request() does sends first chunk. if size is None: @@ -1893,7 +2057,7 @@ def _do_resumable_upload_call1( resumable_url, data=payload, headers=expected_headers, - timeout=mock.ANY, + timeout=timeout, ) @staticmethod @@ -1908,6 +2072,7 @@ def _do_resumable_upload_call2( if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=None, ): # Third mock transport.request() does sends last chunk. 
content_range = "bytes {:d}-{:d}/{:d}".format( @@ -1923,7 +2088,7 @@ def _do_resumable_upload_call2( resumable_url, data=payload, headers=expected_headers, - timeout=mock.ANY, + timeout=timeout, ) def _do_resumable_helper( @@ -1935,6 +2100,7 @@ def _do_resumable_helper( if_generation_not_match=None, if_metageneration_match=None, if_metageneration_not_match=None, + timeout=None, ): bucket = _Bucket(name="yesterday") blob = self._make_one(u"blob-name", bucket=bucket) @@ -1962,6 +2128,14 @@ def _do_resumable_helper( client._connection.API_BASE_URL = "https://storage.googleapis.com" stream = io.BytesIO(data) content_type = u"text/html" + + if timeout is None: + expected_timeout = self._get_default_timeout() + timeout_kwarg = {} + else: + expected_timeout = timeout + timeout_kwarg = {"timeout": timeout} + response = blob._do_resumable_upload( client, stream, @@ -1973,6 +2147,7 @@ def _do_resumable_helper( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + **timeout_kwarg ) # Check the returned values. 
@@ -1989,6 +2164,7 @@ def _do_resumable_helper( if_generation_not_match=if_generation_not_match, if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, + timeout=expected_timeout, ) call1 = self._do_resumable_upload_call1( blob, @@ -2001,6 +2177,7 @@ def _do_resumable_helper( if_generation_not_match=if_generation_not_match, if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, + timeout=expected_timeout, ) call2 = self._do_resumable_upload_call2( blob, @@ -2013,9 +2190,13 @@ def _do_resumable_helper( if_generation_not_match=if_generation_not_match, if_metageneration_match=if_metageneration_match, if_metageneration_not_match=if_metageneration_not_match, + timeout=expected_timeout, ) self.assertEqual(transport.request.mock_calls, [call0, call1, call2]) + def test__do_resumable_upload_with_custom_timeout(self): + self._do_resumable_helper(timeout=9.58) + def test__do_resumable_upload_no_size(self): self._do_resumable_helper() @@ -2038,6 +2219,7 @@ def _do_upload_helper( if_metageneration_match=None, if_metageneration_not_match=None, size=None, + timeout=None, ): from google.cloud.storage.blob import _MAX_MULTIPART_SIZE @@ -2061,6 +2243,14 @@ def _do_upload_helper( content_type = u"video/mp4" if size is None: size = 12345654321 + + if timeout is None: + expected_timeout = self._get_default_timeout() + timeout_kwarg = {} + else: + expected_timeout = timeout + timeout_kwarg = {"timeout": timeout} + # Make the request and check the mocks. 
created_json = blob._do_upload( client, @@ -2073,6 +2263,7 @@ def _do_upload_helper( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + **timeout_kwarg ) self.assertIs(created_json, mock.sentinel.json) response.json.assert_called_once_with() @@ -2088,6 +2279,7 @@ def _do_upload_helper( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + timeout=expected_timeout, ) blob._do_resumable_upload.assert_not_called() else: @@ -2103,6 +2295,7 @@ def _do_upload_helper( if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + timeout=expected_timeout, ) def test__do_upload_uses_multipart(self): @@ -2110,12 +2303,25 @@ def test__do_upload_uses_multipart(self): self._do_upload_helper(size=_MAX_MULTIPART_SIZE) + def test__do_upload_uses_multipart_w_custom_timeout(self): + from google.cloud.storage.blob import _MAX_MULTIPART_SIZE + + self._do_upload_helper(size=_MAX_MULTIPART_SIZE, timeout=9.58) + def test__do_upload_uses_resumable(self): from google.cloud.storage.blob import _MAX_MULTIPART_SIZE chunk_size = 256 * 1024 # 256KB self._do_upload_helper(chunk_size=chunk_size, size=_MAX_MULTIPART_SIZE + 1) + def test__do_upload_uses_resumable_w_custom_timeout(self): + from google.cloud.storage.blob import _MAX_MULTIPART_SIZE + + chunk_size = 256 * 1024 # 256KB + self._do_upload_helper( + chunk_size=chunk_size, size=_MAX_MULTIPART_SIZE + 1, timeout=9.58 + ) + def test__do_upload_with_retry(self): self._do_upload_helper(num_retries=20) @@ -2150,6 +2356,8 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs): new_updated = datetime.datetime(2017, 1, 1, 9, 9, 9, 81000, tzinfo=UTC) self.assertEqual(blob.updated, new_updated) + expected_timeout = kwargs.get("timeout", self._get_default_timeout()) + # Check the mock. 
num_retries = kwargs.get("num_retries") blob._do_upload.assert_called_once_with( @@ -2163,6 +2371,7 @@ def _upload_from_file_helper(self, side_effect=None, **kwargs): if_generation_not_match, if_metageneration_match, if_metageneration_not_match, + timeout=expected_timeout, ) return stream @@ -2183,6 +2392,9 @@ def test_upload_from_file_with_rewind(self): stream = self._upload_from_file_helper(rewind=True) assert stream.tell() == 0 + def test_upload_from_file_with_custom_timeout(self): + self._upload_from_file_helper(timeout=9.58) + def test_upload_from_file_failure(self): import requests @@ -2201,7 +2413,9 @@ def test_upload_from_file_failure(self): self.assertIn(message, exc_info.exception.message) self.assertEqual(exc_info.exception.errors, []) - def _do_upload_mock_call_helper(self, blob, client, content_type, size): + def _do_upload_mock_call_helper( + self, blob, client, content_type, size, timeout=None + ): self.assertEqual(blob._do_upload.call_count, 1) mock_call = blob._do_upload.mock_calls[0] call_name, pos_args, kwargs = mock_call @@ -2216,7 +2430,9 @@ def _do_upload_mock_call_helper(self, blob, client, content_type, size): self.assertIsNone(pos_args[7]) # if_generation_not_match self.assertIsNone(pos_args[8]) # if_metageneration_match self.assertIsNone(pos_args[9]) # if_metageneration_not_match - self.assertEqual(kwargs, {}) + + expected_timeout = self._get_default_timeout() if timeout is None else timeout + self.assertEqual(kwargs, {"timeout": expected_timeout}) return pos_args[1] @@ -2251,6 +2467,32 @@ def test_upload_from_filename(self): self.assertEqual(stream.mode, "rb") self.assertEqual(stream.name, temp.name) + def test_upload_from_filename_w_custom_timeout(self): + from google.cloud._testing import _NamedTemporaryFile + + blob = self._make_one("blob-name", bucket=None) + # Mock low-level upload helper on blob (it is tested elsewhere). 
+ created_json = {"metadata": {"mint": "ice-cream"}} + blob._do_upload = mock.Mock(return_value=created_json, spec=[]) + # Make sure `metadata` is empty before the request. + self.assertIsNone(blob.metadata) + + data = b"soooo much data" + content_type = u"image/svg+xml" + client = mock.sentinel.client + with _NamedTemporaryFile() as temp: + with open(temp.name, "wb") as file_obj: + file_obj.write(data) + + blob.upload_from_filename( + temp.name, content_type=content_type, client=client, timeout=9.58 + ) + + # Check the mock. + self._do_upload_mock_call_helper( + blob, client, content_type, len(data), timeout=9.58 + ) + def _upload_from_string_helper(self, data, **kwargs): from google.cloud._helpers import _to_bytes @@ -2272,11 +2514,19 @@ def _upload_from_string_helper(self, data, **kwargs): # Check the mock. payload = _to_bytes(data, encoding="utf-8") stream = self._do_upload_mock_call_helper( - blob, client, "text/plain", len(payload) + blob, + client, + "text/plain", + len(payload), + kwargs.get("timeout", self._get_default_timeout()), ) self.assertIsInstance(stream, io.BytesIO) self.assertEqual(stream.getvalue(), payload) + def test_upload_from_string_w_custom_timeout(self): + data = b"XB]jb\xb8tad\xe0" + self._upload_from_string_helper(data, timeout=9.58) + def test_upload_from_string_w_bytes(self): data = b"XB]jb\xb8tad\xe0" self._upload_from_string_helper(data) @@ -2285,7 +2535,9 @@ def test_upload_from_string_w_text(self): data = u"\N{snowman} \N{sailboat}" self._upload_from_string_helper(data) - def _create_resumable_upload_session_helper(self, origin=None, side_effect=None): + def _create_resumable_upload_session_helper( + self, origin=None, side_effect=None, timeout=None + ): bucket = _Bucket(name="alex-trebek") blob = self._make_one("blob-name", bucket=bucket) chunk_size = 99 * blob._CHUNK_SIZE_MULTIPLE @@ -2303,8 +2555,20 @@ def _create_resumable_upload_session_helper(self, origin=None, side_effect=None) size = 10000 client = 
mock.Mock(_http=transport, _connection=_Connection, spec=[u"_http"]) client._connection.API_BASE_URL = "https://storage.googleapis.com" + + if timeout is None: + expected_timeout = self._get_default_timeout() + timeout_kwarg = {} + else: + expected_timeout = timeout + timeout_kwarg = {"timeout": timeout} + new_url = blob.create_resumable_upload_session( - content_type=content_type, size=size, origin=origin, client=client + content_type=content_type, + size=size, + origin=origin, + client=client, + **timeout_kwarg ) # Check the returned value and (lack of) side-effect. @@ -2326,12 +2590,19 @@ def _create_resumable_upload_session_helper(self, origin=None, side_effect=None) if origin is not None: expected_headers["Origin"] = origin transport.request.assert_called_once_with( - "POST", upload_url, data=payload, headers=expected_headers, timeout=mock.ANY + "POST", + upload_url, + data=payload, + headers=expected_headers, + timeout=expected_timeout, ) def test_create_resumable_upload_session(self): self._create_resumable_upload_session_helper() + def test_create_resumable_upload_session_with_custom_timeout(self): + self._create_resumable_upload_session_helper(timeout=9.58) + def test_create_resumable_upload_session_with_origin(self): self._create_resumable_upload_session_helper(origin="http://google.com") @@ -3252,6 +3523,41 @@ def test_update_storage_class_large_file(self): self.assertEqual(blob.storage_class, "NEARLINE") + def test_update_storage_class_with_custom_timeout(self): + BLOB_NAME = "blob-name" + STORAGE_CLASS = u"NEARLINE" + TOKEN = "TOKEN" + INCOMPLETE_RESPONSE = { + "totalBytesRewritten": 42, + "objectSize": 84, + "done": False, + "rewriteToken": TOKEN, + "resource": {"storageClass": STORAGE_CLASS}, + } + COMPLETE_RESPONSE = { + "totalBytesRewritten": 84, + "objectSize": 84, + "done": True, + "resource": {"storageClass": STORAGE_CLASS}, + } + response_1 = ({"status": http_client.OK}, INCOMPLETE_RESPONSE) + response_2 = ({"status": http_client.OK}, 
COMPLETE_RESPONSE) + connection = _Connection(response_1, response_2) + client = _Client(connection) + bucket = _Bucket(client=client) + blob = self._make_one(BLOB_NAME, bucket=bucket) + + blob.update_storage_class("NEARLINE", timeout=9.58) + + self.assertEqual(blob.storage_class, "NEARLINE") + + kw = connection._requested + self.assertEqual(len(kw), 2) + + for kw_item in kw: + self.assertIn("timeout", kw_item) + self.assertEqual(kw_item["timeout"], 9.58) + def test_update_storage_class_wo_encryption_key(self): BLOB_NAME = "blob-name" STORAGE_CLASS = u"NEARLINE" From d8432cd65a4e9b38eebd1ade2ff00f2f44bb0ef6 Mon Sep 17 00:00:00 2001 From: William Silversmith Date: Thu, 23 Jul 2020 14:46:04 -0400 Subject: [PATCH 11/13] feat(blob.py): auto-populate standard headers for non-chunked downloads (#204) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Thank you for opening a Pull Request! Before submitting your PR, there are a few things you can do to make sure it goes smoothly: - [x] Make sure to open an issue as a [bug/issue](https://github.com/googleapis/python-storage/issues/new/choose) before writing your code! 
That way we can discuss the change, evaluate designs, and agree on the general idea - [x] Ensure the tests and linter pass - [x] Code coverage does not decrease (if any source code was changed) - [x] Appropriate docs were updated (if necessary) Fixes #24 🦕 This PR autopopulates the following fields for non-chunked downloads based on the server header response: ``` blob.content_encoding blob.content_type blob.cache_control blob.storage_class blob.content_language blob.md5_hash blob.crc32c ``` --- google/cloud/storage/blob.py | 33 +++++++++++++++++++++++++++++++-- tests/unit/test_blob.py | 31 +++++++++++++++++++++++++++++++ 2 files changed, 62 insertions(+), 2 deletions(-) diff --git a/google/cloud/storage/blob.py b/google/cloud/storage/blob.py index efad9ae39..07a17867c 100644 --- a/google/cloud/storage/blob.py +++ b/google/cloud/storage/blob.py @@ -31,6 +31,7 @@ from io import BytesIO import mimetypes import os +import re import warnings import six @@ -783,6 +784,34 @@ def _get_download_url( ) return _add_query_parameters(base_url, name_value_pairs) + def _extract_headers_from_download(self, response): + """Extract headers from a non-chunked request's http object. + + This avoids the need to make a second request for commonly used + headers. 
+ + :type response: + :class requests.models.Response + :param response: The server response from downloading a non-chunked file + """ + self.content_encoding = response.headers.get("Content-Encoding", None) + self.content_type = response.headers.get("Content-Type", None) + self.cache_control = response.headers.get("Cache-Control", None) + self.storage_class = response.headers.get("X-Goog-Storage-Class", None) + self.content_language = response.headers.get("Content-Language", None) + # 'X-Goog-Hash': 'crc32c=4gcgLQ==,md5=CS9tHYTtyFntzj7B9nkkJQ==', + x_goog_hash = response.headers.get("X-Goog-Hash", "") + + digests = {} + for encoded_digest in x_goog_hash.split(","): + match = re.match(r"(crc32c|md5)=([\w\d]+)==", encoded_digest) + if match: + method, digest = match.groups() + digests[method] = digest + + self.crc32c = digests.get("crc32c", None) + self.md5_hash = digests.get("md5", None) + def _do_download( self, transport, @@ -840,8 +869,8 @@ def _do_download( download = klass( download_url, stream=file_obj, headers=headers, start=start, end=end ) - download.consume(transport, timeout=timeout) - + response = download.consume(transport, timeout=timeout) + self._extract_headers_from_download(response) else: if raw_download: diff --git a/tests/unit/test_blob.py b/tests/unit/test_blob.py index 4635b050e..54aeae671 100644 --- a/tests/unit/test_blob.py +++ b/tests/unit/test_blob.py @@ -1451,6 +1451,37 @@ def _download_as_string_helper(self, raw_download, timeout=None): stream = blob._do_download.mock_calls[0].args[1] self.assertIsInstance(stream, io.BytesIO) + def test_download_as_string_w_response_headers(self): + blob_name = "blob-name" + client = mock.Mock(spec=["_http"]) + bucket = _Bucket(client) + media_link = "http://example.com/media/" + properties = {"mediaLink": media_link} + blob = self._make_one(blob_name, bucket=bucket, properties=properties) + + response = self._mock_requests_response( + http_client.OK, + headers={ + "Content-Type": "application/json", + 
"Content-Language": "ko-kr", + "Cache-Control": "max-age=1337;public", + "Content-Encoding": "gzip", + "X-Goog-Storage-Class": "STANDARD", + "X-Goog-Hash": "crc32c=4gcgLQ==,md5=CS9tHYTtyFntzj7B9nkkJQ==", + }, + # { "x": 5 } gzipped + content=b"\x1f\x8b\x08\x00\xcfo\x17_\x02\xff\xabVP\xaaP\xb2R0U\xa8\x05\x00\xa1\xcaQ\x93\n\x00\x00\x00", + ) + blob._extract_headers_from_download(response) + + self.assertEqual(blob.content_type, "application/json") + self.assertEqual(blob.content_language, "ko-kr") + self.assertEqual(blob.content_encoding, "gzip") + self.assertEqual(blob.cache_control, "max-age=1337;public") + self.assertEqual(blob.storage_class, "STANDARD") + self.assertEqual(blob.md5_hash, "CS9tHYTtyFntzj7B9nkkJQ") + self.assertEqual(blob.crc32c, "4gcgLQ") + def test_download_as_string_w_generation_match(self): GENERATION_NUMBER = 6 MEDIA_LINK = "http://example.com/media/" From 55bae9a0e7c0db512c10c6b3b621cd2ef05c9729 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Fri, 24 Jul 2020 13:17:59 -0400 Subject: [PATCH 12/13] chore(packaging): prep for grmp-1.0.0 release (#212) Fixes #211 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index b2ded72b6..d977f2a7e 100644 --- a/setup.py +++ b/setup.py @@ -31,7 +31,7 @@ dependencies = [ "google-auth >= 1.11.0, < 2.0dev", "google-cloud-core >= 1.2.0, < 2.0dev", - "google-resumable-media >= 0.6.0, < 0.7dev", + "google-resumable-media >= 0.6.0, < 2.0dev", ] extras = {} From 3d072f6e7259613208debd933a974167ce41f95b Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Fri, 24 Jul 2020 16:41:26 -0400 Subject: [PATCH 13/13] chore: release 1.30.0 (#191) * chore: updated CHANGELOG.md [ci skip] * chore: updated setup.cfg [ci skip] * chore: updated setup.py * docs: remove spurious changelog prefixes * docs: add changelog for PR #212 [ci skip] Co-authored-by: release-please[bot] 
<55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Tres Seaver --- CHANGELOG.md | 72 +++++++++++++++++++++++++++++++++++++--------------- setup.py | 2 +- 2 files changed, 53 insertions(+), 21 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3addf68b8..0e117c6b1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,38 @@ [1]: https://pypi.org/project/google-cloud-storage/#history +## [1.30.0](https://www.github.com/googleapis/python-storage/compare/v1.29.0...v1.30.0) (2020-07-24) + + +### Features + +* add timeouts to Blob methods where missing ([#185](https://www.github.com/googleapis/python-storage/issues/185)) ([6eeb855](https://www.github.com/googleapis/python-storage/commit/6eeb855aa0e6a7835d1d4f6e72951e43af22ab57)) +* auto-populate standard headers for non-chunked downloads ([#204](https://www.github.com/googleapis/python-storage/issues/204)) ([d8432cd](https://www.github.com/googleapis/python-storage/commit/d8432cd65a4e9b38eebd1ade2ff00f2f44bb0ef6)), closes [#24](https://www.github.com/googleapis/python-storage/issues/24) +* migrate to Service Account Credentials API ([#189](https://www.github.com/googleapis/python-storage/issues/189)) ([e4990d0](https://www.github.com/googleapis/python-storage/commit/e4990d06043dbd8d1a417f3a1a67fe8746071f1c)) + + +### Bug Fixes + +* add multiprocessing.rst to synthool excludes ([#186](https://www.github.com/googleapis/python-storage/issues/186)) ([4d76e38](https://www.github.com/googleapis/python-storage/commit/4d76e3882210ed2818a43256265f6df31348d410)) + + +### Documentation + +* fix indent in code blocks ([#171](https://www.github.com/googleapis/python-storage/issues/171)) ([62d1543](https://www.github.com/googleapis/python-storage/commit/62d1543e18040b286b23464562aa6eb998074c54)), closes [#170](https://www.github.com/googleapis/python-storage/issues/170) +* remove doubled word in docstring ([#209](https://www.github.com/googleapis/python-storage/issues/209)) 
([7a4e7a5](https://www.github.com/googleapis/python-storage/commit/7a4e7a5974abedb0b7b2e110cacbfcd0a40346b6)) + + +### Dependencies + +* prep for grmp-1.0.0 release ([#211](https://www.github.com/googleapis/python-storage/issues/211)) ([55bae9a](https://www.github.com/googleapis/python-storage/commit/55bae9a0e7c0db512c10c6b3b621cd2ef05c9729)) + + ## [1.29.0](https://www.github.com/googleapis/python-storage/compare/v1.28.1...v1.29.0) (2020-06-09) @@ -20,11 +52,11 @@ ### Bug Fixes -* **storage:** add documentaion of list_blobs with user project ([#147](https://www.github.com/googleapis/python-storage/issues/147)) ([792b21f](https://www.github.com/googleapis/python-storage/commit/792b21fd2263b518d56f79cab6a4a1bb06c6e4e7)) -* **storage:** add projection parameter to blob.reload method ([#146](https://www.github.com/googleapis/python-storage/issues/146)) ([ddad20b](https://www.github.com/googleapis/python-storage/commit/ddad20b3c3d2e6bf482e34dad85fa4b0ff90e1b1)) -* **storage:** add unused variables to method generation match ([#152](https://www.github.com/googleapis/python-storage/issues/152)) ([f6574bb](https://www.github.com/googleapis/python-storage/commit/f6574bb84c60c30989d05dba97b423579360cdb2)) -* **storage:** change the method names in snippets file ([#161](https://www.github.com/googleapis/python-storage/issues/161)) ([e516ed9](https://www.github.com/googleapis/python-storage/commit/e516ed9be518e30df4e201d3242f979c0b081086)) -* **storage:** fix 
upload object with bucket cmek enabled ([#158](https://www.github.com/googleapis/python-storage/issues/158)) ([5f27ffa](https://www.github.com/googleapis/python-storage/commit/5f27ffa3b1b55681453b594a0ef9e2811fc5f0c8)) +* add documentaion of list_blobs with user project ([#147](https://www.github.com/googleapis/python-storage/issues/147)) ([792b21f](https://www.github.com/googleapis/python-storage/commit/792b21fd2263b518d56f79cab6a4a1bb06c6e4e7)) +* add projection parameter to blob.reload method ([#146](https://www.github.com/googleapis/python-storage/issues/146)) ([ddad20b](https://www.github.com/googleapis/python-storage/commit/ddad20b3c3d2e6bf482e34dad85fa4b0ff90e1b1)) +* add unused variables to method generation match ([#152](https://www.github.com/googleapis/python-storage/issues/152)) ([f6574bb](https://www.github.com/googleapis/python-storage/commit/f6574bb84c60c30989d05dba97b423579360cdb2)) +* change the method names in snippets file ([#161](https://www.github.com/googleapis/python-storage/issues/161)) ([e516ed9](https://www.github.com/googleapis/python-storage/commit/e516ed9be518e30df4e201d3242f979c0b081086)) +* fix upload object with bucket cmek enabled ([#158](https://www.github.com/googleapis/python-storage/issues/158)) ([5f27ffa](https://www.github.com/googleapis/python-storage/commit/5f27ffa3b1b55681453b594a0ef9e2811fc5f0c8)) * set default POST policy scheme to "http" ([#172](https://www.github.com/googleapis/python-storage/issues/172)) ([90c020d](https://www.github.com/googleapis/python-storage/commit/90c020d69a69ebc396416e4086a2e0838932130c)) ### [1.28.1](https://www.github.com/googleapis/python-storage/compare/v1.28.0...v1.28.1) (2020-04-28) @@ -32,7 +64,7 @@ ### Bug Fixes -* **storage:** anonymous credentials for private bucket ([#107](https://www.github.com/googleapis/python-storage/issues/107)) ([6152ab4](https://www.github.com/googleapis/python-storage/commit/6152ab4067d39ba824f9b6a17b83859dd7236cec)) +* anonymous credentials for private bucket 
([#107](https://www.github.com/googleapis/python-storage/issues/107)) ([6152ab4](https://www.github.com/googleapis/python-storage/commit/6152ab4067d39ba824f9b6a17b83859dd7236cec)) * add bucket name into POST policy conditions ([#118](https://www.github.com/googleapis/python-storage/issues/118)) ([311ecab](https://www.github.com/googleapis/python-storage/commit/311ecabf8acc3018cef0697dd29483693f7722b9)) ## [1.28.0](https://www.github.com/googleapis/python-storage/compare/v1.27.0...v1.28.0) (2020-04-22) @@ -40,13 +72,13 @@ ### Features -* **storage:** add arguments for *GenerationMatch uploading options ([#111](https://www.github.com/googleapis/python-storage/issues/111)) ([b11aa5f](https://www.github.com/googleapis/python-storage/commit/b11aa5f00753b094580847bc62c154ae0e584dbc)) +* add arguments for *GenerationMatch uploading options ([#111](https://www.github.com/googleapis/python-storage/issues/111)) ([b11aa5f](https://www.github.com/googleapis/python-storage/commit/b11aa5f00753b094580847bc62c154ae0e584dbc)) ### Bug Fixes -* **storage:** fix incorrect mtime by UTC offset ([#42](https://www.github.com/googleapis/python-storage/issues/42)) ([76bd652](https://www.github.com/googleapis/python-storage/commit/76bd652a3078d94e03e566b6a387fc488ab26910)) -* **storage:** remove expiration strict conversion ([#106](https://www.github.com/googleapis/python-storage/issues/106)) ([9550dad](https://www.github.com/googleapis/python-storage/commit/9550dad6e63e249110fc9dcda245061b76dacdcf)), closes [#105](https://www.github.com/googleapis/python-storage/issues/105) +* fix incorrect mtime by UTC offset ([#42](https://www.github.com/googleapis/python-storage/issues/42)) ([76bd652](https://www.github.com/googleapis/python-storage/commit/76bd652a3078d94e03e566b6a387fc488ab26910)) +* remove expiration strict conversion ([#106](https://www.github.com/googleapis/python-storage/issues/106)) 
([9550dad](https://www.github.com/googleapis/python-storage/commit/9550dad6e63e249110fc9dcda245061b76dacdcf)), closes [#105](https://www.github.com/googleapis/python-storage/issues/105) ## [1.27.0](https://www.github.com/googleapis/python-storage/compare/v1.26.0...v1.27.0) (2020-04-01) @@ -54,35 +86,35 @@ ### Features * generate signed URLs for blobs/buckets using virtual hostname ([#58](https://www.github.com/googleapis/python-storage/issues/58)) ([23df542](https://www.github.com/googleapis/python-storage/commit/23df542d0669852b05139023d5ef1ae14a09f4c7)) -* **storage:** Add cname support for V4 signature ([#72](https://www.github.com/googleapis/python-storage/issues/72)) ([cc853af](https://www.github.com/googleapis/python-storage/commit/cc853af6bf8e44e5b16e8cdfb3a275629ffb1f27)) -* **storage:** add conformance tests for virtual hosted style signed URLs ([#83](https://www.github.com/googleapis/python-storage/issues/83)) ([5adc8b0](https://www.github.com/googleapis/python-storage/commit/5adc8b0e6ffe28185a4085cd1fc8c1b4998094aa)) -* **storage:** add get notification method ([#77](https://www.github.com/googleapis/python-storage/issues/77)) ([f602252](https://www.github.com/googleapis/python-storage/commit/f6022521bee0824e1b291211703afc5eae6c6891)) -* **storage:** improve v4 signature query parameters encoding ([#48](https://www.github.com/googleapis/python-storage/issues/48)) ([8df0b55](https://www.github.com/googleapis/python-storage/commit/8df0b554a1904787889309707f08c6b8683cad44)) +* Add cname support for V4 signature ([#72](https://www.github.com/googleapis/python-storage/issues/72)) ([cc853af](https://www.github.com/googleapis/python-storage/commit/cc853af6bf8e44e5b16e8cdfb3a275629ffb1f27)) +* add conformance tests for virtual hosted style signed URLs ([#83](https://www.github.com/googleapis/python-storage/issues/83)) ([5adc8b0](https://www.github.com/googleapis/python-storage/commit/5adc8b0e6ffe28185a4085cd1fc8c1b4998094aa)) +* add get notification method 
([#77](https://www.github.com/googleapis/python-storage/issues/77)) ([f602252](https://www.github.com/googleapis/python-storage/commit/f6022521bee0824e1b291211703afc5eae6c6891)) +* improve v4 signature query parameters encoding ([#48](https://www.github.com/googleapis/python-storage/issues/48)) ([8df0b55](https://www.github.com/googleapis/python-storage/commit/8df0b554a1904787889309707f08c6b8683cad44)) ### Bug Fixes -* **storage:** fix blob metadata to None regression ([#60](https://www.github.com/googleapis/python-storage/issues/60)) ([a834d1b](https://www.github.com/googleapis/python-storage/commit/a834d1b54aa96152ced4d841c4e0c241acd2d8d8)) +* fix blob metadata to None regression ([#60](https://www.github.com/googleapis/python-storage/issues/60)) ([a834d1b](https://www.github.com/googleapis/python-storage/commit/a834d1b54aa96152ced4d841c4e0c241acd2d8d8)) * add classifer for Python 3.8 ([#63](https://www.github.com/googleapis/python-storage/issues/63)) ([1b9b6bc](https://www.github.com/googleapis/python-storage/commit/1b9b6bc2601ee336a8399266852fb850e368b30a)) * make v4 signing formatting consistent w/ spec ([#56](https://www.github.com/googleapis/python-storage/issues/56)) ([8712da8](https://www.github.com/googleapis/python-storage/commit/8712da84c93600a736e72a097c42a49b4724347d)) * use correct IAM object admin role ([#71](https://www.github.com/googleapis/python-storage/issues/71)) ([2e27edd](https://www.github.com/googleapis/python-storage/commit/2e27edd3fe65cd5e17c12bf11f2b58f611937d61)) -* **storage:** remove docstring of retrun in reload method ([#78](https://www.github.com/googleapis/python-storage/issues/78)) ([4abeb1c](https://www.github.com/googleapis/python-storage/commit/4abeb1c0810c4e5d716758536da9fc204fa4c2a9)) -* **storage:** use OrderedDict while encoding POST policy ([#95](https://www.github.com/googleapis/python-storage/issues/95)) ([df560e1](https://www.github.com/googleapis/python-storage/commit/df560e178369a6d03140e412a25af6ec7444f5a1)) +* 
remove docstring of retrun in reload method ([#78](https://www.github.com/googleapis/python-storage/issues/78)) ([4abeb1c](https://www.github.com/googleapis/python-storage/commit/4abeb1c0810c4e5d716758536da9fc204fa4c2a9)) +* use OrderedDict while encoding POST policy ([#95](https://www.github.com/googleapis/python-storage/issues/95)) ([df560e1](https://www.github.com/googleapis/python-storage/commit/df560e178369a6d03140e412a25af6ec7444f5a1)) ## [1.26.0](https://www.github.com/googleapis/python-storage/compare/v1.25.0...v1.26.0) (2020-02-12) ### Features -* **storage:** add support for signing URLs using token ([#9889](https://www.github.com/googleapis/google-cloud-python/issues/9889)) ([ad280bf](https://www.github.com/googleapis/python-storage/commit/ad280bf506d3d7a37c402d06eac07422a5fe80af)) +* add support for signing URLs using token ([#9889](https://www.github.com/googleapis/google-cloud-python/issues/9889)) ([ad280bf](https://www.github.com/googleapis/python-storage/commit/ad280bf506d3d7a37c402d06eac07422a5fe80af)) * add timeout parameter to public methods ([#44](https://www.github.com/googleapis/python-storage/issues/44)) ([63abf07](https://www.github.com/googleapis/python-storage/commit/63abf0778686df1caa001270dd22f9df0daf0c78)) ### Bug Fixes -* **storage:** fix documentation of max_result parameter in list_blob ([#43](https://www.github.com/googleapis/python-storage/issues/43)) ([ff15f19](https://www.github.com/googleapis/python-storage/commit/ff15f19d3a5830acdd540181dc6e9d07ca7d88ee)) -* **storage:** fix system test and change scope for iam access token ([#47](https://www.github.com/googleapis/python-storage/issues/47)) ([bc5375f](https://www.github.com/googleapis/python-storage/commit/bc5375f4c88f7e6ad1afbe7667c49d9a846e9757)) -* **tests:** remove low version error assertion from iam conditions system tests ([#53](https://www.github.com/googleapis/python-storage/issues/53)) 
([8904aee](https://www.github.com/googleapis/python-storage/commit/8904aee9ad5dc01ab83e1460b6f186a739668eb7)) +* fix documentation of max_result parameter in list_blob ([#43](https://www.github.com/googleapis/python-storage/issues/43)) ([ff15f19](https://www.github.com/googleapis/python-storage/commit/ff15f19d3a5830acdd540181dc6e9d07ca7d88ee)) +* fix system test and change scope for iam access token ([#47](https://www.github.com/googleapis/python-storage/issues/47)) ([bc5375f](https://www.github.com/googleapis/python-storage/commit/bc5375f4c88f7e6ad1afbe7667c49d9a846e9757)) +* remove low version error assertion from iam conditions system tests ([#53](https://www.github.com/googleapis/python-storage/issues/53)) ([8904aee](https://www.github.com/googleapis/python-storage/commit/8904aee9ad5dc01ab83e1460b6f186a739668eb7)) ## 1.25.0 diff --git a/setup.py b/setup.py index d977f2a7e..91cb1dcc8 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-storage" description = "Google Cloud Storage API client library" -version = "1.29.0" +version = "1.30.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta'