From 55fe0b18c9e7456b43d15f91672874a1e30d912e Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 8 May 2018 10:37:56 -0700 Subject: [PATCH 01/75] Redis generated (#5309) * generated redis initial version --- redis/.coveragerc | 14 + redis/.flake8 | 12 + redis/LICENSE | 201 ++++ redis/MANIFEST.in | 5 + redis/README.rst | 76 ++ redis/docs/conf.py | 310 +++++ redis/docs/gapic/v1beta1/api.rst | 6 + redis/docs/gapic/v1beta1/types.rst | 5 + redis/docs/index.rst | 84 ++ redis/google/__init__.py | 20 + redis/google/cloud/__init__.py | 20 + redis/google/cloud/redis.py | 25 + redis/google/cloud/redis_v1beta1/__init__.py | 31 + .../cloud/redis_v1beta1/gapic/__init__.py | 0 .../redis_v1beta1/gapic/cloud_redis_client.py | 569 +++++++++ .../gapic/cloud_redis_client_config.py | 48 + .../google/cloud/redis_v1beta1/gapic/enums.py | 53 + .../cloud/redis_v1beta1/proto/__init__.py | 0 .../redis_v1beta1/proto/cloud_redis_pb2.py | 1015 +++++++++++++++++ .../proto/cloud_redis_pb2_grpc.py | 166 +++ redis/google/cloud/redis_v1beta1/types.py | 47 + redis/nox.py | 107 ++ redis/setup.cfg | 2 + redis/setup.py | 87 ++ .../test_cloud_redis_client_v1beta1.py | 367 ++++++ 25 files changed, 3270 insertions(+) create mode 100644 redis/.coveragerc create mode 100644 redis/.flake8 create mode 100644 redis/LICENSE create mode 100644 redis/MANIFEST.in create mode 100644 redis/README.rst create mode 100644 redis/docs/conf.py create mode 100644 redis/docs/gapic/v1beta1/api.rst create mode 100644 redis/docs/gapic/v1beta1/types.rst create mode 100644 redis/docs/index.rst create mode 100644 redis/google/__init__.py create mode 100644 redis/google/cloud/__init__.py create mode 100644 redis/google/cloud/redis.py create mode 100644 redis/google/cloud/redis_v1beta1/__init__.py create mode 100644 redis/google/cloud/redis_v1beta1/gapic/__init__.py create mode 100644 redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py create mode 100644 redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client_config.py create mode 100644 redis/google/cloud/redis_v1beta1/gapic/enums.py create mode 100644 redis/google/cloud/redis_v1beta1/proto/__init__.py create mode 100644 redis/google/cloud/redis_v1beta1/proto/cloud_redis_pb2.py create mode 100644 redis/google/cloud/redis_v1beta1/proto/cloud_redis_pb2_grpc.py create mode 100644 redis/google/cloud/redis_v1beta1/types.py create mode 100644 redis/nox.py create mode 100644 redis/setup.cfg create mode 100644 redis/setup.py create mode 100644 redis/tests/unit/gapic/v1beta1/test_cloud_redis_client_v1beta1.py diff --git a/redis/.coveragerc b/redis/.coveragerc new file mode 100644 index 000000000000..06cea6d99944 --- /dev/null +++ b/redis/.coveragerc @@ -0,0 +1,14 @@ +[run] +branch = True + +[report] +fail_under = 100 +show_missing = True +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ +omit = + */gapic/*.py + */proto/*.py diff --git a/redis/.flake8 b/redis/.flake8 new file mode 100644 index 000000000000..1f44a90f8195 --- /dev/null +++ b/redis/.flake8 @@ -0,0 +1,12 @@ +[flake8] +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + *_pb2.py + + # Standard linting exemptions. + __pycache__, + .git, + *.pyc, + conf.py diff --git a/redis/LICENSE b/redis/LICENSE new file mode 100644 index 000000000000..a8ee855de2aa --- /dev/null +++ b/redis/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. 
Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/redis/MANIFEST.in b/redis/MANIFEST.in
new file mode 100644
index 000000000000..9cbf175afe6b
--- /dev/null
+++ b/redis/MANIFEST.in
@@ -0,0 +1,5 @@
+include README.rst LICENSE
+recursive-include google *.json *.proto
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
diff --git a/redis/README.rst b/redis/README.rst
new file mode 100644
index 000000000000..dd1d377b51cb
--- /dev/null
+++ b/redis/README.rst
@@ -0,0 +1,76 @@
+Python Client for Google Cloud Memorystore for Redis API (`Alpha`_)
+===================================================================
+
+`Google Cloud Memorystore for Redis API`_: The Google Cloud Memorystore for Redis API is used for creating and managing
+Redis instances on the Google Cloud Platform.
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. _Alpha: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst
+.. _Google Cloud Memorystore for Redis API: https://cloud.google.com/redis
+.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/redis/usage.html
+.. _Product Documentation: https://cloud.google.com/redis
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Google Cloud Memorystore for Redis API.`_
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Google Cloud Memorystore for Redis API.: https://cloud.google.com/redis
+.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/stable/core/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-cloud-redis
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-cloud-redis
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for the Google Cloud Memorystore for
+  Redis API to see other available methods on the client.
+- Read the `Google Cloud Memorystore for Redis API Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `repository’s main README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Google Cloud Memorystore for Redis API Product documentation: https://cloud.google.com/redis
+.. 
_repository’s main README: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst \ No newline at end of file diff --git a/redis/docs/conf.py b/redis/docs/conf.py new file mode 100644 index 000000000000..bbd8a724003e --- /dev/null +++ b/redis/docs/conf.py @@ -0,0 +1,310 @@ +# -*- coding: utf-8 -*- +# +# google-cloud-redis documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath('..')) + +__version__ = '0.1.0' + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.intersphinx', + 'sphinx.ext.coverage', + 'sphinx.ext.napoleon', + 'sphinx.ext.viewcode', +] + +# autodoc/autosummary flags +autoclass_content = 'both' +autodoc_default_flags = ['members'] +autosummary_generate = True + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'google-cloud-redis' +copyright = u'2017, Google' +author = u'Google APIs' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = '.'.join(release.split('.')[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). 
+#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = [] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. 
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'google-cloud-redis-doc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + #'preamble': '', + + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'google-cloud-redis.tex', u'google-cloud-redis Documentation', + author, 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(master_doc, 'google-cloud-redis', + u'google-cloud-redis Documentation', [author], 1)] + +# If true, show URL addresses after external links. +#man_show_urls = False + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'google-cloud-redis', u'google-cloud-redis Documentation', + author, 'google-cloud-redis', + 'GAPIC library for the {metadata.shortName} v1beta1 service', 'APIs'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + 'python': ('http://python.readthedocs.org/en/latest/', None), + 'gax': ('https://gax-python.readthedocs.org/en/latest/', None), +} + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/redis/docs/gapic/v1beta1/api.rst b/redis/docs/gapic/v1beta1/api.rst new file mode 100644 index 000000000000..b1505646c85c --- /dev/null +++ b/redis/docs/gapic/v1beta1/api.rst @@ -0,0 +1,6 @@ +Client for Google Cloud Memorystore for Redis API +================================================= + +.. automodule:: google.cloud.redis_v1beta1 + :members: + :inherited-members: \ No newline at end of file diff --git a/redis/docs/gapic/v1beta1/types.rst b/redis/docs/gapic/v1beta1/types.rst new file mode 100644 index 000000000000..579707e712de --- /dev/null +++ b/redis/docs/gapic/v1beta1/types.rst @@ -0,0 +1,5 @@ +Types for Google Cloud Memorystore for Redis API Client +======================================================= + +.. automodule:: google.cloud.redis_v1beta1.types + :members: \ No newline at end of file diff --git a/redis/docs/index.rst b/redis/docs/index.rst new file mode 100644 index 000000000000..6735c3065158 --- /dev/null +++ b/redis/docs/index.rst @@ -0,0 +1,84 @@ +Python Client for Google Cloud Memorystore for Redis API (`Alpha`_) +=================================================================== + +`Google Cloud Memorystore for Redis API`_: The Google Cloud Memorystore for Redis API is used for creating and managing +Redis instances on the Google Cloud Platform. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. _Alpha: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst +.. _Google Cloud Memorystore for Redis API: https://cloud.google.com/redis +.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/redis/usage.html +.. _Product Documentation: https://cloud.google.com/redis + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Google Cloud Memorystore for Redis API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Google Cloud Memorystore for Redis API.: https://cloud.google.com/redis +.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/stable/core/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. 
code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-cloud-redis
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-cloud-redis
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for the Google Cloud Memorystore for
+  Redis API to see other available methods on the client.
+- Read the `Google Cloud Memorystore for Redis API Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `repository’s main README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Google Cloud Memorystore for Redis API Product documentation: https://cloud.google.com/redis
+.. _repository’s main README: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    gapic/v1beta1/api
+    gapic/v1beta1/types
\ No newline at end of file
diff --git a/redis/google/__init__.py b/redis/google/__init__.py
new file mode 100644
index 000000000000..7a9e5a0ef198
--- /dev/null
+++ b/redis/google/__init__.py
@@ -0,0 +1,20 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+try:
+    import pkg_resources
+    pkg_resources.declare_namespace(__name__)
+except ImportError:
+    import pkgutil
+    __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/redis/google/cloud/__init__.py b/redis/google/cloud/__init__.py
new file mode 100644
index 000000000000..7a9e5a0ef198
--- /dev/null
+++ b/redis/google/cloud/__init__.py
@@ -0,0 +1,20 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+try:
+    import pkg_resources
+    pkg_resources.declare_namespace(__name__)
+except ImportError:
+    import pkgutil
+    __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/redis/google/cloud/redis.py b/redis/google/cloud/redis.py
new file mode 100644
index 000000000000..50cd6bfc1425
--- /dev/null
+++ b/redis/google/cloud/redis.py
@@ -0,0 +1,25 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.redis_v1beta1 import CloudRedisClient +from google.cloud.redis_v1beta1 import enums +from google.cloud.redis_v1beta1 import types + +__all__ = ( + 'enums', + 'types', + 'CloudRedisClient', +) diff --git a/redis/google/cloud/redis_v1beta1/__init__.py b/redis/google/cloud/redis_v1beta1/__init__.py new file mode 100644 index 000000000000..1b6e08085d52 --- /dev/null +++ b/redis/google/cloud/redis_v1beta1/__init__.py @@ -0,0 +1,31 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.redis_v1beta1 import types +from google.cloud.redis_v1beta1.gapic import cloud_redis_client +from google.cloud.redis_v1beta1.gapic import enums + + +class CloudRedisClient(cloud_redis_client.CloudRedisClient): + __doc__ = cloud_redis_client.CloudRedisClient.__doc__ + enums = enums + + +__all__ = ( + 'enums', + 'types', + 'CloudRedisClient', +) diff --git a/redis/google/cloud/redis_v1beta1/gapic/__init__.py b/redis/google/cloud/redis_v1beta1/gapic/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py b/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py new file mode 100644 index 000000000000..70ecf9b65897 --- /dev/null +++ b/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py @@ -0,0 +1,569 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Accesses the google.cloud.redis.v1beta1 CloudRedis API.""" + +import functools +import pkg_resources + +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.grpc_helpers +import google.api_core.operation +import google.api_core.operations_v1 +import google.api_core.page_iterator +import google.api_core.path_template + +from google.cloud.redis_v1beta1.gapic import cloud_redis_client_config +from google.cloud.redis_v1beta1.gapic import enums +from google.cloud.redis_v1beta1.proto import cloud_redis_pb2 +from google.cloud.redis_v1beta1.proto import cloud_redis_pb2_grpc +from google.longrunning import operations_pb2 +from google.protobuf import any_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + 'google-cloud-redis', ).version + + +class CloudRedisClient(object): + """ + Configures and manages Cloud Memorystore for Redis instances + + Google Cloud Memorystore for Redis v1beta1 + + The ``redis.googleapis.com`` service implements the Google Cloud Memorystore + for Redis API and defines the following resource model for managing Redis + instances: + * The service works with a collection of cloud projects, named: ``/projects/*`` + * Each project has a collection of available locations, named: ``/locations/*`` + * Each location has a collection of Redis instances, named: ``/instances/*`` + * As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` + + Note that location_id must be refering to a GCP ``region``; for example: + * ``projects/redpepper-1290/locations/us-central1/instances/my-redis`` + """ + + SERVICE_ADDRESS = 'redis.googleapis.com:443' + """The default address of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', ) + + # The name of the interface for this client. This is the key used to find + # method configuration in the client_config dictionary. + _INTERFACE_NAME = 'google.cloud.redis.v1beta1.CloudRedis' + + @classmethod + def location_path(cls, project, location): + """Return a fully-qualified location string.""" + return google.api_core.path_template.expand( + 'projects/{project}/locations/{location}', + project=project, + location=location, + ) + + @classmethod + def instance_path(cls, project, location, instance): + """Return a fully-qualified instance string.""" + return google.api_core.path_template.expand( + 'projects/{project}/locations/{location}/instances/{instance}', + project=project, + location=location, + instance=instance, + ) + + def __init__(self, + channel=None, + credentials=None, + client_config=cloud_redis_client_config.config, + client_info=None): + """Constructor. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_config (dict): A dictionary of call options for each + method. If not specified, the default configuration is used. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + 'The `channel` and `credentials` arguments to {} are mutually ' + 'exclusive.'.format(self.__class__.__name__), ) + + # Create the channel. + if channel is None: + channel = google.api_core.grpc_helpers.create_channel( + self.SERVICE_ADDRESS, + credentials=credentials, + scopes=self._DEFAULT_SCOPES, + ) + + # Create the gRPC stubs. + self.cloud_redis_stub = (cloud_redis_pb2_grpc.CloudRedisStub(channel)) + + # Operations client for methods that return long-running operations + # futures. + self.operations_client = ( + google.api_core.operations_v1.OperationsClient(channel)) + + if client_info is None: + client_info = ( + google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config['interfaces'][self._INTERFACE_NAME], ) + + # Write the "inner API call" methods to the class. + # These are wrapped versions of the gRPC stub methods, with retry and + # timeout configuration applied, called by the public methods on + # this class. + self._list_instances = google.api_core.gapic_v1.method.wrap_method( + self.cloud_redis_stub.ListInstances, + default_retry=method_configs['ListInstances'].retry, + default_timeout=method_configs['ListInstances'].timeout, + client_info=client_info, + ) + self._get_instance = google.api_core.gapic_v1.method.wrap_method( + self.cloud_redis_stub.GetInstance, + default_retry=method_configs['GetInstance'].retry, + default_timeout=method_configs['GetInstance'].timeout, + client_info=client_info, + ) + self._create_instance = google.api_core.gapic_v1.method.wrap_method( + self.cloud_redis_stub.CreateInstance, + default_retry=method_configs['CreateInstance'].retry, + default_timeout=method_configs['CreateInstance'].timeout, + client_info=client_info, + ) + self._update_instance = google.api_core.gapic_v1.method.wrap_method( + self.cloud_redis_stub.UpdateInstance, + default_retry=method_configs['UpdateInstance'].retry, + default_timeout=method_configs['UpdateInstance'].timeout, + client_info=client_info, + ) + self._delete_instance = google.api_core.gapic_v1.method.wrap_method( + self.cloud_redis_stub.DeleteInstance, + default_retry=method_configs['DeleteInstance'].retry, + default_timeout=method_configs['DeleteInstance'].timeout, + client_info=client_info, + ) + + # Service calls + def list_instances(self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists all Redis instances owned by a project in either the specified + location (region) or all locations. 
+
+        The location should have the following format:
+
+        * ``projects/{project_id}/locations/{location_id}``
+
+        If ``location_id`` is specified as ``-`` (wildcard), then all regions
+        available to the project are queried, and the results are aggregated.
+
+        Example:
+            >>> from google.cloud import redis_v1beta1
+            >>>
+            >>> client = redis_v1beta1.CloudRedisClient()
+            >>>
+            >>> parent = client.location_path('[PROJECT]', '[LOCATION]')
+            >>>
+            >>>
+            >>> # Iterate over all results
+            >>> for element in client.list_instances(parent):
+            ...     # process element
+            ...     pass
+            >>>
+            >>> # Or iterate over results one page at a time
+            >>> for page in client.list_instances(parent).pages:
+            ...     for element in page:
+            ...         # process element
+            ...         pass
+
+        Args:
+            parent (str): Required. The resource name of the instance location using the form:
+                ::
+
+                    `projects/{project_id}/locations/{location_id}`
+
+                where ``location_id`` refers to a GCP region
+            page_size (int): The maximum number of resources contained in the
+                underlying API response. If page streaming is performed per-
+                resource, this parameter does not affect the return value. If page
+                streaming is performed per-page, this determines the maximum number
+                of resources in a page.
+            retry (Optional[google.api_core.retry.Retry]): A retry object used
+                to retry requests. If ``None`` is specified, requests will not
+                be retried.
+            timeout (Optional[float]): The amount of time, in seconds, to wait
+                for the request to complete. Note that if ``retry`` is
+                specified, the timeout applies to each individual attempt.
+            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+                that is provided to the method.
+
+        Returns:
+            A :class:`~google.api_core.page_iterator.GRPCIterator` instance.
+            By default, this is an iterable of
+            :class:`~google.cloud.redis_v1beta1.types.Instance` instances.
+            The iterator can also yield one page of elements at a time
+            through its ``pages`` property.
+
+        Raises:
+            google.api_core.exceptions.GoogleAPICallError: If the request
+                failed for any reason.
+            google.api_core.exceptions.RetryError: If the request failed due
+                to a retryable error and retry attempts failed.
+            ValueError: If the parameters are invalid.
+        """
+        if metadata is None:
+            metadata = []
+        metadata = list(metadata)
+        request = cloud_redis_pb2.ListInstancesRequest(
+            parent=parent,
+            page_size=page_size,
+        )
+        iterator = google.api_core.page_iterator.GRPCIterator(
+            client=None,
+            method=functools.partial(
+                self._list_instances,
+                retry=retry,
+                timeout=timeout,
+                metadata=metadata),
+            request=request,
+            items_field='instances',
+            request_token_field='page_token',
+            response_token_field='next_page_token',
+        )
+        return iterator
+
+    def get_instance(self,
+                     name,
+                     retry=google.api_core.gapic_v1.method.DEFAULT,
+                     timeout=google.api_core.gapic_v1.method.DEFAULT,
+                     metadata=None):
+        """
+        Gets the details of a specific Redis instance.
+
+        Example:
+            >>> from google.cloud import redis_v1beta1
+            >>>
+            >>> client = redis_v1beta1.CloudRedisClient()
+            >>>
+            >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]')
+            >>>
+            >>> response = client.get_instance(name)
+
+        Args:
+            name (str): Required. Redis instance resource name using the form:
+                ::
+
+                    `projects/{project_id}/locations/{location_id}/instances/{instance_id}`
+
+                where ``location_id`` refers to a GCP region
+            retry (Optional[google.api_core.retry.Retry]): A retry object used
+                to retry requests. If ``None`` is specified, requests will not
+                be retried.
+            timeout (Optional[float]): The amount of time, in seconds, to wait
+                for the request to complete. Note that if ``retry`` is
+                specified, the timeout applies to each individual attempt.
+            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+                that is provided to the method.
+
+        Returns:
+            A :class:`~google.cloud.redis_v1beta1.types.Instance` instance.
+
+        Raises:
+            google.api_core.exceptions.GoogleAPICallError: If the request
+                failed for any reason.
+            google.api_core.exceptions.RetryError: If the request failed due
+                to a retryable error and retry attempts failed.
+            ValueError: If the parameters are invalid.
+        """
+        if metadata is None:
+            metadata = []
+        metadata = list(metadata)
+        request = cloud_redis_pb2.GetInstanceRequest(name=name, )
+        return self._get_instance(
+            request, retry=retry, timeout=timeout, metadata=metadata)
+
+    def create_instance(self,
+                        parent,
+                        instance_id,
+                        instance,
+                        retry=google.api_core.gapic_v1.method.DEFAULT,
+                        timeout=google.api_core.gapic_v1.method.DEFAULT,
+                        metadata=None):
+        """
+        Creates a Redis instance based on the specified tier and memory size.
+
+        By default, the instance is peered to the project's
+        `default network <https://cloud.google.com/compute/docs/networks-and-firewalls#networks>`_.
+
+        The creation is executed asynchronously and callers may check the returned
+        operation to track its progress. Once the operation is completed the Redis
+        instance will be fully functional. The completed longrunning.Operation will
+        contain the new instance object in the response field.
+
+        The returned operation is automatically deleted after a few hours, so there
+        is no need to call DeleteOperation.
+
+        Example:
+            >>> from google.cloud import redis_v1beta1
+            >>> from google.cloud.redis_v1beta1 import enums
+            >>>
+            >>> client = redis_v1beta1.CloudRedisClient()
+            >>>
+            >>> parent = client.location_path('[PROJECT]', '[LOCATION]')
+            >>> instance_id = 'test_instance'
+            >>> tier = enums.Instance.Tier.BASIC
+            >>> memory_size_gb = 1
+            >>> instance = {'tier': tier, 'memory_size_gb': memory_size_gb}
+            >>>
+            >>> response = client.create_instance(parent, instance_id, instance)
+            >>>
+            >>> def callback(operation_future):
+            ...     # Handle result.
+            ...     result = operation_future.result()
+            >>>
+            >>> response.add_done_callback(callback)
+            >>>
+            >>> # Handle metadata.
+            >>> metadata = response.metadata()
+
+        Args:
+            parent (str): Required. The resource name of the instance location using the form:
+                ::
+
+                    `projects/{project_id}/locations/{location_id}`
+
+                where ``location_id`` refers to a GCP region
+            instance_id (str): Required. The logical name of the Redis instance in the customer project
+                with the following restrictions:
+
+                * Must contain only lowercase letters, numbers, and hyphens.
+                * Must start with a letter.
+                * Must be between 1-40 characters.
+                * Must end with a number or a letter.
+                * Must be unique within the customer project / location
+            instance (Union[dict, ~google.cloud.redis_v1beta1.types.Instance]): Required. A Redis ``Instance`` resource.
+                If a dict is provided, it must be of the same form as the protobuf
+                message :class:`~google.cloud.redis_v1beta1.types.Instance`
+            retry (Optional[google.api_core.retry.Retry]): A retry object used
+                to retry requests. If ``None`` is specified, requests will not
+                be retried.
+            timeout (Optional[float]): The amount of time, in seconds, to wait
+                for the request to complete. Note that if ``retry`` is
+                specified, the timeout applies to each individual attempt.
+            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+                that is provided to the method.
+ + Returns: + A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = cloud_redis_pb2.CreateInstanceRequest( + parent=parent, + instance_id=instance_id, + instance=instance, + ) + operation = self._create_instance( + request, retry=retry, timeout=timeout, metadata=metadata) + return google.api_core.operation.from_gapic( + operation, + self.operations_client, + cloud_redis_pb2.Instance, + metadata_type=any_pb2.Any, + ) + + def update_instance(self, + update_mask, + instance, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Updates the metadata and configuration of a specific Redis instance. + + Completed longrunning.Operation will contain the new instance object + in the response field. The returned operation is automatically deleted + after a few hours, so there is no need to call DeleteOperation. + + Example: + >>> from google.cloud import redis_v1beta1 + >>> + >>> client = redis_v1beta1.CloudRedisClient() + >>> + >>> paths_element = 'display_name' + >>> paths_element_2 = 'memory_size_gb' + >>> paths = [paths_element, paths_element_2] + >>> update_mask = {'paths': paths} + >>> display_name = 'UpdatedDisplayName' + >>> memory_size_gb = 4 + >>> instance = {'display_name': display_name, 'memory_size_gb': memory_size_gb} + >>> + >>> response = client.update_instance(update_mask, instance) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + update_mask (Union[dict, ~google.cloud.redis_v1beta1.types.FieldMask]): Required. Mask of fields to update. At least one path must be supplied in + this field. The elements of the repeated paths field may only include these + fields from ``Instance``: + * ``display_name`` + * ``labels`` + * ``memory_size_gb`` + * ``redis_config`` + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.redis_v1beta1.types.FieldMask` + instance (Union[dict, ~google.cloud.redis_v1beta1.types.Instance]): Required. Update description. + Only fields specified in update_mask are updated. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.redis_v1beta1.types.Instance` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = cloud_redis_pb2.UpdateInstanceRequest( + update_mask=update_mask, + instance=instance, + ) + operation = self._update_instance( + request, retry=retry, timeout=timeout, metadata=metadata) + return google.api_core.operation.from_gapic( + operation, + self.operations_client, + cloud_redis_pb2.Instance, + metadata_type=any_pb2.Any, + ) + + def delete_instance(self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Deletes a specific Redis instance. Instance stops serving and data is + deleted. + + Example: + >>> from google.cloud import redis_v1beta1 + >>> + >>> client = redis_v1beta1.CloudRedisClient() + >>> + >>> name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') + >>> + >>> response = client.delete_instance(name) + >>> + >>> def callback(operation_future): + ... # Handle result. + ... result = operation_future.result() + >>> + >>> response.add_done_callback(callback) + >>> + >>> # Handle metadata. + >>> metadata = response.metadata() + + Args: + name (str): Required. Redis instance resource name using the form: + :: + + `projects/{project_id}/locations/{location_id}/instances/{instance_id}` + where ``location_id`` refers to a GCP region + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.redis_v1beta1.types._OperationFuture` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + request = cloud_redis_pb2.DeleteInstanceRequest(name=name, ) + operation = self._delete_instance( + request, retry=retry, timeout=timeout, metadata=metadata) + return google.api_core.operation.from_gapic( + operation, + self.operations_client, + empty_pb2.Empty, + metadata_type=any_pb2.Any, + ) diff --git a/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client_config.py b/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client_config.py new file mode 100644 index 000000000000..3af57fd0c972 --- /dev/null +++ b/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client_config.py @@ -0,0 +1,48 @@ +config = { + "interfaces": { + "google.cloud.redis.v1beta1.CloudRedis": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "ListInstances": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetInstance": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "CreateInstance": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "UpdateInstance": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "DeleteInstance": { + "timeout_millis": 60000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/redis/google/cloud/redis_v1beta1/gapic/enums.py b/redis/google/cloud/redis_v1beta1/gapic/enums.py new file mode 100644 index 000000000000..c245a013f02e --- /dev/null +++ b/redis/google/cloud/redis_v1beta1/gapic/enums.py @@ -0,0 +1,53 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Wrappers for protocol buffer enum types.""" + + +class Instance(object): + class State(object): + """ + Represents the different states of a Redis instance. + + Attributes: + STATE_UNSPECIFIED (int): Not set. + CREATING (int): Redis instance is being created. + READY (int): Redis instance has been created and is fully usable. + UPDATING (int): Redis instance configuration is being updated. Certain kinds of updates + may cause the instance to become unusable while the update is in + progress. + DELETING (int): Redis instance is being deleted. + REPAIRING (int): Redis instance is being repaired and may be unusable. Details can be + found in the ``status_message`` field. + MAINTENANCE (int): Maintenance is being performed on this Redis instance. 
+ """ + STATE_UNSPECIFIED = 0 + CREATING = 1 + READY = 2 + UPDATING = 3 + DELETING = 4 + REPAIRING = 5 + MAINTENANCE = 6 + + class Tier(object): + """ + Available service tiers to choose from + + Attributes: + TIER_UNSPECIFIED (int): Not set. + BASIC (int): BASIC tier: standalone instance + STANDARD_HA (int): STANDARD_HA tier: highly available primary/replica instances + """ + TIER_UNSPECIFIED = 0 + BASIC = 1 + STANDARD_HA = 3 diff --git a/redis/google/cloud/redis_v1beta1/proto/__init__.py b/redis/google/cloud/redis_v1beta1/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/redis/google/cloud/redis_v1beta1/proto/cloud_redis_pb2.py b/redis/google/cloud/redis_v1beta1/proto/cloud_redis_pb2.py new file mode 100644 index 000000000000..f9e658b005a9 --- /dev/null +++ b/redis/google/cloud/redis_v1beta1/proto/cloud_redis_pb2.py @@ -0,0 +1,1015 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/redis_v1beta1/proto/cloud_redis.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/redis_v1beta1/proto/cloud_redis.proto', + package='google.cloud.redis.v1beta1', + syntax='proto3', + serialized_pb=_b('\n2google/cloud/redis_v1beta1/proto/cloud_redis.proto\x12\x1agoogle.cloud.redis.v1beta1\x1a\x1cgoogle/api/annotations.proto\x1a#google/longrunning/operations.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xe4\x06\n\x08Instance\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12@\n\x06labels\x18\x03 \x03(\x0b\x32\x30.google.cloud.redis.v1beta1.Instance.LabelsEntry\x12\x13\n\x0blocation_id\x18\x04 \x01(\t\x12\x1f\n\x17\x61lternative_location_id\x18\x05 \x01(\t\x12\x15\n\rredis_version\x18\x07 \x01(\t\x12\x19\n\x11reserved_ip_range\x18\t \x01(\t\x12\x0c\n\x04host\x18\n \x01(\t\x12\x0c\n\x04port\x18\x0b \x01(\x05\x12\x1b\n\x13\x63urrent_location_id\x18\x0c \x01(\t\x12/\n\x0b\x63reate_time\x18\r \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x39\n\x05state\x18\x0e \x01(\x0e\x32*.google.cloud.redis.v1beta1.Instance.State\x12\x16\n\x0estatus_message\x18\x0f \x01(\t\x12M\n\rredis_configs\x18\x10 \x03(\x0b\x32\x36.google.cloud.redis.v1beta1.Instance.RedisConfigsEntry\x12\x37\n\x04tier\x18\x11 \x01(\x0e\x32).google.cloud.redis.v1beta1.Instance.Tier\x12\x16\n\x0ememory_size_gb\x18\x12 \x01(\x05\x12\x1a\n\x12\x61uthorized_network\x18\x14 \x01(\t\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x33\n\x11RedisConfigsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01\"s\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x43REATING\x10\x01\x12\t\n\x05READY\x10\x02\x12\x0c\n\x08UPDATING\x10\x03\x12\x0c\n\x08\x44\x45LETING\x10\x04\x12\r\n\tREPAIRING\x10\x05\x12\x0f\n\x0bMAINTENANCE\x10\x06\"8\n\x04Tier\x12\x14\n\x10TIER_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x0f\n\x0bSTANDARD_HA\x10\x03\"M\n\x14ListInstancesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"i\n\x15ListInstancesResponse\x12\x37\n\tinstances\x18\x01 \x03(\x0b\x32$.google.cloud.redis.v1beta1.Instance\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"\"\n\x12GetInstanceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"t\n\x15\x43reateInstanceRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x13\n\x0binstance_id\x18\x02 \x01(\t\x12\x36\n\x08instance\x18\x03 \x01(\x0b\x32$.google.cloud.redis.v1beta1.Instance\"\x80\x01\n\x15UpdateInstanceRequest\x12/\n\x0bupdate_mask\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x36\n\x08instance\x18\x02 \x01(\x0b\x32$.google.cloud.redis.v1beta1.Instance\"%\n\x15\x44\x65leteInstanceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\xce\x01\n\x10LocationMetadata\x12Y\n\x0f\x61vailable_zones\x18\x01 \x03(\x0b\x32@.google.cloud.redis.v1beta1.LocationMetadata.AvailableZonesEntry\x1a_\n\x13\x41vailableZonesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x37\n\x05value\x18\x02 \x01(\x0b\x32(.google.cloud.redis.v1beta1.ZoneMetadata:\x02\x38\x01\"\x0e\n\x0cZoneMetadata2\xe1\x06\n\nCloudRedis\x12\xb0\x01\n\rListInstances\x12\x30.google.cloud.redis.v1beta1.ListInstancesRequest\x1a\x31.google.cloud.redis.v1beta1.ListInstancesResponse\":\x82\xd3\xe4\x93\x02\x34\x12\x32/v1beta1/{parent=projects/*/locations/*}/instances\x12\x9f\x01\n\x0bGetInstance\x12..google.cloud.redis.v1beta1.GetInstanceRequest\x1a$.google.cloud.redis.v1beta1.Instance\":\x82\xd3\xe4\x93\x02\x34\x12\x32/v1beta1/{name=projects/*/locations/*/instances/*}\x12\xa8\x01\n\x0e\x43reateInstance\x12\x31.google.cloud.redis.v1beta1.CreateInstanceRequest\x1a\x1d.google.longrunning.Operation\"D\x82\xd3\xe4\x93\x02>\"2/v1beta1/{parent=projects/*/locations/*}/instances:\x08instance\x12\xb1\x01\n\x0eUpdateInstance\x12\x31.google.cloud.redis.v1beta1.UpdateInstanceRequest\x1a\x1d.google.longrunning.Operation\"M\x82\xd3\xe4\x93\x02G2;/v1beta1/{instance.name=projects/*/locations/*/instances/*}:\x08instance\x12\x9e\x01\n\x0e\x44\x65leteInstance\x12\x31.google.cloud.redis.v1beta1.DeleteInstanceRequest\x1a\x1d.google.longrunning.Operation\":\x82\xd3\xe4\x93\x02\x34*2/v1beta1/{name=projects/*/locations/*/instances/*}B\x7f\n\x1e\x63om.google.cloud.redis.v1beta1B\x1a\x43loudRedisServiceBetaProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/redis/v1beta1;redisb\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_longrunning_dot_operations__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) + + + +_INSTANCE_STATE = _descriptor.EnumDescriptor( + name='State', + full_name='google.cloud.redis.v1beta1.Instance.State', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='STATE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='CREATING', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='READY', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + 
name='UPDATING', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DELETING', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REPAIRING', index=5, number=5, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MAINTENANCE', index=6, number=6, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=912, + serialized_end=1027, +) +_sym_db.RegisterEnumDescriptor(_INSTANCE_STATE) + +_INSTANCE_TIER = _descriptor.EnumDescriptor( + name='Tier', + full_name='google.cloud.redis.v1beta1.Instance.Tier', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='TIER_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BASIC', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='STANDARD_HA', index=2, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1029, + serialized_end=1085, +) +_sym_db.RegisterEnumDescriptor(_INSTANCE_TIER) + + +_INSTANCE_LABELSENTRY = _descriptor.Descriptor( + name='LabelsEntry', + full_name='google.cloud.redis.v1beta1.Instance.LabelsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.cloud.redis.v1beta1.Instance.LabelsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='google.cloud.redis.v1beta1.Instance.LabelsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=812, + serialized_end=857, +) + +_INSTANCE_REDISCONFIGSENTRY = _descriptor.Descriptor( + name='RedisConfigsEntry', + full_name='google.cloud.redis.v1beta1.Instance.RedisConfigsEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.cloud.redis.v1beta1.Instance.RedisConfigsEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='google.cloud.redis.v1beta1.Instance.RedisConfigsEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + 
], + serialized_start=859, + serialized_end=910, +) + +_INSTANCE = _descriptor.Descriptor( + name='Instance', + full_name='google.cloud.redis.v1beta1.Instance', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.redis.v1beta1.Instance.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='display_name', full_name='google.cloud.redis.v1beta1.Instance.display_name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='labels', full_name='google.cloud.redis.v1beta1.Instance.labels', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='location_id', full_name='google.cloud.redis.v1beta1.Instance.location_id', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='alternative_location_id', full_name='google.cloud.redis.v1beta1.Instance.alternative_location_id', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='redis_version', full_name='google.cloud.redis.v1beta1.Instance.redis_version', index=5, + number=7, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='reserved_ip_range', full_name='google.cloud.redis.v1beta1.Instance.reserved_ip_range', index=6, + number=9, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='host', full_name='google.cloud.redis.v1beta1.Instance.host', index=7, + number=10, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='port', full_name='google.cloud.redis.v1beta1.Instance.port', index=8, + number=11, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='current_location_id', 
full_name='google.cloud.redis.v1beta1.Instance.current_location_id', index=9, + number=12, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='create_time', full_name='google.cloud.redis.v1beta1.Instance.create_time', index=10, + number=13, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='state', full_name='google.cloud.redis.v1beta1.Instance.state', index=11, + number=14, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='status_message', full_name='google.cloud.redis.v1beta1.Instance.status_message', index=12, + number=15, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='redis_configs', full_name='google.cloud.redis.v1beta1.Instance.redis_configs', index=13, + number=16, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='tier', full_name='google.cloud.redis.v1beta1.Instance.tier', index=14, + number=17, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='memory_size_gb', full_name='google.cloud.redis.v1beta1.Instance.memory_size_gb', index=15, + number=18, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='authorized_network', full_name='google.cloud.redis.v1beta1.Instance.authorized_network', index=16, + number=20, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_INSTANCE_LABELSENTRY, _INSTANCE_REDISCONFIGSENTRY, ], + enum_types=[ + _INSTANCE_STATE, + _INSTANCE_TIER, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=217, + serialized_end=1085, +) + + +_LISTINSTANCESREQUEST = _descriptor.Descriptor( + name='ListInstancesRequest', + full_name='google.cloud.redis.v1beta1.ListInstancesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.cloud.redis.v1beta1.ListInstancesRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.cloud.redis.v1beta1.ListInstancesRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.cloud.redis.v1beta1.ListInstancesRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1087, + serialized_end=1164, +) + + +_LISTINSTANCESRESPONSE = _descriptor.Descriptor( + name='ListInstancesResponse', + full_name='google.cloud.redis.v1beta1.ListInstancesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='instances', full_name='google.cloud.redis.v1beta1.ListInstancesResponse.instances', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.cloud.redis.v1beta1.ListInstancesResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1166, + serialized_end=1271, +) + + +_GETINSTANCEREQUEST = _descriptor.Descriptor( + name='GetInstanceRequest', + full_name='google.cloud.redis.v1beta1.GetInstanceRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.redis.v1beta1.GetInstanceRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1273, + serialized_end=1307, +) + + +_CREATEINSTANCEREQUEST = _descriptor.Descriptor( + name='CreateInstanceRequest', + full_name='google.cloud.redis.v1beta1.CreateInstanceRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.cloud.redis.v1beta1.CreateInstanceRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='instance_id', full_name='google.cloud.redis.v1beta1.CreateInstanceRequest.instance_id', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='instance', full_name='google.cloud.redis.v1beta1.CreateInstanceRequest.instance', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1309, + serialized_end=1425, +) + + +_UPDATEINSTANCEREQUEST = _descriptor.Descriptor( + name='UpdateInstanceRequest', + full_name='google.cloud.redis.v1beta1.UpdateInstanceRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.cloud.redis.v1beta1.UpdateInstanceRequest.update_mask', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='instance', full_name='google.cloud.redis.v1beta1.UpdateInstanceRequest.instance', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1428, + serialized_end=1556, +) + + +_DELETEINSTANCEREQUEST = _descriptor.Descriptor( + name='DeleteInstanceRequest', + full_name='google.cloud.redis.v1beta1.DeleteInstanceRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.redis.v1beta1.DeleteInstanceRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1558, + serialized_end=1595, +) + + +_LOCATIONMETADATA_AVAILABLEZONESENTRY = _descriptor.Descriptor( + name='AvailableZonesEntry', + full_name='google.cloud.redis.v1beta1.LocationMetadata.AvailableZonesEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.cloud.redis.v1beta1.LocationMetadata.AvailableZonesEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='google.cloud.redis.v1beta1.LocationMetadata.AvailableZonesEntry.value', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1709, + serialized_end=1804, +) + +_LOCATIONMETADATA = _descriptor.Descriptor( + name='LocationMetadata', + full_name='google.cloud.redis.v1beta1.LocationMetadata', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='available_zones', full_name='google.cloud.redis.v1beta1.LocationMetadata.available_zones', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_LOCATIONMETADATA_AVAILABLEZONESENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1598, + serialized_end=1804, +) + + +_ZONEMETADATA = _descriptor.Descriptor( + name='ZoneMetadata', + full_name='google.cloud.redis.v1beta1.ZoneMetadata', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1806, + serialized_end=1820, +) + +_INSTANCE_LABELSENTRY.containing_type = _INSTANCE +_INSTANCE_REDISCONFIGSENTRY.containing_type = _INSTANCE +_INSTANCE.fields_by_name['labels'].message_type = _INSTANCE_LABELSENTRY +_INSTANCE.fields_by_name['create_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_INSTANCE.fields_by_name['state'].enum_type = _INSTANCE_STATE +_INSTANCE.fields_by_name['redis_configs'].message_type = _INSTANCE_REDISCONFIGSENTRY +_INSTANCE.fields_by_name['tier'].enum_type = _INSTANCE_TIER +_INSTANCE_STATE.containing_type = _INSTANCE +_INSTANCE_TIER.containing_type = _INSTANCE +_LISTINSTANCESRESPONSE.fields_by_name['instances'].message_type = _INSTANCE +_CREATEINSTANCEREQUEST.fields_by_name['instance'].message_type = _INSTANCE +_UPDATEINSTANCEREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_UPDATEINSTANCEREQUEST.fields_by_name['instance'].message_type = _INSTANCE +_LOCATIONMETADATA_AVAILABLEZONESENTRY.fields_by_name['value'].message_type = _ZONEMETADATA +_LOCATIONMETADATA_AVAILABLEZONESENTRY.containing_type = _LOCATIONMETADATA +_LOCATIONMETADATA.fields_by_name['available_zones'].message_type = _LOCATIONMETADATA_AVAILABLEZONESENTRY +DESCRIPTOR.message_types_by_name['Instance'] = _INSTANCE +DESCRIPTOR.message_types_by_name['ListInstancesRequest'] = _LISTINSTANCESREQUEST +DESCRIPTOR.message_types_by_name['ListInstancesResponse'] = _LISTINSTANCESRESPONSE +DESCRIPTOR.message_types_by_name['GetInstanceRequest'] = _GETINSTANCEREQUEST +DESCRIPTOR.message_types_by_name['CreateInstanceRequest'] = 
_CREATEINSTANCEREQUEST +DESCRIPTOR.message_types_by_name['UpdateInstanceRequest'] = _UPDATEINSTANCEREQUEST +DESCRIPTOR.message_types_by_name['DeleteInstanceRequest'] = _DELETEINSTANCEREQUEST +DESCRIPTOR.message_types_by_name['LocationMetadata'] = _LOCATIONMETADATA +DESCRIPTOR.message_types_by_name['ZoneMetadata'] = _ZONEMETADATA +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Instance = _reflection.GeneratedProtocolMessageType('Instance', (_message.Message,), dict( + + LabelsEntry = _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), dict( + DESCRIPTOR = _INSTANCE_LABELSENTRY, + __module__ = 'google.cloud.redis_v1beta1.proto.cloud_redis_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.Instance.LabelsEntry) + )) + , + + RedisConfigsEntry = _reflection.GeneratedProtocolMessageType('RedisConfigsEntry', (_message.Message,), dict( + DESCRIPTOR = _INSTANCE_REDISCONFIGSENTRY, + __module__ = 'google.cloud.redis_v1beta1.proto.cloud_redis_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.Instance.RedisConfigsEntry) + )) + , + DESCRIPTOR = _INSTANCE, + __module__ = 'google.cloud.redis_v1beta1.proto.cloud_redis_pb2' + , + __doc__ = """A Google Cloud Redis instance. + + + Attributes: + name: + Required. Unique name of the resource in this scope including + project and location using the form: ``projects/{project_id}/l + ocations/{location_id}/instances/{instance_id}`` Note: Redis + instances are managed and addressed at regional level so + location\_id here refers to a GCP region; however, users get + to choose which specific zone (or collection of zones for + cross-zone instances) an instance should be provisioned in. + Refer to [location\_id] and [alternative\_location\_id] fields + for more details. + display_name: + An arbitrary and optional user-provided name for the instance. + labels: + Resource labels to represent user provided metadata + location_id: + Optional. The zone where the instance will be provisioned. If + not provided, the service will choose a zone for the instance. + For STANDARD\_HA tier, instances will be created across two + zones for protection against zonal failures. if + [alternative\_location\_id] is also provided, it must be + different from [location\_id]. + alternative_location_id: + Optional. Only applicable to STANDARD\_HA tier which protects + the instance against zonal failures by provisioning it across + two zones. If provided, it must be a different zone from the + one provided in [location\_id]. + redis_version: + Optional. The version of Redis software. If not provided, + latest supported version will be used. + reserved_ip_range: + Optional. The CIDR range of internal addresses that are + reserved for this instance. If not provided, the service will + choose an unused /29 block, for example, 10.0.0.0/29 or + 192.168.0.0/29. Ranges must be unique and non-overlapping with + existing subnets in a network. + host: + Output only. Hostname or IP address of the exposed redis + endpoint used by clients to connect to the service. + port: + Output only. The port number of the exposed redis endpoint. + current_location_id: + Output only. The current zone where the Redis endpoint is + placed. In single zone deployments, this will always be the + same as [location\_id] provided by the user at creation time. + In cross-zone instances (only applicable in STANDARD\_HA + tier), this can be either [location\_id] or + [alternative\_location\_id] and can change on a failover + event. + create_time: + Output only. 
The time the instance was created. + state: + Output only. The current state of this instance. + status_message: + Output only. Additional information about the current status + of this instance, if available. + redis_configs: + Optional. Redis configuration parameters, according to + http://redis.io/topics/config. Currently, the only supported + parameters are: \* maxmemory-policy \* notify-keyspace-events + tier: + Required. The service tier of the instance. + memory_size_gb: + Required. Redis memory size in GB. + authorized_network: + Optional. The full name of the Google Compute Engine `network + `__ to which + the instance is connected. If left unspecified, the + ``default`` network will be used. + """, + # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.Instance) + )) +_sym_db.RegisterMessage(Instance) +_sym_db.RegisterMessage(Instance.LabelsEntry) +_sym_db.RegisterMessage(Instance.RedisConfigsEntry) + +ListInstancesRequest = _reflection.GeneratedProtocolMessageType('ListInstancesRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTINSTANCESREQUEST, + __module__ = 'google.cloud.redis_v1beta1.proto.cloud_redis_pb2' + , + __doc__ = """Request for + [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances]. + + + Attributes: + parent: + Required. The resource name of the instance location using the + form: ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region + page_size: + The maximum number of items to return. If not specified, a + default value of 1000 will be used by the service. Regardless + of the page\_size value, the response may include a partial + list and a caller should only rely on response's [next\_page\_ + token][CloudRedis.ListInstancesResponse.next\_page\_token] to + determine if there are more instances left to be queried. + page_token: + The next\_page\_token value returned from a previous List + request, if any. + """, + # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.ListInstancesRequest) + )) +_sym_db.RegisterMessage(ListInstancesRequest) + +ListInstancesResponse = _reflection.GeneratedProtocolMessageType('ListInstancesResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTINSTANCESRESPONSE, + __module__ = 'google.cloud.redis_v1beta1.proto.cloud_redis_pb2' + , + __doc__ = """Response for + [ListInstances][google.cloud.redis.v1beta1.CloudRedis.ListInstances]. + + + Attributes: + instances: + A list of Redis instances in the project in the specified + location, or across all locations. If the ``location_id`` in + the parent field of the request is "-", all regions available + to the project are queried, and the results aggregated. If in + such an aggregated query a location is unavailable, a dummy + Redis entry is included in the response with the "name" field + set to a value of the form + projects/{project\_id}/locations/{location\_id}/instances/- + and the "status" field set to ERROR and "status\_message" + field set to "location not available for ListInstances". + next_page_token: + Token to retrieve the next page of results, or empty if there + are no more results in the list. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.ListInstancesResponse) + )) +_sym_db.RegisterMessage(ListInstancesResponse) + +GetInstanceRequest = _reflection.GeneratedProtocolMessageType('GetInstanceRequest', (_message.Message,), dict( + DESCRIPTOR = _GETINSTANCEREQUEST, + __module__ = 'google.cloud.redis_v1beta1.proto.cloud_redis_pb2' + , + __doc__ = """Request for + [GetInstance][google.cloud.redis.v1beta1.CloudRedis.GetInstance]. + + + Attributes: + name: + Required. Redis instance resource name using the form: ``proje + cts/{project_id}/locations/{location_id}/instances/{instance_i + d}`` where ``location_id`` refers to a GCP region + """, + # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.GetInstanceRequest) + )) +_sym_db.RegisterMessage(GetInstanceRequest) + +CreateInstanceRequest = _reflection.GeneratedProtocolMessageType('CreateInstanceRequest', (_message.Message,), dict( + DESCRIPTOR = _CREATEINSTANCEREQUEST, + __module__ = 'google.cloud.redis_v1beta1.proto.cloud_redis_pb2' + , + __doc__ = """Request for + [CreateInstance][google.cloud.redis.v1beta1.CloudRedis.CreateInstance]. + + + Attributes: + parent: + Required. The resource name of the instance location using the + form: ``projects/{project_id}/locations/{location_id}`` where + ``location_id`` refers to a GCP region + instance_id: + Required. The logical name of the Redis instance in the + customer project with the following restrictions: - Must + contain only lowercase letters, numbers, and hyphens. - Must + start with a letter. - Must be between 1-40 characters. - + Must end with a number or a letter. - Must be unique within + the customer project / location + instance: + Required. A Redis [Instance] resource + """, + # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.CreateInstanceRequest) + )) +_sym_db.RegisterMessage(CreateInstanceRequest) + +UpdateInstanceRequest = _reflection.GeneratedProtocolMessageType('UpdateInstanceRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATEINSTANCEREQUEST, + __module__ = 'google.cloud.redis_v1beta1.proto.cloud_redis_pb2' + , + __doc__ = """Request for + [UpdateInstance][google.cloud.redis.v1beta1.CloudRedis.UpdateInstance]. + + + Attributes: + update_mask: + Required. Mask of fields to update. At least one path must be + supplied in this field. The elements of the repeated paths + field may only include these fields from + [Instance][CloudRedis.Instance]: \* ``display_name`` \* + ``labels`` \* ``memory_size_gb`` \* ``redis_config`` + instance: + Required. Update description. Only fields specified in + update\_mask are updated. + """, + # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.UpdateInstanceRequest) + )) +_sym_db.RegisterMessage(UpdateInstanceRequest) + +DeleteInstanceRequest = _reflection.GeneratedProtocolMessageType('DeleteInstanceRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETEINSTANCEREQUEST, + __module__ = 'google.cloud.redis_v1beta1.proto.cloud_redis_pb2' + , + __doc__ = """Request for + [DeleteInstance][google.cloud.redis.v1beta1.CloudRedis.DeleteInstance]. + + + Attributes: + name: + Required. 
Redis instance resource name using the form: ``proje + cts/{project_id}/locations/{location_id}/instances/{instance_i + d}`` where ``location_id`` refers to a GCP region + """, + # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.DeleteInstanceRequest) + )) +_sym_db.RegisterMessage(DeleteInstanceRequest) + +LocationMetadata = _reflection.GeneratedProtocolMessageType('LocationMetadata', (_message.Message,), dict( + + AvailableZonesEntry = _reflection.GeneratedProtocolMessageType('AvailableZonesEntry', (_message.Message,), dict( + DESCRIPTOR = _LOCATIONMETADATA_AVAILABLEZONESENTRY, + __module__ = 'google.cloud.redis_v1beta1.proto.cloud_redis_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.LocationMetadata.AvailableZonesEntry) + )) + , + DESCRIPTOR = _LOCATIONMETADATA, + __module__ = 'google.cloud.redis_v1beta1.proto.cloud_redis_pb2' + , + __doc__ = """This location metadata represents additional configuration options for a + given location where a Redis instance may be created. All fields are + output only. It is returned as content of the + ``google.cloud.location.Location.metadata`` field. + + + Attributes: + available_zones: + Output only. The set of available zones in the location. The + map is keyed by the lowercase ID of each zone, as defined by + GCE. These keys can be specified in ``location_id`` or + ``alternative_location_id`` fields when creating a Redis + instance. + """, + # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.LocationMetadata) + )) +_sym_db.RegisterMessage(LocationMetadata) +_sym_db.RegisterMessage(LocationMetadata.AvailableZonesEntry) + +ZoneMetadata = _reflection.GeneratedProtocolMessageType('ZoneMetadata', (_message.Message,), dict( + DESCRIPTOR = _ZONEMETADATA, + __module__ = 'google.cloud.redis_v1beta1.proto.cloud_redis_pb2' + , + __doc__ = """Defines specific information for a particular zone. Currently empty and + reserved for future use only. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.redis.v1beta1.ZoneMetadata) + )) +_sym_db.RegisterMessage(ZoneMetadata) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\036com.google.cloud.redis.v1beta1B\032CloudRedisServiceBetaProtoP\001Z?google.golang.org/genproto/googleapis/cloud/redis/v1beta1;redis')) +_INSTANCE_LABELSENTRY.has_options = True +_INSTANCE_LABELSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_INSTANCE_REDISCONFIGSENTRY.has_options = True +_INSTANCE_REDISCONFIGSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +_LOCATIONMETADATA_AVAILABLEZONESENTRY.has_options = True +_LOCATIONMETADATA_AVAILABLEZONESENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) + +_CLOUDREDIS = _descriptor.ServiceDescriptor( + name='CloudRedis', + full_name='google.cloud.redis.v1beta1.CloudRedis', + file=DESCRIPTOR, + index=0, + options=None, + serialized_start=1823, + serialized_end=2688, + methods=[ + _descriptor.MethodDescriptor( + name='ListInstances', + full_name='google.cloud.redis.v1beta1.CloudRedis.ListInstances', + index=0, + containing_service=None, + input_type=_LISTINSTANCESREQUEST, + output_type=_LISTINSTANCESRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0024\0222/v1beta1/{parent=projects/*/locations/*}/instances')), + ), + _descriptor.MethodDescriptor( + name='GetInstance', + full_name='google.cloud.redis.v1beta1.CloudRedis.GetInstance', + index=1, + containing_service=None, + input_type=_GETINSTANCEREQUEST, + output_type=_INSTANCE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0024\0222/v1beta1/{name=projects/*/locations/*/instances/*}')), + ), + _descriptor.MethodDescriptor( + name='CreateInstance', + full_name='google.cloud.redis.v1beta1.CloudRedis.CreateInstance', + index=2, + containing_service=None, + input_type=_CREATEINSTANCEREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002>\"2/v1beta1/{parent=projects/*/locations/*}/instances:\010instance')), + ), + _descriptor.MethodDescriptor( + name='UpdateInstance', + full_name='google.cloud.redis.v1beta1.CloudRedis.UpdateInstance', + index=3, + containing_service=None, + input_type=_UPDATEINSTANCEREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002G2;/v1beta1/{instance.name=projects/*/locations/*/instances/*}:\010instance')), + ), + _descriptor.MethodDescriptor( + name='DeleteInstance', + full_name='google.cloud.redis.v1beta1.CloudRedis.DeleteInstance', + index=4, + containing_service=None, + input_type=_DELETEINSTANCEREQUEST, + output_type=google_dot_longrunning_dot_operations__pb2._OPERATION, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0024*2/v1beta1/{name=projects/*/locations/*/instances/*}')), + ), +]) +_sym_db.RegisterServiceDescriptor(_CLOUDREDIS) + +DESCRIPTOR.services_by_name['CloudRedis'] = _CLOUDREDIS + +# @@protoc_insertion_point(module_scope) diff --git a/redis/google/cloud/redis_v1beta1/proto/cloud_redis_pb2_grpc.py b/redis/google/cloud/redis_v1beta1/proto/cloud_redis_pb2_grpc.py new file mode 100644 index 000000000000..616a9f05e25e --- /dev/null +++ 
b/redis/google/cloud/redis_v1beta1/proto/cloud_redis_pb2_grpc.py
@@ -0,0 +1,166 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+import grpc
+
+from google.cloud.redis_v1beta1.proto import cloud_redis_pb2 as google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2
+from google.longrunning import operations_pb2 as google_dot_longrunning_dot_operations__pb2
+
+
+class CloudRedisStub(object):
+  """Configures and manages Cloud Memorystore for Redis instances
+
+  Google Cloud Memorystore for Redis v1beta1
+
+  The `redis.googleapis.com` service implements the Google Cloud Memorystore
+  for Redis API and defines the following resource model for managing Redis
+  instances:
+  * The service works with a collection of cloud projects, named: `/projects/*`
+  * Each project has a collection of available locations, named: `/locations/*`
+  * Each location has a collection of Redis instances, named: `/instances/*`
+  * As such, Redis instances are resources of the form:
+  `/projects/{project_id}/locations/{location_id}/instances/{instance_id}`
+
+  Note that location_id must refer to a GCP `region`; for example:
+  * `projects/redpepper-1290/locations/us-central1/instances/my-redis`
+  """
+
+  def __init__(self, channel):
+    """Constructor.
+
+    Args:
+      channel: A grpc.Channel.
+    """
+    self.ListInstances = channel.unary_unary(
+        '/google.cloud.redis.v1beta1.CloudRedis/ListInstances',
+        request_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.ListInstancesRequest.SerializeToString,
+        response_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.ListInstancesResponse.FromString,
+        )
+    self.GetInstance = channel.unary_unary(
+        '/google.cloud.redis.v1beta1.CloudRedis/GetInstance',
+        request_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.GetInstanceRequest.SerializeToString,
+        response_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.Instance.FromString,
+        )
+    self.CreateInstance = channel.unary_unary(
+        '/google.cloud.redis.v1beta1.CloudRedis/CreateInstance',
+        request_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.CreateInstanceRequest.SerializeToString,
+        response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+        )
+    self.UpdateInstance = channel.unary_unary(
+        '/google.cloud.redis.v1beta1.CloudRedis/UpdateInstance',
+        request_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.UpdateInstanceRequest.SerializeToString,
+        response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+        )
+    self.DeleteInstance = channel.unary_unary(
+        '/google.cloud.redis.v1beta1.CloudRedis/DeleteInstance',
+        request_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.DeleteInstanceRequest.SerializeToString,
+        response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
+        )
+
+
+class CloudRedisServicer(object):
+  """Configures and manages Cloud Memorystore for Redis instances
+
+  Google Cloud Memorystore for Redis v1beta1
+
+  The `redis.googleapis.com` service implements the Google Cloud Memorystore
+  for Redis API and defines the following resource model for managing Redis
+  instances:
+  * The service works with a collection of cloud projects, named: `/projects/*`
+  * Each project has a collection of available locations, named: `/locations/*`
+  * Each location has a collection of Redis instances, named:
`/instances/*`
+  * As such, Redis instances are resources of the form:
+  `/projects/{project_id}/locations/{location_id}/instances/{instance_id}`
+
+  Note that location_id must refer to a GCP `region`; for example:
+  * `projects/redpepper-1290/locations/us-central1/instances/my-redis`
+  """
+
+  def ListInstances(self, request, context):
+    """Lists all Redis instances owned by a project in either the specified
+    location (region) or all locations.
+
+    The location should have the following format:
+    * `projects/{project_id}/locations/{location_id}`
+
+    If `location_id` is specified as `-` (wildcard), then all regions
+    available to the project are queried, and the results are aggregated.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def GetInstance(self, request, context):
+    """Gets the details of a specific Redis instance.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def CreateInstance(self, request, context):
+    """Creates a Redis instance based on the specified tier and memory size.
+
+    By default, the instance is peered to the project's
+    [default network](/compute/docs/networks-and-firewalls#networks).
+
+    The creation is executed asynchronously and callers may check the returned
+    operation to track its progress. Once the operation is completed the Redis
+    instance will be fully functional. The completed longrunning.Operation will
+    contain the new instance object in the response field.
+
+    The returned operation is automatically deleted after a few hours, so there
+    is no need to call DeleteOperation.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def UpdateInstance(self, request, context):
+    """Updates the metadata and configuration of a specific Redis instance.
+
+    The completed longrunning.Operation will contain the new instance object
+    in the response field. The returned operation is automatically deleted
+    after a few hours, so there is no need to call DeleteOperation.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def DeleteInstance(self, request, context):
+    """Deletes a specific Redis instance.  Instance stops serving and data is
+    deleted.
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_CloudRedisServicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListInstances': grpc.unary_unary_rpc_method_handler( + servicer.ListInstances, + request_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.ListInstancesRequest.FromString, + response_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.ListInstancesResponse.SerializeToString, + ), + 'GetInstance': grpc.unary_unary_rpc_method_handler( + servicer.GetInstance, + request_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.GetInstanceRequest.FromString, + response_serializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.Instance.SerializeToString, + ), + 'CreateInstance': grpc.unary_unary_rpc_method_handler( + servicer.CreateInstance, + request_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.CreateInstanceRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + 'UpdateInstance': grpc.unary_unary_rpc_method_handler( + servicer.UpdateInstance, + request_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.UpdateInstanceRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + 'DeleteInstance': grpc.unary_unary_rpc_method_handler( + servicer.DeleteInstance, + request_deserializer=google_dot_cloud_dot_redis__v1beta1_dot_proto_dot_cloud__redis__pb2.DeleteInstanceRequest.FromString, + response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.redis.v1beta1.CloudRedis', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/redis/google/cloud/redis_v1beta1/types.py b/redis/google/cloud/redis_v1beta1/types.py new file mode 100644 index 000000000000..05b300468373 --- /dev/null +++ b/redis/google/cloud/redis_v1beta1/types.py @@ -0,0 +1,47 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+from __future__ import absolute_import
+import sys
+
+from google.api_core.protobuf_helpers import get_messages
+
+from google.api import http_pb2
+from google.cloud.redis_v1beta1.proto import cloud_redis_pb2
+from google.longrunning import operations_pb2
+from google.protobuf import any_pb2
+from google.protobuf import descriptor_pb2
+from google.protobuf import empty_pb2
+from google.protobuf import field_mask_pb2
+from google.protobuf import timestamp_pb2
+from google.rpc import status_pb2
+
+names = []
+for module in (
+        http_pb2,
+        cloud_redis_pb2,
+        operations_pb2,
+        any_pb2,
+        descriptor_pb2,
+        empty_pb2,
+        field_mask_pb2,
+        timestamp_pb2,
+        status_pb2,
+):
+    for name, message in get_messages(module).items():
+        message.__module__ = 'google.cloud.redis_v1beta1.types'
+        setattr(sys.modules[__name__], name, message)
+        names.append(name)
+
+__all__ = tuple(sorted(names))
diff --git a/redis/nox.py b/redis/nox.py
new file mode 100644
index 000000000000..b092b754c40f
--- /dev/null
+++ b/redis/nox.py
@@ -0,0 +1,107 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import absolute_import
+
+import os
+
+import nox
+
+
+LOCAL_DEPS = (
+    os.path.join('..', 'api_core'),
+    os.path.join('..', 'core'),
+)
+
+
+@nox.session
+def default(session):
+    """Default unit test session.
+
+    This is intended to be run **without** an interpreter set, so
+    that the current ``python`` (on the ``PATH``) or the version of
+    Python corresponding to the ``nox`` binary on the ``PATH`` can
+    run the tests.
+    """
+    # Install all test dependencies, then install this package in-place.
+    session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS)
+    session.install('-e', '.')
+
+    # Run py.test against the unit tests.
+    session.run(
+        'py.test',
+        '--quiet',
+        '--cov=google.cloud.redis',
+        '--cov=google.cloud.redis_v1beta1',
+        '--cov=tests.unit',
+        '--cov-append',
+        '--cov-config=.coveragerc',
+        '--cov-report=',
+        '--cov-fail-under=97',
+        'tests/unit',
+        *session.posargs
+    )
+
+
+@nox.session
+@nox.parametrize('py', ['2.7', '3.5', '3.6'])
+def unit(session, py):
+    """Run the unit test suite."""
+
+    # Run unit tests against all supported versions of Python.
+    if py != 'default':
+        session.interpreter = 'python{}'.format(py)
+        # Set the virtualenv directory name.
+        session.virtualenv_dirname = 'unit-' + py
+
+    default(session)
+
+
+@nox.session
+def lint(session):
+    """Run linters.
+
+    Returns a failure if the linters find linting errors or sufficiently
+    serious code quality issues.
+    """
+    session.interpreter = 'python3.6'
+    session.install('flake8', *LOCAL_DEPS)
+    session.install('.')
+    session.run('flake8', 'google', 'tests')
+
+
+@nox.session
+def lint_setup_py(session):
+    """Verify that setup.py is valid (including RST check)."""
+    session.interpreter = 'python3.6'
+
+    # Set the virtualenv directory name.
+ session.virtualenv_dirname = 'setup' + + session.install('docutils', 'pygments') + session.run('python', 'setup.py', 'check', '--restructuredtext', + '--strict') + + +@nox.session +def cover(session): + """Run the final coverage report. + + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.interpreter = 'python3.6' + session.install('coverage', 'pytest-cov') + session.run('coverage', 'report', '--show-missing', '--fail-under=100') + session.run('coverage', 'erase') diff --git a/redis/setup.cfg b/redis/setup.cfg new file mode 100644 index 000000000000..2a9acf13daa9 --- /dev/null +++ b/redis/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal = 1 diff --git a/redis/setup.py b/redis/setup.py new file mode 100644 index 000000000000..63c07704b1c7 --- /dev/null +++ b/redis/setup.py @@ -0,0 +1,87 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import io +import os + +import setuptools + + +# Package metadata. + +name = 'google-cloud-redis' +description = 'Cloud Redis API client library' +version = '0.1.0' +# Should be one of: +# 'Development Status :: 3 - Alpha' +# 'Development Status :: 4 - Beta' +# 'Development Status :: 5 - Production/Stable' +release_status = 'Development Status :: 3 - Alpha' +dependencies = [ + 'google-api-core >= 0.1.0, < 0.2.0dev', +] +extras = { +} + +# Setup boilerplate below this line. + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +# Only include packages under the 'google' namespace. Do not include tests, +# benchmarks, etc. +packages = [ + package for package in setuptools.find_packages() + if package.startswith('google')] + +# Determine which namespaces are needed. 
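+# For this package, ``find_packages()`` is expected to yield roughly:
+#   ['google', 'google.cloud', 'google.cloud.redis_v1beta1',
+#    'google.cloud.redis_v1beta1.gapic', 'google.cloud.redis_v1beta1.proto']
+# (the exact list depends on the checked-out tree), so both the ``google``
+# and ``google.cloud`` namespace packages are declared below.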
+namespaces = ['google'] +if 'google.cloud' in packages: + namespaces.append('google.cloud') + + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author='Google LLC', + author_email='googleapis-packages@google.com', + license='Apache 2.0', + url='https://github.com/GoogleCloudPlatform/google-cloud-python', + classifiers=[ + release_status, + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Apache Software License', + 'Programming Language :: Python', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6', + 'Operating System :: OS Independent', + 'Topic :: Internet', + ], + platforms='Posix; MacOS X; Windows', + packages=packages, + namespace_packages=namespaces, + install_requires=dependencies, + extras_require=extras, + include_package_data=True, + zip_safe=False, +) diff --git a/redis/tests/unit/gapic/v1beta1/test_cloud_redis_client_v1beta1.py b/redis/tests/unit/gapic/v1beta1/test_cloud_redis_client_v1beta1.py new file mode 100644 index 000000000000..72e615802dee --- /dev/null +++ b/redis/tests/unit/gapic/v1beta1/test_cloud_redis_client_v1beta1.py @@ -0,0 +1,367 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
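+
+# These tests drive the generated client through in-memory gRPC channel
+# stubs (defined below), so they run without any network access.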
+"""Unit tests.""" + +import pytest + +from google.rpc import status_pb2 + +from google.cloud import redis_v1beta1 +from google.cloud.redis_v1beta1 import enums +from google.cloud.redis_v1beta1.proto import cloud_redis_pb2 +from google.longrunning import operations_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, + method, + request_serializer=None, + response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestCloudRedisClient(object): + def test_list_instances(self): + # Setup Expected Response + next_page_token = '' + instances_element = {} + instances = [instances_element] + expected_response = { + 'next_page_token': next_page_token, + 'instances': instances + } + expected_response = cloud_redis_pb2.ListInstancesResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = redis_v1beta1.CloudRedisClient(channel=channel) + + # Setup Request + parent = client.location_path('[PROJECT]', '[LOCATION]') + + paged_list_response = client.list_instances(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.instances[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = cloud_redis_pb2.ListInstancesRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_instances_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = redis_v1beta1.CloudRedisClient(channel=channel) + + # Setup request + parent = client.location_path('[PROJECT]', '[LOCATION]') + + paged_list_response = client.list_instances(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_get_instance(self): + # Setup Expected Response + name_2 = 'name2-1052831874' + display_name = 'displayName1615086568' + location_id = 'locationId552319461' + alternative_location_id = 'alternativeLocationId-718920621' + redis_version = 'redisVersion-685310444' + reserved_ip_range = 'reservedIpRange-1082940580' + host = 'host3208616' + port = 3446913 + current_location_id = 'currentLocationId1312712735' + status_message = 'statusMessage-239442758' + memory_size_gb = 34199707 + authorized_network = 'authorizedNetwork-1733809270' + expected_response = { + 'name': name_2, + 'display_name': display_name, + 'location_id': location_id, + 'alternative_location_id': alternative_location_id, + 'redis_version': redis_version, + 'reserved_ip_range': reserved_ip_range, + 'host': host, + 'port': port, + 'current_location_id': current_location_id, + 'status_message': status_message, + 'memory_size_gb': memory_size_gb, + 'authorized_network': authorized_network + } + expected_response 
= cloud_redis_pb2.Instance(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = redis_v1beta1.CloudRedisClient(channel=channel) + + # Setup Request + name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') + + response = client.get_instance(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloud_redis_pb2.GetInstanceRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_instance_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = redis_v1beta1.CloudRedisClient(channel=channel) + + # Setup request + name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') + + with pytest.raises(CustomException): + client.get_instance(name) + + def test_create_instance(self): + # Setup Expected Response + name = 'name3373707' + display_name = 'displayName1615086568' + location_id = 'locationId552319461' + alternative_location_id = 'alternativeLocationId-718920621' + redis_version = 'redisVersion-685310444' + reserved_ip_range = 'reservedIpRange-1082940580' + host = 'host3208616' + port = 3446913 + current_location_id = 'currentLocationId1312712735' + status_message = 'statusMessage-239442758' + memory_size_gb_2 = 1493816946 + authorized_network = 'authorizedNetwork-1733809270' + expected_response = { + 'name': name, + 'display_name': display_name, + 'location_id': location_id, + 'alternative_location_id': alternative_location_id, + 'redis_version': redis_version, + 'reserved_ip_range': reserved_ip_range, + 'host': host, + 'port': port, + 'current_location_id': current_location_id, + 'status_message': status_message, + 'memory_size_gb': memory_size_gb_2, + 'authorized_network': authorized_network + } + expected_response = cloud_redis_pb2.Instance(**expected_response) + operation = operations_pb2.Operation( + name='operations/test_create_instance', done=True) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + client = redis_v1beta1.CloudRedisClient(channel=channel) + + # Setup Request + parent = client.location_path('[PROJECT]', '[LOCATION]') + instance_id = 'test_instance' + tier = enums.Instance.Tier.BASIC + memory_size_gb = 1 + instance = {'tier': tier, 'memory_size_gb': memory_size_gb} + + response = client.create_instance(parent, instance_id, instance) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = cloud_redis_pb2.CreateInstanceRequest( + parent=parent, instance_id=instance_id, instance=instance) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_instance_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name='operations/test_create_instance_exception', done=True) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + client = redis_v1beta1.CloudRedisClient(channel=channel) + + # Setup Request + parent = client.location_path('[PROJECT]', '[LOCATION]') + instance_id = 'test_instance' + tier = enums.Instance.Tier.BASIC + memory_size_gb = 1 + instance = {'tier': tier, 'memory_size_gb': memory_size_gb} + + response = client.create_instance(parent, instance_id, instance) + exception = response.exception() + assert exception.errors[0] == 
error + + def test_update_instance(self): + # Setup Expected Response + name = 'name3373707' + display_name_2 = 'displayName21615000987' + location_id = 'locationId552319461' + alternative_location_id = 'alternativeLocationId-718920621' + redis_version = 'redisVersion-685310444' + reserved_ip_range = 'reservedIpRange-1082940580' + host = 'host3208616' + port = 3446913 + current_location_id = 'currentLocationId1312712735' + status_message = 'statusMessage-239442758' + memory_size_gb_2 = 1493816946 + authorized_network = 'authorizedNetwork-1733809270' + expected_response = { + 'name': name, + 'display_name': display_name_2, + 'location_id': location_id, + 'alternative_location_id': alternative_location_id, + 'redis_version': redis_version, + 'reserved_ip_range': reserved_ip_range, + 'host': host, + 'port': port, + 'current_location_id': current_location_id, + 'status_message': status_message, + 'memory_size_gb': memory_size_gb_2, + 'authorized_network': authorized_network + } + expected_response = cloud_redis_pb2.Instance(**expected_response) + operation = operations_pb2.Operation( + name='operations/test_update_instance', done=True) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + client = redis_v1beta1.CloudRedisClient(channel=channel) + + # Setup Request + paths_element = 'display_name' + paths_element_2 = 'memory_size_gb' + paths = [paths_element, paths_element_2] + update_mask = {'paths': paths} + display_name = 'UpdatedDisplayName' + memory_size_gb = 4 + instance = { + 'display_name': display_name, + 'memory_size_gb': memory_size_gb + } + + response = client.update_instance(update_mask, instance) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = cloud_redis_pb2.UpdateInstanceRequest( + update_mask=update_mask, instance=instance) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_instance_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name='operations/test_update_instance_exception', done=True) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + client = redis_v1beta1.CloudRedisClient(channel=channel) + + # Setup Request + paths_element = 'display_name' + paths_element_2 = 'memory_size_gb' + paths = [paths_element, paths_element_2] + update_mask = {'paths': paths} + display_name = 'UpdatedDisplayName' + memory_size_gb = 4 + instance = { + 'display_name': display_name, + 'memory_size_gb': memory_size_gb + } + + response = client.update_instance(update_mask, instance) + exception = response.exception() + assert exception.errors[0] == error + + def test_delete_instance(self): + # Setup Expected Response + expected_response = {} + expected_response = empty_pb2.Empty(**expected_response) + operation = operations_pb2.Operation( + name='operations/test_delete_instance', done=True) + operation.response.Pack(expected_response) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + client = redis_v1beta1.CloudRedisClient(channel=channel) + + # Setup Request + name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') + + response = client.delete_instance(name) + result = response.result() + assert expected_response == result + + assert len(channel.requests) == 1 + expected_request = cloud_redis_pb2.DeleteInstanceRequest(name=name) + actual_request 
= channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_instance_exception(self): + # Setup Response + error = status_pb2.Status() + operation = operations_pb2.Operation( + name='operations/test_delete_instance_exception', done=True) + operation.error.CopyFrom(error) + + # Mock the API response + channel = ChannelStub(responses=[operation]) + client = redis_v1beta1.CloudRedisClient(channel=channel) + + # Setup Request + name = client.instance_path('[PROJECT]', '[LOCATION]', '[INSTANCE]') + + response = client.delete_instance(name) + exception = response.exception() + assert exception.errors[0] == error From 7b7456457e0613ae004e093f35d883eb2a5e979c Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 8 May 2018 10:43:28 -0700 Subject: [PATCH 02/75] Release 0.1.0 (#5310) --- redis/CHANGELOG.md | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 redis/CHANGELOG.md diff --git a/redis/CHANGELOG.md b/redis/CHANGELOG.md new file mode 100644 index 000000000000..61f74dee8ec7 --- /dev/null +++ b/redis/CHANGELOG.md @@ -0,0 +1,11 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-redis/#history + +## 0.1.0 + +### New Features +Initial version of Redis client library v1beta1. + From 2e6825a873802db77ffdfd74724870c3ddc7ff11 Mon Sep 17 00:00:00 2001 From: shollyman Date: Tue, 8 May 2018 13:20:00 -0700 Subject: [PATCH 03/75] BigQuery: Add additional statistics to query plan stages. (#5307) * Add additional statistics to query plan entries. * Add started/ended properties to QueryPlanEntry * Reimplement QueryPlanEntry as API-keyed property dict * Correct comment. * Address reviewer comments: ditch _PROPERTY_TO_API_FIELD, comment alignment, and explicit None returns for start/end --- bigquery/google/cloud/bigquery/job.py | 295 ++++++++++++++++++-------- bigquery/tests/unit/test_job.py | 167 +++++++++++---- 2 files changed, 329 insertions(+), 133 deletions(-) diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py index c6436a85d11e..e9c3bfa783ec 100644 --- a/bigquery/google/cloud/bigquery/job.py +++ b/bigquery/google/cloud/bigquery/job.py @@ -2475,115 +2475,232 @@ def __eq__(self, other): class QueryPlanEntry(object): - """Map a single entry in a query plan. + """QueryPlanEntry represents a single stage of a query execution plan. - :type name: str - :param name: name of the entry + See + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs + for the underlying API representation within query statistics. - :type entry_id: int - :param entry_id: ID of the entry + """ - :type wait_ratio_avg: float - :param wait_ratio_avg: average wait ratio + def __init__(self): + self._properties = {} - :type wait_ratio_max: float - :param wait_ratio_max: maximum wait ratio + @classmethod + def from_api_repr(cls, resource): + """Factory: construct instance from the JSON repr. 
- :type read_ratio_avg: float - :param read_ratio_avg: average read ratio + Args: + resource(Dict[str: object]): + ExplainQueryStage representation returned from API - :type read_ratio_max: float - :param read_ratio_max: maximum read ratio + Returns: + google.cloud.bigquery.QueryPlanEntry: + Query plan entry parsed from ``resource`` + """ + entry = cls() + entry._properties = resource + return entry - :type compute_ratio_avg: float - :param compute_ratio_avg: average compute ratio + @property + def name(self): + """Union[str, None]: Human-readable name of the stage.""" + return self._properties.get('name') - :type compute_ratio_max: float - :param compute_ratio_max: maximum compute ratio + @property + def entry_id(self): + """Union[str, None]: Unique ID for the stage within the plan.""" + return self._properties.get('id') - :type write_ratio_avg: float - :param write_ratio_avg: average write ratio + @property + def start(self): + """Union[Datetime, None]: Datetime when the stage started.""" + if self._properties.get('startMs') is None: + return None + return _datetime_from_microseconds( + self._properties.get('startMs') * 1000.0) - :type write_ratio_max: float - :param write_ratio_max: maximum write ratio + @property + def end(self): + """Union[Datetime, None]: Datetime when the stage ended.""" + if self._properties.get('endMs') is None: + return None + return _datetime_from_microseconds( + self._properties.get('endMs') * 1000.0) - :type records_read: int - :param records_read: number of records read + @property + def input_stages(self): + """List(int): Entry IDs for stages that were inputs for this stage.""" + return self._properties.get('inputStages', []) - :type records_written: int - :param records_written: number of records written + @property + def parallel_inputs(self): + """Union[int, None]: Number of parallel input segments within + the stage. + """ + return self._properties.get('parallelInputs') - :type status: str - :param status: entry status + @property + def completed_parallel_inputs(self): + """Union[int, None]: Number of parallel input segments completed.""" + return self._properties.get('completedParallelInputs') - :type steps: List(QueryPlanEntryStep) - :param steps: steps in the entry - """ - def __init__(self, - name, - entry_id, - wait_ratio_avg, - wait_ratio_max, - read_ratio_avg, - read_ratio_max, - compute_ratio_avg, - compute_ratio_max, - write_ratio_avg, - write_ratio_max, - records_read, - records_written, - status, - steps): - self.name = name - self.entry_id = entry_id - self.wait_ratio_avg = wait_ratio_avg - self.wait_ratio_max = wait_ratio_max - self.read_ratio_avg = read_ratio_avg - self.read_ratio_max = read_ratio_max - self.compute_ratio_avg = compute_ratio_avg - self.compute_ratio_max = compute_ratio_max - self.write_ratio_avg = write_ratio_avg - self.write_ratio_max = write_ratio_max - self.records_read = records_read - self.records_written = records_written - self.status = status - self.steps = steps + @property + def wait_ms_avg(self): + """Union[int, None]: Milliseconds the average worker spent waiting to + be scheduled. + """ + return self._properties.get('waitMsAvg') - @classmethod - def from_api_repr(cls, resource): - """Factory: construct instance from the JSON repr. + @property + def wait_ms_max(self): + """Union[int, None]: Milliseconds the slowest worker spent waiting to + be scheduled. 
+        """
+        return self._properties.get('waitMsMax')
 
-    :type resource: dict
-    :param resource: JSON representation of the entry
+    @property
+    def wait_ratio_avg(self):
+        """Union[float, None]: Ratio of time the average worker spent waiting
+        to be scheduled, relative to the longest time spent by any worker in
+        any stage of the overall plan.
+        """
+        return self._properties.get('waitRatioAvg')
 
-    :rtype: :class:`QueryPlanEntry`
-    :return: new instance built from the resource
+    @property
+    def wait_ratio_max(self):
+        """Union[float, None]: Ratio of time the slowest worker spent waiting
+        to be scheduled, relative to the longest time spent by any worker in
+        any stage of the overall plan.
         """
-        records_read = resource.get('recordsRead')
-        if records_read is not None:
-            records_read = int(records_read)
+        return self._properties.get('waitRatioMax')
 
-        records_written = resource.get('recordsWritten')
-        if records_written is not None:
-            records_written = int(records_written)
+    @property
+    def read_ms_avg(self):
+        """Union[int, None]: Milliseconds the average worker spent reading
+        input.
+        """
+        return self._properties.get('readMsAvg')
 
-        return cls(
-            name=resource.get('name'),
-            entry_id=resource.get('id'),
-            wait_ratio_avg=resource.get('waitRatioAvg'),
-            wait_ratio_max=resource.get('waitRatioMax'),
-            read_ratio_avg=resource.get('readRatioAvg'),
-            read_ratio_max=resource.get('readRatioMax'),
-            compute_ratio_avg=resource.get('computeRatioAvg'),
-            compute_ratio_max=resource.get('computeRatioMax'),
-            write_ratio_avg=resource.get('writeRatioAvg'),
-            write_ratio_max=resource.get('writeRatioMax'),
-            records_read=records_read,
-            records_written=records_written,
-            status=resource.get('status'),
-            steps=[QueryPlanEntryStep.from_api_repr(step)
-                   for step in resource.get('steps', ())],
-        )
+    @property
+    def read_ms_max(self):
+        """Union[int, None]: Milliseconds the slowest worker spent reading
+        input.
+        """
+        return self._properties.get('readMsMax')
+
+    @property
+    def read_ratio_avg(self):
+        """Union[float, None]: Ratio of time the average worker spent reading
+        input, relative to the longest time spent by any worker in any stage
+        of the overall plan.
+        """
+        return self._properties.get('readRatioAvg')
+
+    @property
+    def read_ratio_max(self):
+        """Union[float, None]: Ratio of time the slowest worker spent reading
+        input, relative to the longest time spent by any worker in
+        any stage of the overall plan.
+        """
+        return self._properties.get('readRatioMax')
+
+    @property
+    def compute_ms_avg(self):
+        """Union[int, None]: Milliseconds the average worker spent on CPU-bound
+        processing.
+        """
+        return self._properties.get('computeMsAvg')
+
+    @property
+    def compute_ms_max(self):
+        """Union[int, None]: Milliseconds the slowest worker spent on CPU-bound
+        processing.
+        """
+        return self._properties.get('computeMsMax')
+
+    @property
+    def compute_ratio_avg(self):
+        """Union[float, None]: Ratio of time the average worker spent on
+        CPU-bound processing, relative to the longest time spent by any
+        worker in any stage of the overall plan.
+        """
+        return self._properties.get('computeRatioAvg')
+
+    @property
+    def compute_ratio_max(self):
+        """Union[float, None]: Ratio of time the slowest worker spent on
+        CPU-bound processing, relative to the longest time spent by any
+        worker in any stage of the overall plan.
+        """
+        return self._properties.get('computeRatioMax')
+
+    @property
+    def write_ms_avg(self):
+        """Union[int, None]: Milliseconds the average worker spent writing
+        output data.
+ """ + return self._properties.get('writeMsAvg') + + @property + def write_ms_max(self): + """Union[int, None]: Milliseconds the slowest worker spent writing + output data. + """ + return self._properties.get('writeMsMax') + + @property + def write_ratio_avg(self): + """Union[float, None]: Ratio of time the average worker spent writing + output data, relative to the longest time spent by any worker in any + stage of the overall plan. + """ + return self._properties.get('writeRatioAvg') + + @property + def write_ratio_max(self): + """Union[float, None]: Ratio of time the slowest worker spent writing + output data, relative to the longest time spent by any worker in any + stage of the overall plan. + """ + return self._properties.get('writeRatioMax') + + @property + def records_read(self): + """Union[int, None]: Number of records read by this stage.""" + return self._properties.get('recordsRead') + + @property + def records_written(self): + """Union[int, None]: Number of records written by this stage.""" + return self._properties.get('recordsWritten') + + @property + def status(self): + """Union[str, None]: status of this stage.""" + return self._properties.get('status') + + @property + def shuffle_output_bytes(self): + """Union[int, None]: Number of bytes written by this stage to + intermediate shuffle. + """ + return self._properties.get('shuffleOutputBytes') + + @property + def shuffle_output_bytes_spilled(self): + """Union[int, None]: Number of bytes written by this stage to + intermediate shuffle and spilled to disk. + """ + return self._properties.get('shuffleOutputBytesSpilled') + + @property + def steps(self): + """List(QueryPlanEntryStep): List of step operations performed by + each worker in the stage. + """ + return [QueryPlanEntryStep.from_api_repr(step) + for step in self._properties.get('steps', [])] class UnknownJob(_AsyncJob): diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py index 297aab771bab..a531a0976942 100644 --- a/bigquery/tests/unit/test_job.py +++ b/bigquery/tests/unit/test_job.py @@ -25,6 +25,7 @@ from google.cloud.bigquery.job import LoadJobConfig from google.cloud.bigquery.dataset import DatasetReference from google.cloud.bigquery.table import EncryptionConfiguration +from google.cloud._helpers import _RFC3339_MICROS import mock @@ -2289,17 +2290,32 @@ def test_query_plan(self): plan_entries = [{ 'name': 'NAME', 'id': 1234, + 'inputStages': [88, 101], + 'startMs': 1522540800000, + 'endMs': 1522540804000, + 'parallelInputs': 1000, + 'completedParallelInputs': 5, + 'waitMsAvg': 33, + 'waitMsMax': 400, 'waitRatioAvg': 2.71828, 'waitRatioMax': 3.14159, + 'readMsAvg': 45, + 'readMsMax': 90, 'readRatioAvg': 1.41421, 'readRatioMax': 1.73205, + 'computeMsAvg': 55, + 'computeMsMax': 99, 'computeRatioAvg': 0.69315, 'computeRatioMax': 1.09861, + 'writeMsAvg': 203, + 'writeMsMax': 340, 'writeRatioAvg': 3.32193, 'writeRatioMax': 2.30258, 'recordsRead': '100', 'recordsWritten': '1', 'status': 'STATUS', + 'shuffleOutputBytes': 1024, + 'shuffleOutputBytesSpilled': 1, 'steps': [{ 'kind': 'KIND', 'substeps': ['SUBSTEP1', 'SUBSTEP2'], @@ -2322,21 +2338,52 @@ def test_query_plan(self): self.assertIsInstance(found, QueryPlanEntry) self.assertEqual(found.name, expected['name']) self.assertEqual(found.entry_id, expected['id']) + self.assertEqual( + len(found.input_stages), + len(expected['inputStages'])) + for f_id in found.input_stages: + self.assertIn(f_id, expected['inputStages']) + self.assertEqual( + found.start.strftime(_RFC3339_MICROS), + 
'2018-04-01T00:00:00.000000Z') + self.assertEqual( + found.end.strftime(_RFC3339_MICROS), + '2018-04-01T00:00:04.000000Z') + self.assertEqual( + found.parallel_inputs, + expected['parallelInputs']) + self.assertEqual( + found.completed_parallel_inputs, + expected['completedParallelInputs']) + self.assertEqual(found.wait_ms_avg, expected['waitMsAvg']) + self.assertEqual(found.wait_ms_max, expected['waitMsMax']) self.assertEqual(found.wait_ratio_avg, expected['waitRatioAvg']) self.assertEqual(found.wait_ratio_max, expected['waitRatioMax']) + self.assertEqual(found.read_ms_avg, expected['readMsAvg']) + self.assertEqual(found.read_ms_max, expected['readMsMax']) self.assertEqual(found.read_ratio_avg, expected['readRatioAvg']) self.assertEqual(found.read_ratio_max, expected['readRatioMax']) + self.assertEqual(found.compute_ms_avg, expected['computeMsAvg']) + self.assertEqual(found.compute_ms_max, expected['computeMsMax']) self.assertEqual( found.compute_ratio_avg, expected['computeRatioAvg']) self.assertEqual( found.compute_ratio_max, expected['computeRatioMax']) + self.assertEqual(found.write_ms_avg, expected['writeMsAvg']) + self.assertEqual(found.write_ms_max, expected['writeMsMax']) self.assertEqual(found.write_ratio_avg, expected['writeRatioAvg']) self.assertEqual(found.write_ratio_max, expected['writeRatioMax']) self.assertEqual( - found.records_read, int(expected['recordsRead'])) + found.records_read, expected['recordsRead']) self.assertEqual( - found.records_written, int(expected['recordsWritten'])) + found.records_written, expected['recordsWritten']) self.assertEqual(found.status, expected['status']) + self.assertEqual( + found.shuffle_output_bytes, + expected['shuffleOutputBytes']) + self.assertEqual( + found.shuffle_output_bytes_spilled, + expected['shuffleOutputBytesSpilled']) self.assertEqual(len(found.steps), len(expected['steps'])) for f_step, e_step in zip(found.steps, expected['steps']): @@ -3256,17 +3303,35 @@ def test___eq___wrong_type(self): class TestQueryPlanEntry(unittest.TestCase, _Base): NAME = 'NAME' ENTRY_ID = 1234 + START_MS = 1522540800000 + END_MS = 1522540804000 + INPUT_STAGES = (88, 101) + PARALLEL_INPUTS = 1000 + COMPLETED_PARALLEL_INPUTS = 5 + WAIT_MS_AVG = 33 + WAIT_MS_MAX = 400 WAIT_RATIO_AVG = 2.71828 WAIT_RATIO_MAX = 3.14159 + READ_MS_AVG = 45 + READ_MS_MAX = 90 READ_RATIO_AVG = 1.41421 READ_RATIO_MAX = 1.73205 + COMPUTE_MS_AVG = 55 + COMPUTE_MS_MAX = 99 COMPUTE_RATIO_AVG = 0.69315 COMPUTE_RATIO_MAX = 1.09861 + WRITE_MS_AVG = 203 + WRITE_MS_MAX = 340 WRITE_RATIO_AVG = 3.32193 WRITE_RATIO_MAX = 2.30258 RECORDS_READ = 100 RECORDS_WRITTEN = 1 STATUS = 'STATUS' + SHUFFLE_OUTPUT_BYTES = 1024 + SHUFFLE_OUTPUT_BYTES_SPILLED = 1 + + START_RFC3339_MICROS = '2018-04-01T00:00:00.000000Z' + END_RFC3339_MICROS = '2018-04-01T00:00:04.000000Z' @staticmethod def _get_target_class(): @@ -3274,46 +3339,6 @@ def _get_target_class(): return QueryPlanEntry - def _make_one(self, *args, **kw): - return self._get_target_class()(*args, **kw) - - def test_ctor(self): - from google.cloud.bigquery.job import QueryPlanEntryStep - - steps = [QueryPlanEntryStep( - kind=TestQueryPlanEntryStep.KIND, - substeps=TestQueryPlanEntryStep.SUBSTEPS)] - entry = self._make_one( - name=self.NAME, - entry_id=self.ENTRY_ID, - wait_ratio_avg=self.WAIT_RATIO_AVG, - wait_ratio_max=self.WAIT_RATIO_MAX, - read_ratio_avg=self.READ_RATIO_AVG, - read_ratio_max=self.READ_RATIO_MAX, - compute_ratio_avg=self.COMPUTE_RATIO_AVG, - compute_ratio_max=self.COMPUTE_RATIO_MAX, - write_ratio_avg=self.WRITE_RATIO_AVG, - 
write_ratio_max=self.WRITE_RATIO_MAX, - records_read=self.RECORDS_READ, - records_written=self.RECORDS_WRITTEN, - status=self.STATUS, - steps=steps, - ) - self.assertEqual(entry.name, self.NAME) - self.assertEqual(entry.entry_id, self.ENTRY_ID) - self.assertEqual(entry.wait_ratio_avg, self.WAIT_RATIO_AVG) - self.assertEqual(entry.wait_ratio_max, self.WAIT_RATIO_MAX) - self.assertEqual(entry.read_ratio_avg, self.READ_RATIO_AVG) - self.assertEqual(entry.read_ratio_max, self.READ_RATIO_MAX) - self.assertEqual(entry.compute_ratio_avg, self.COMPUTE_RATIO_AVG) - self.assertEqual(entry.compute_ratio_max, self.COMPUTE_RATIO_MAX) - self.assertEqual(entry.write_ratio_avg, self.WRITE_RATIO_AVG) - self.assertEqual(entry.write_ratio_max, self.WRITE_RATIO_MAX) - self.assertEqual(entry.records_read, self.RECORDS_READ) - self.assertEqual(entry.records_written, self.RECORDS_WRITTEN) - self.assertEqual(entry.status, self.STATUS) - self.assertEqual(entry.steps, steps) - def test_from_api_repr_empty(self): klass = self._get_target_class() @@ -3321,17 +3346,32 @@ def test_from_api_repr_empty(self): self.assertIsNone(entry.name) self.assertIsNone(entry.entry_id) + self.assertEqual(entry.input_stages, []) + self.assertIsNone(entry.start) + self.assertIsNone(entry.end) + self.assertIsNone(entry.parallel_inputs) + self.assertIsNone(entry.completed_parallel_inputs) + self.assertIsNone(entry.wait_ms_avg) + self.assertIsNone(entry.wait_ms_max) self.assertIsNone(entry.wait_ratio_avg) self.assertIsNone(entry.wait_ratio_max) + self.assertIsNone(entry.read_ms_avg) + self.assertIsNone(entry.read_ms_max) self.assertIsNone(entry.read_ratio_avg) self.assertIsNone(entry.read_ratio_max) + self.assertIsNone(entry.compute_ms_avg) + self.assertIsNone(entry.compute_ms_max) self.assertIsNone(entry.compute_ratio_avg) self.assertIsNone(entry.compute_ratio_max) + self.assertIsNone(entry.write_ms_avg) + self.assertIsNone(entry.write_ms_max) self.assertIsNone(entry.write_ratio_avg) self.assertIsNone(entry.write_ratio_max) self.assertIsNone(entry.records_read) self.assertIsNone(entry.records_written) self.assertIsNone(entry.status) + self.assertIsNone(entry.shuffle_output_bytes) + self.assertIsNone(entry.shuffle_output_bytes_spilled) self.assertEqual(entry.steps, []) def test_from_api_repr_normal(self): @@ -3343,17 +3383,30 @@ def test_from_api_repr_normal(self): resource = { 'name': self.NAME, 'id': self.ENTRY_ID, + 'inputStages': self.INPUT_STAGES, + 'startMs': self.START_MS, + 'endMs': self.END_MS, + 'waitMsAvg': self.WAIT_MS_AVG, + 'waitMsMax': self.WAIT_MS_MAX, 'waitRatioAvg': self.WAIT_RATIO_AVG, 'waitRatioMax': self.WAIT_RATIO_MAX, + 'readMsAvg': self.READ_MS_AVG, + 'readMsMax': self.READ_MS_MAX, 'readRatioAvg': self.READ_RATIO_AVG, 'readRatioMax': self.READ_RATIO_MAX, + 'computeMsAvg': self.COMPUTE_MS_AVG, + 'computeMsMax': self.COMPUTE_MS_MAX, 'computeRatioAvg': self.COMPUTE_RATIO_AVG, 'computeRatioMax': self.COMPUTE_RATIO_MAX, + 'writeMsAvg': self.WRITE_MS_AVG, + 'writeMsMax': self.WRITE_MS_MAX, 'writeRatioAvg': self.WRITE_RATIO_AVG, 'writeRatioMax': self.WRITE_RATIO_MAX, - 'recordsRead': str(self.RECORDS_READ), - 'recordsWritten': str(self.RECORDS_WRITTEN), + 'recordsRead': self.RECORDS_READ, + 'recordsWritten': self.RECORDS_WRITTEN, 'status': self.STATUS, + 'shuffleOutputBytes': self.SHUFFLE_OUTPUT_BYTES, + 'shuffleOutputBytesSpilled': self.SHUFFLE_OUTPUT_BYTES_SPILLED, 'steps': [{ 'kind': TestQueryPlanEntryStep.KIND, 'substeps': TestQueryPlanEntryStep.SUBSTEPS, @@ -3376,3 +3429,29 @@ def test_from_api_repr_normal(self): 
self.assertEqual(entry.records_written, self.RECORDS_WRITTEN) self.assertEqual(entry.status, self.STATUS) self.assertEqual(entry.steps, steps) + + def test_start(self): + klass = self._get_target_class() + + entry = klass.from_api_repr({}) + self.assertEqual( + entry.start, + None) + + entry._properties['startMs'] = self.START_MS + self.assertEqual( + entry.start.strftime(_RFC3339_MICROS), + self.START_RFC3339_MICROS) + + def test_end(self): + klass = self._get_target_class() + + entry = klass.from_api_repr({}) + self.assertEqual( + entry.end, + None) + + entry._properties['endMs'] = self.END_MS + self.assertEqual( + entry.end.strftime(_RFC3339_MICROS), + self.END_RFC3339_MICROS) From 38c3e9a94f664c3532f6b5fdc75b84b7bccbc80d Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Wed, 9 May 2018 17:50:19 -0400 Subject: [PATCH 04/75] Storage: add support for KMS keys (#5259) * Add support for blob-level KMS encryption keys (#5221) * Add 'Bucket.default_kms_key_name' property. (#5222) * Expose 'Blob.kms_key_name' as read-only property (#5249) * Add system tests for GCS-KMS integration (#5257) --- storage/google/cloud/storage/blob.py | 37 +++- storage/google/cloud/storage/bucket.py | 35 +++- storage/tests/system.py | 131 ++++++++++++ storage/tests/unit/test_blob.py | 277 +++++++++++++++++++++++-- storage/tests/unit/test_bucket.py | 78 ++++++- 5 files changed, 540 insertions(+), 18 deletions(-) diff --git a/storage/google/cloud/storage/blob.py b/storage/google/cloud/storage/blob.py index 815b155c1c58..51359bc4ded1 100644 --- a/storage/google/cloud/storage/blob.py +++ b/storage/google/cloud/storage/blob.py @@ -121,6 +121,11 @@ class Blob(_PropertyMixin): :param encryption_key: Optional 32 byte encryption key for customer-supplied encryption. See https://cloud.google.com/storage/docs/encryption#customer-supplied. + + :type kms_key_name: str + :param kms_key_name: + Optional resource name of Cloud KMS key used to encrypt the blob's + contents. """ _chunk_size = None # Default value for each instance. @@ -150,15 +155,24 @@ class Blob(_PropertyMixin): set as their 'storage_class'. """ - def __init__(self, name, bucket, chunk_size=None, encryption_key=None): + def __init__(self, name, bucket, chunk_size=None, + encryption_key=None, kms_key_name=None): name = _bytes_to_unicode(name) super(Blob, self).__init__(name=name) self.chunk_size = chunk_size # Check that setter accepts value. self.bucket = bucket self._acl = ObjectACL(self) + if encryption_key is not None and kms_key_name is not None: + raise ValueError( + "Pass at most one of 'encryption_key' " + "and 'kms_key_name'") + self._encryption_key = encryption_key + if kms_key_name is not None: + self._properties['kmsKeyName'] = kms_key_name + @property def chunk_size(self): """Get the blob's default chunk size. 
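For orientation, a minimal sketch of the two mutually exclusive encryption
options the constructor above now accepts; the project, bucket, object, and
key names below are placeholders, not part of this patch:

    from google.cloud import storage

    client = storage.Client()
    bucket = client.bucket('my-bucket')
    kms_key = ('projects/my-project/locations/us/'
               'keyRings/my-ring/cryptoKeys/my-key')

    # Server-side CMEK: GCS encrypts the object with the named KMS key.
    blob = bucket.blob('data.bin', kms_key_name=kms_key)

    # A customer-supplied key (CSEK) is still accepted, but combining it
    # with kms_key_name raises ValueError, per the check in __init__.
    csek_blob = bucket.blob('other.bin', encryption_key=b'0' * 32)
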
@@ -727,6 +741,10 @@ def _do_multipart_upload(self, client, stream, content_type, if self.user_project is not None: name_value_pairs.append(('userProject', self.user_project)) + + if self.kms_key_name is not None: + name_value_pairs.append(('kmsKeyName', self.kms_key_name)) + if predefined_acl is not None: name_value_pairs.append(('predefinedAcl', predefined_acl)) @@ -813,6 +831,10 @@ def _initiate_resumable_upload(self, client, stream, content_type, if self.user_project is not None: name_value_pairs.append(('userProject', self.user_project)) + + if self.kms_key_name is not None: + name_value_pairs.append(('kmsKeyName', self.kms_key_name)) + if predefined_acl is not None: name_value_pairs.append(('predefinedAcl', predefined_acl)) @@ -1401,6 +1423,9 @@ def rewrite(self, source, token=None, client=None): if self.user_project is not None: query_params['userProject'] = self.user_project + if self.kms_key_name is not None: + query_params['destinationKmsKeyName'] = self.kms_key_name + api_response = client._connection.api_request( method='POST', path=source.path + '/rewriteTo' + self.path, @@ -1689,6 +1714,16 @@ def size(self): if size is not None: return int(size) + @property + def kms_key_name(self): + """Resource name of Cloud KMS key used to encrypt the blob's contents. + + :rtype: str or ``NoneType`` + :returns: + The resource name or ``None`` if the property is not set locally. + """ + return self._properties.get('kmsKeyName') + storage_class = _scalar_property('storageClass') """Retrieve the storage class for the object. diff --git a/storage/google/cloud/storage/bucket.py b/storage/google/cloud/storage/bucket.py index 72defa881576..af3af4e9925a 100644 --- a/storage/google/cloud/storage/bucket.py +++ b/storage/google/cloud/storage/bucket.py @@ -171,7 +171,8 @@ def user_project(self): """ return self._user_project - def blob(self, blob_name, chunk_size=None, encryption_key=None): + def blob(self, blob_name, chunk_size=None, + encryption_key=None, kms_key_name=None): """Factory constructor for blob object. .. note:: @@ -190,11 +191,15 @@ def blob(self, blob_name, chunk_size=None, encryption_key=None): :param encryption_key: Optional 32 byte encryption key for customer-supplied encryption. + :type kms_key_name: str + :param kms_key_name: + Optional resource name of KMS key used to encrypt blob's content. + :rtype: :class:`google.cloud.storage.blob.Blob` :returns: The blob object created. """ return Blob(name=blob_name, bucket=self, chunk_size=chunk_size, - encryption_key=encryption_key) + encryption_key=encryption_key, kms_key_name=kms_key_name) def notification(self, topic_name, topic_project=None, @@ -786,6 +791,32 @@ def cors(self, entries): """ self._patch_property('cors', entries) + @property + def default_kms_key_name(self): + """Retrieve / set default KMS encryption key for objects in the bucket. + + See https://cloud.google.com/storage/docs/json_api/v1/buckets + + :setter: Set default KMS encryption key for items in this bucket. + :getter: Get default KMS encryption key for items in this bucket. + + :rtype: str + :returns: Default KMS encryption key, or ``None`` if not set. + """ + encryption_config = self._properties.get('encryption', {}) + return encryption_config.get('defaultKmsKeyName') + + @default_kms_key_name.setter + def default_kms_key_name(self, value): + """Set default KMS encryption key for objects in the bucket. + + :type value: str or None + :param value: new KMS key name (None to clear any existing key). 
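+
+        Changes are staged locally until the bucket is saved with
+        ``patch()``.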
+ """ + encryption_config = self._properties.get('encryption', {}) + encryption_config['defaultKmsKeyName'] = value + self._patch_property('encryption', encryption_config) + @property def labels(self): """Retrieve or set labels assigned to this bucket. diff --git a/storage/tests/system.py b/storage/tests/system.py index cf2f985a2d33..df1b72c1f81a 100644 --- a/storage/tests/system.py +++ b/storage/tests/system.py @@ -990,3 +990,134 @@ def test_access_to_public_bucket(self): blob, = bucket.list_blobs(max_results=1) with tempfile.TemporaryFile() as stream: blob.download_to_file(stream) + + +class TestKMSIntegration(TestStorageFiles): + + FILENAMES = ( + 'file01.txt', + ) + + KEYRING_NAME = 'gcs-test' + KEY_NAME = 'gcs-test' + ALT_KEY_NAME = 'gcs-test-alternate' + + def _kms_key_name(self, key_name=None): + if key_name is None: + key_name = self.KEY_NAME + + return ( + "projects/{}/" + "locations/{}/" + "keyRings/{}/" + "cryptoKeys/{}" + ).format( + Config.CLIENT.project, + self.bucket.location.lower(), + self.KEYRING_NAME, + key_name, + ) + + def test_blob_w_explicit_kms_key_name(self): + BLOB_NAME = 'explicit-kms-key-name' + file_data = self.FILES['simple'] + kms_key_name = self._kms_key_name() + blob = self.bucket.blob(BLOB_NAME, kms_key_name=kms_key_name) + blob.upload_from_filename(file_data['path']) + self.case_blobs_to_delete.append(blob) + with open(file_data['path'], 'rb') as _file_data: + self.assertEqual(blob.download_as_string(), _file_data.read()) + # We don't know the current version of the key. + self.assertTrue(blob.kms_key_name.startswith(kms_key_name)) + + listed, = list(self.bucket.list_blobs()) + self.assertTrue(listed.kms_key_name.startswith(kms_key_name)) + + def test_bucket_w_default_kms_key_name(self): + BLOB_NAME = 'default-kms-key-name' + OVERRIDE_BLOB_NAME = 'override-default-kms-key-name' + ALT_BLOB_NAME = 'alt-default-kms-key-name' + CLEARTEXT_BLOB_NAME = 'cleartext' + + file_data = self.FILES['simple'] + + with open(file_data['path'], 'rb') as _file_data: + contents = _file_data.read() + + kms_key_name = self._kms_key_name() + self.bucket.default_kms_key_name = kms_key_name + self.bucket.patch() + self.assertEqual(self.bucket.default_kms_key_name, kms_key_name) + + defaulted_blob = self.bucket.blob(BLOB_NAME) + defaulted_blob.upload_from_filename(file_data['path']) + self.case_blobs_to_delete.append(defaulted_blob) + + self.assertEqual(defaulted_blob.download_as_string(), contents) + # We don't know the current version of the key. + self.assertTrue(defaulted_blob.kms_key_name.startswith(kms_key_name)) + + alt_kms_key_name = self._kms_key_name(self.ALT_KEY_NAME) + + override_blob = self.bucket.blob( + OVERRIDE_BLOB_NAME, kms_key_name=alt_kms_key_name) + override_blob.upload_from_filename(file_data['path']) + self.case_blobs_to_delete.append(override_blob) + + self.assertEqual(override_blob.download_as_string(), contents) + # We don't know the current version of the key. + self.assertTrue( + override_blob.kms_key_name.startswith(alt_kms_key_name)) + + self.bucket.default_kms_key_name = alt_kms_key_name + self.bucket.patch() + + alt_blob = self.bucket.blob(ALT_BLOB_NAME) + alt_blob.upload_from_filename(file_data['path']) + self.case_blobs_to_delete.append(alt_blob) + + self.assertEqual(alt_blob.download_as_string(), contents) + # We don't know the current version of the key. 
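+        # (The service may report the key name with a trailing
+        # '/cryptoKeyVersions/<N>' component, hence the prefix check.)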
+ self.assertTrue(alt_blob.kms_key_name.startswith(alt_kms_key_name)) + + self.bucket.default_kms_key_name = None + self.bucket.patch() + + cleartext_blob = self.bucket.blob(CLEARTEXT_BLOB_NAME) + cleartext_blob.upload_from_filename(file_data['path']) + self.case_blobs_to_delete.append(cleartext_blob) + + self.assertEqual(cleartext_blob.download_as_string(), contents) + self.assertIsNone(cleartext_blob.kms_key_name) + + def test_rewrite_rotate_csek_to_cmek(self): + BLOB_NAME = 'rotating-keys' + file_data = self.FILES['simple'] + + SOURCE_KEY = os.urandom(32) + source = self.bucket.blob(BLOB_NAME, encryption_key=SOURCE_KEY) + source.upload_from_filename(file_data['path']) + self.case_blobs_to_delete.append(source) + source_data = source.download_as_string() + + kms_key_name = self._kms_key_name() + + # We can't verify it, but ideally we would check that the following + # URL was resolvable with our credentals + # KEY_URL = 'https://cloudkms.googleapis.com/v1/{}'.format( + # kms_key_name) + + dest = self.bucket.blob(BLOB_NAME, kms_key_name=kms_key_name) + token, rewritten, total = dest.rewrite(source) + + while token is not None: + token, rewritten, total = dest.rewrite(source, token=token) + + # Not adding 'dest' to 'self.case_blobs_to_delete': it is the + # same object as 'source'. + + self.assertIsNone(token) + self.assertEqual(rewritten, len(source_data)) + self.assertEqual(total, len(source_data)) + + self.assertEqual(dest.download_as_string(), source_data) diff --git a/storage/tests/unit/test_blob.py b/storage/tests/unit/test_blob.py index 88a7fc753b85..6122b40a844d 100644 --- a/storage/tests/unit/test_blob.py +++ b/storage/tests/unit/test_blob.py @@ -39,9 +39,9 @@ class Test_Blob(unittest.TestCase): def _make_one(*args, **kw): from google.cloud.storage.blob import Blob - properties = kw.pop('properties', None) + properties = kw.pop('properties', {}) blob = Blob(*args, **kw) - blob._properties = properties or {} + blob._properties.update(properties) return blob def test_ctor_wo_encryption_key(self): @@ -55,6 +55,7 @@ def test_ctor_wo_encryption_key(self): self.assertFalse(blob._acl.loaded) self.assertIs(blob._acl.blob, blob) self.assertEqual(blob._encryption_key, None) + self.assertEqual(blob.kms_key_name, None) def test_ctor_with_encoded_unicode(self): blob_name = b'wet \xe2\x9b\xb5' @@ -70,6 +71,134 @@ def test_ctor_w_encryption_key(self): bucket = _Bucket() blob = self._make_one(BLOB_NAME, bucket=bucket, encryption_key=KEY) self.assertEqual(blob._encryption_key, KEY) + self.assertEqual(blob.kms_key_name, None) + + def test_ctor_w_kms_key_name_and_encryption_key(self): + KEY = b'01234567890123456789012345678901' # 32 bytes + KMS_RESOURCE = ( + "projects/test-project-123/" + "locations/us/" + "keyRings/test-ring/" + "cryptoKeys/test-key" + ) + BLOB_NAME = 'blob-name' + bucket = _Bucket() + + with self.assertRaises(ValueError): + self._make_one( + BLOB_NAME, bucket=bucket, + encryption_key=KEY, + kms_key_name=KMS_RESOURCE) + + def test_ctor_w_kms_key_name(self): + KMS_RESOURCE = ( + "projects/test-project-123/" + "locations/us/" + "keyRings/test-ring/" + "cryptoKeys/test-key" + ) + BLOB_NAME = 'blob-name' + bucket = _Bucket() + blob = self._make_one( + BLOB_NAME, bucket=bucket, kms_key_name=KMS_RESOURCE) + self.assertEqual(blob._encryption_key, None) + self.assertEqual(blob.kms_key_name, KMS_RESOURCE) + + def _set_properties_helper(self, kms_key_name=None): + import datetime + from google.cloud._helpers import UTC + from google.cloud._helpers import _RFC3339_MICROS + now = 
datetime.datetime.utcnow().replace(tzinfo=UTC) + NOW = now.strftime(_RFC3339_MICROS) + BLOB_NAME = 'blob-name' + GENERATION = 12345 + BLOB_ID = 'name/{}/{}'.format(BLOB_NAME, GENERATION) + SELF_LINK = 'http://example.com/self/' + METAGENERATION = 23456 + SIZE = 12345 + MD5_HASH = 'DEADBEEF' + MEDIA_LINK = 'http://example.com/media/' + ENTITY = 'project-owner-12345' + ENTITY_ID = '23456' + CRC32C = 'FACE0DAC' + COMPONENT_COUNT = 2 + ETAG = 'ETAG' + resource = { + 'id': BLOB_ID, + 'selfLink': SELF_LINK, + 'generation': GENERATION, + 'metageneration': METAGENERATION, + 'contentType': 'text/plain', + 'timeCreated': NOW, + 'updated': NOW, + 'timeDeleted': NOW, + 'storageClass': 'NEARLINE', + 'timeStorageClassUpdated': NOW, + 'size': SIZE, + 'md5Hash': MD5_HASH, + 'mediaLink': MEDIA_LINK, + 'contentEncoding': 'gzip', + 'contentDisposition': 'inline', + 'contentLanguage': 'en-US', + 'cacheControl': 'private', + 'metadata': { + 'foo': 'Foo', + }, + 'owner': { + 'entity': ENTITY, + 'entityId': ENTITY_ID, + }, + 'crc32c': CRC32C, + 'componentCount': COMPONENT_COUNT, + 'etag': ETAG, + } + + if kms_key_name is not None: + resource['kmsKeyName'] = kms_key_name + + bucket = _Bucket() + blob = self._make_one(BLOB_NAME, bucket=bucket) + + blob._set_properties(resource) + + self.assertEqual(blob.id, BLOB_ID) + self.assertEqual(blob.self_link, SELF_LINK) + self.assertEqual(blob.generation, GENERATION) + self.assertEqual(blob.metageneration, METAGENERATION) + self.assertEqual(blob.content_type, 'text/plain') + self.assertEqual(blob.time_created, now) + self.assertEqual(blob.updated, now) + self.assertEqual(blob.time_deleted, now) + self.assertEqual(blob.storage_class, 'NEARLINE') + self.assertEqual(blob.size, SIZE) + self.assertEqual(blob.md5_hash, MD5_HASH) + self.assertEqual(blob.media_link, MEDIA_LINK) + self.assertEqual(blob.content_encoding, 'gzip') + self.assertEqual(blob.content_disposition, 'inline') + self.assertEqual(blob.content_language, 'en-US') + self.assertEqual(blob.cache_control, 'private') + self.assertEqual(blob.metadata, {'foo': 'Foo'}) + self.assertEqual(blob.owner, {'entity': ENTITY, 'entityId': ENTITY_ID}) + self.assertEqual(blob.crc32c, CRC32C) + self.assertEqual(blob.component_count, COMPONENT_COUNT) + self.assertEqual(blob.etag, ETAG) + + if kms_key_name is not None: + self.assertEqual(blob.kms_key_name, kms_key_name) + else: + self.assertIsNone(blob.kms_key_name) + + def test__set_properties_wo_kms_key_name(self): + self._set_properties_helper() + + def test__set_properties_w_kms_key_name(self): + kms_resource = ( + "projects/test-project-123/" + "locations/us/" + "keyRings/test-ring/" + "cryptoKeys/test-key" + ) + self._set_properties_helper(kms_key_name=kms_resource) def test_chunk_size_ctor(self): from google.cloud.storage.blob import Blob @@ -459,6 +588,25 @@ def test__get_download_url_on_the_fly_with_user_project(self): user_project)) self.assertEqual(download_url, expected_url) + def test__get_download_url_on_the_fly_with_kms_key_name(self): + kms_resource = ( + "projects/test-project-123/" + "locations/us/" + "keyRings/test-ring/" + "cryptoKeys/test-key" + ) + blob_name = 'bzzz-fly.txt' + bucket = _Bucket(name='buhkit') + blob = self._make_one( + blob_name, bucket=bucket, kms_key_name=kms_resource) + + self.assertIsNone(blob.media_link) + download_url = blob._get_download_url() + expected_url = ( + 'https://www.googleapis.com/download/storage/v1/b/' + 'buhkit/o/bzzz-fly.txt?alt=media') + self.assertEqual(download_url, expected_url) + @staticmethod def 
_mock_requests_response( status_code, headers, content=b'', stream=False): @@ -1000,9 +1148,11 @@ def _mock_transport(self, status_code, headers, content=b''): def _do_multipart_success(self, mock_get_boundary, size=None, num_retries=None, user_project=None, - predefined_acl=None): + predefined_acl=None, kms_key_name=None): + from six.moves.urllib.parse import urlencode bucket = _Bucket(name='w00t', user_project=user_project) - blob = self._make_one(u'blob-name', bucket=bucket) + blob = self._make_one( + u'blob-name', bucket=bucket, kms_key_name=kms_key_name) self.assertIsNone(blob.chunk_size) # Create mocks to be checked for doing transport. @@ -1029,12 +1179,20 @@ def _do_multipart_success(self, mock_get_boundary, size=None, upload_url = ( 'https://www.googleapis.com/upload/storage/v1' + - bucket.path + - '/o?uploadType=multipart') + bucket.path + '/o') + + qs_params = [('uploadType', 'multipart')] + if user_project is not None: - upload_url += '&userProject={}'.format(user_project) + qs_params.append(('userProject', user_project)) + if predefined_acl is not None: - upload_url += '&predefinedAcl={}'.format(predefined_acl) + qs_params.append(('predefinedAcl', predefined_acl)) + + if kms_key_name is not None: + qs_params.append(('kmsKeyName', kms_key_name)) + + upload_url += '?' + urlencode(qs_params) payload = ( b'--==0==\r\n' + @@ -1065,6 +1223,18 @@ def test__do_multipart_upload_with_user_project(self, mock_get_boundary): self._do_multipart_success( mock_get_boundary, user_project=user_project) + @mock.patch(u'google.resumable_media._upload.get_boundary', + return_value=b'==0==') + def test__do_multipart_upload_with_kms(self, mock_get_boundary): + kms_resource = ( + "projects/test-project-123/" + "locations/us/" + "keyRings/test-ring/" + "cryptoKeys/test-key" + ) + self._do_multipart_success( + mock_get_boundary, kms_key_name=kms_resource) + @mock.patch(u'google.resumable_media._upload.get_boundary', return_value=b'==0==') def test__do_multipart_upload_with_retry(self, mock_get_boundary): @@ -1089,11 +1259,13 @@ def test__do_multipart_upload_bad_size(self): def _initiate_resumable_helper( self, size=None, extra_headers=None, chunk_size=None, num_retries=None, user_project=None, predefined_acl=None, - blob_chunk_size=786432): + blob_chunk_size=786432, kms_key_name=None): + from six.moves.urllib.parse import urlencode from google.resumable_media.requests import ResumableUpload bucket = _Bucket(name='whammy', user_project=user_project) - blob = self._make_one(u'blob-name', bucket=bucket) + blob = self._make_one( + u'blob-name', bucket=bucket, kms_key_name=kms_key_name) blob.metadata = {'rook': 'takes knight'} blob.chunk_size = blob_chunk_size if blob_chunk_size is not None: @@ -1124,14 +1296,23 @@ def _initiate_resumable_helper( # Check the returned values. self.assertIsInstance(upload, ResumableUpload) + upload_url = ( 'https://www.googleapis.com/upload/storage/v1' + - bucket.path + - '/o?uploadType=resumable') + bucket.path + '/o') + qs_params = [('uploadType', 'resumable')] + if user_project is not None: - upload_url += '&userProject={}'.format(user_project) + qs_params.append(('userProject', user_project)) + if predefined_acl is not None: - upload_url += '&predefinedAcl={}'.format(predefined_acl) + qs_params.append(('predefinedAcl', predefined_acl)) + + if kms_key_name is not None: + qs_params.append(('kmsKeyName', kms_key_name)) + + upload_url += '?' 
+ urlencode(qs_params) + self.assertEqual(upload.upload_url, upload_url) if extra_headers is None: self.assertEqual(upload._headers, {}) @@ -1191,6 +1372,15 @@ def test__initiate_resumable_upload_with_user_project(self): user_project = 'user-project-123' self._initiate_resumable_helper(user_project=user_project) + def test__initiate_resumable_upload_with_kms(self): + kms_resource = ( + "projects/test-project-123/" + "locations/us/" + "keyRings/test-ring/" + "cryptoKeys/test-key" + ) + self._initiate_resumable_helper(kms_key_name=kms_resource) + def test__initiate_resumable_upload_without_chunk_size(self): self._initiate_resumable_helper(blob_chunk_size=None) @@ -2149,6 +2339,65 @@ def test_rewrite_same_name_no_key_new_key_w_token(self): self.assertEqual( headers['X-Goog-Encryption-Key-Sha256'], DEST_KEY_HASH_B64) + def test_rewrite_same_name_w_old_key_new_kms_key(self): + import base64 + import hashlib + + SOURCE_KEY = b'01234567890123456789012345678901' # 32 bytes + SOURCE_KEY_B64 = base64.b64encode(SOURCE_KEY).rstrip().decode('ascii') + SOURCE_KEY_HASH = hashlib.sha256(SOURCE_KEY).digest() + SOURCE_KEY_HASH_B64 = base64.b64encode( + SOURCE_KEY_HASH).rstrip().decode('ascii') + DEST_KMS_RESOURCE = ( + "projects/test-project-123/" + "locations/us/" + "keyRings/test-ring/" + "cryptoKeys/test-key" + ) + BLOB_NAME = 'blob' + RESPONSE = { + 'totalBytesRewritten': 42, + 'objectSize': 42, + 'done': True, + 'resource': {'etag': 'DEADBEEF'}, + } + response = ({'status': http_client.OK}, RESPONSE) + connection = _Connection(response) + client = _Client(connection) + bucket = _Bucket(client=client) + source = self._make_one( + BLOB_NAME, bucket=bucket, encryption_key=SOURCE_KEY) + dest = self._make_one(BLOB_NAME, bucket=bucket, + kms_key_name=DEST_KMS_RESOURCE) + + token, rewritten, size = dest.rewrite(source) + + self.assertIsNone(token) + self.assertEqual(rewritten, 42) + self.assertEqual(size, 42) + + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'POST') + PATH = '/b/name/o/%s/rewriteTo/b/name/o/%s' % (BLOB_NAME, BLOB_NAME) + self.assertEqual(kw[0]['path'], PATH) + self.assertEqual(kw[0]['query_params'], + {'destinationKmsKeyName': DEST_KMS_RESOURCE}) + SENT = { + 'kmsKeyName': DEST_KMS_RESOURCE, + } + self.assertEqual(kw[0]['data'], SENT) + + headers = { + key.title(): str(value) for key, value in kw[0]['headers'].items()} + self.assertEqual( + headers['X-Goog-Copy-Source-Encryption-Algorithm'], 'AES256') + self.assertEqual( + headers['X-Goog-Copy-Source-Encryption-Key'], SOURCE_KEY_B64) + self.assertEqual( + headers['X-Goog-Copy-Source-Encryption-Key-Sha256'], + SOURCE_KEY_HASH_B64) + def test_update_storage_class_invalid(self): BLOB_NAME = 'blob-name' bucket = _Bucket() diff --git a/storage/tests/unit/test_bucket.py b/storage/tests/unit/test_bucket.py index 0c67d79d1e05..92ca3bbf8700 100644 --- a/storage/tests/unit/test_bucket.py +++ b/storage/tests/unit/test_bucket.py @@ -77,7 +77,25 @@ def test_ctor_w_user_project(self): self.assertFalse(bucket._default_object_acl.loaded) self.assertIs(bucket._default_object_acl.bucket, bucket) - def test_blob(self): + def test_blob_wo_keys(self): + from google.cloud.storage.blob import Blob + + BUCKET_NAME = 'BUCKET_NAME' + BLOB_NAME = 'BLOB_NAME' + CHUNK_SIZE = 1024 * 1024 + + bucket = self._make_one(name=BUCKET_NAME) + blob = bucket.blob( + BLOB_NAME, chunk_size=CHUNK_SIZE) + self.assertIsInstance(blob, Blob) + self.assertIs(blob.bucket, bucket) + self.assertIs(blob.client, bucket.client) + 
self.assertEqual(blob.name, BLOB_NAME) + self.assertEqual(blob.chunk_size, CHUNK_SIZE) + self.assertIsNone(blob._encryption_key) + self.assertIsNone(blob.kms_key_name) + + def test_blob_w_encryption_key(self): from google.cloud.storage.blob import Blob BUCKET_NAME = 'BUCKET_NAME' @@ -94,6 +112,31 @@ def test_blob(self): self.assertEqual(blob.name, BLOB_NAME) self.assertEqual(blob.chunk_size, CHUNK_SIZE) self.assertEqual(blob._encryption_key, KEY) + self.assertIsNone(blob.kms_key_name) + + def test_blob_w_kms_key_name(self): + from google.cloud.storage.blob import Blob + + BUCKET_NAME = 'BUCKET_NAME' + BLOB_NAME = 'BLOB_NAME' + CHUNK_SIZE = 1024 * 1024 + KMS_RESOURCE = ( + "projects/test-project-123/" + "locations/us/" + "keyRings/test-ring/" + "cryptoKeys/test-key" + ) + + bucket = self._make_one(name=BUCKET_NAME) + blob = bucket.blob( + BLOB_NAME, chunk_size=CHUNK_SIZE, kms_key_name=KMS_RESOURCE) + self.assertIsInstance(blob, Blob) + self.assertIs(blob.bucket, bucket) + self.assertIs(blob.client, bucket.client) + self.assertEqual(blob.name, BLOB_NAME) + self.assertEqual(blob.chunk_size, CHUNK_SIZE) + self.assertIsNone(blob._encryption_key) + self.assertEqual(blob.kms_key_name, KMS_RESOURCE) def test_notification_defaults(self): from google.cloud.storage.notification import BucketNotification @@ -918,6 +961,39 @@ def test_cors_setter(self): self.assertEqual(bucket.cors, [CORS_ENTRY]) self.assertTrue('cors' in bucket._changes) + def test_default_kms_key_name_getter(self): + NAME = 'name' + KMS_RESOURCE = ( + 'projects/test-project-123/' + 'locations/us/' + 'keyRings/test-ring/' + 'cryptoKeys/test-key' + ) + ENCRYPTION_CONFIG = { + 'defaultKmsKeyName': KMS_RESOURCE, + } + bucket = self._make_one(name=NAME) + self.assertIsNone(bucket.default_kms_key_name) + bucket._properties['encryption'] = ENCRYPTION_CONFIG + self.assertEqual(bucket.default_kms_key_name, KMS_RESOURCE) + + def test_default_kms_key_name_setter(self): + NAME = 'name' + KMS_RESOURCE = ( + 'projects/test-project-123/' + 'locations/us/' + 'keyRings/test-ring/' + 'cryptoKeys/test-key' + ) + ENCRYPTION_CONFIG = { + 'defaultKmsKeyName': KMS_RESOURCE, + } + bucket = self._make_one(name=NAME) + bucket.default_kms_key_name = KMS_RESOURCE + self.assertEqual( + bucket._properties['encryption'], ENCRYPTION_CONFIG) + self.assertTrue('encryption' in bucket._changes) + def test_labels_getter(self): NAME = 'name' LABELS = {'color': 'red', 'flavor': 'cherry'} From c170ee831e2c5bf225b4e7ea8f435f8b585d1425 Mon Sep 17 00:00:00 2001 From: shollyman Date: Wed, 9 May 2018 15:56:30 -0700 Subject: [PATCH 05/75] Add timeline and top-level slot-millis to query statistics. (#5312) * Add timeline and top-level slot-millis to query statistics. * address reviewer comment: add _int_or_none guards * Add a system test that examines query statistics. * Remove ratio evaluation from the query statistics system test. 
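
As a rough usage sketch (illustrative only: 'client' stands in for a
google.cloud.bigquery.Client and the query text is a placeholder), the
new statistics read off a finished job like so:

    job = client.query('SELECT 1')
    job.result()  # wait for the job to finish
    print('total slot-milliseconds:', job.slot_millis)
    print('timeline samples:', len(job.timeline))
    for stage in job.query_plan:
        print(stage.name, stage.wait_ms_max, stage.shuffle_output_bytes)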
--- bigquery/google/cloud/bigquery/job.py | 110 ++++++++++++++++--- bigquery/tests/system.py | 76 +++++++++++++ bigquery/tests/unit/test_job.py | 148 ++++++++++++++++++++------ 3 files changed, 286 insertions(+), 48 deletions(-) diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py index e9c3bfa783ec..186beab31ba8 100644 --- a/bigquery/google/cloud/bigquery/job.py +++ b/bigquery/google/cloud/bigquery/job.py @@ -2200,6 +2200,14 @@ def query_plan(self): plan_entries = self._job_statistics().get('queryPlan', ()) return [QueryPlanEntry.from_api_repr(entry) for entry in plan_entries] + @property + def timeline(self): + """List(TimelineEntry): Return the query execution timeline + from job statistics. + """ + raw = self._job_statistics().get('timeline', ()) + return [TimelineEntry.from_api_repr(entry) for entry in raw] + @property def total_bytes_processed(self): """Return total bytes processed from job statistics, if present. @@ -2274,6 +2282,11 @@ def num_dml_affected_rows(self): result = int(result) return result + @property + def slot_millis(self): + """Union[int, None]: Slot-milliseconds used by this query job.""" + return _int_or_none(self._job_statistics().get('totalSlotMs')) + @property def statement_type(self): """Return statement type from job statistics, if present. @@ -2518,7 +2531,7 @@ def start(self): if self._properties.get('startMs') is None: return None return _datetime_from_microseconds( - self._properties.get('startMs') * 1000.0) + int(self._properties.get('startMs')) * 1000.0) @property def end(self): @@ -2526,38 +2539,41 @@ def end(self): if self._properties.get('endMs') is None: return None return _datetime_from_microseconds( - self._properties.get('endMs') * 1000.0) + int(self._properties.get('endMs')) * 1000.0) @property def input_stages(self): """List(int): Entry IDs for stages that were inputs for this stage.""" - return self._properties.get('inputStages', []) + if self._properties.get('inputStages') is None: + return [] + return [_int_or_none(entry) + for entry in self._properties.get('inputStages')] @property def parallel_inputs(self): """Union[int, None]: Number of parallel input segments within the stage. """ - return self._properties.get('parallelInputs') + return _int_or_none(self._properties.get('parallelInputs')) @property def completed_parallel_inputs(self): """Union[int, None]: Number of parallel input segments completed.""" - return self._properties.get('completedParallelInputs') + return _int_or_none(self._properties.get('completedParallelInputs')) @property def wait_ms_avg(self): """Union[int, None]: Milliseconds the average worker spent waiting to be scheduled. """ - return self._properties.get('waitMsAvg') + return _int_or_none(self._properties.get('waitMsAvg')) @property def wait_ms_max(self): """Union[int, None]: Milliseconds the slowest worker spent waiting to be scheduled. """ - return self._properties.get('waitMsMax') + return _int_or_none(self._properties.get('waitMsMax')) @property def wait_ratio_avg(self): @@ -2580,14 +2596,14 @@ def read_ms_avg(self): """Union[int, None]: Milliseconds the average worker spent reading input. """ - return self._properties.get('readMsAvg') + return _int_or_none(self._properties.get('readMsAvg')) @property def read_ms_max(self): """Union[int, None]: Milliseconds the slowest worker spent reading input. 
""" - return self._properties.get('readMsMax') + return _int_or_none(self._properties.get('readMsMax')) @property def read_ratio_avg(self): @@ -2610,14 +2626,14 @@ def compute_ms_avg(self): """Union[int, None]: Milliseconds the average worker spent on CPU-bound processing. """ - return self._properties.get('computeMsAvg') + return _int_or_none(self._properties.get('computeMsAvg')) @property def compute_ms_max(self): """Union[int, None]: Milliseconds the slowest worker spent on CPU-bound processing. """ - return self._properties.get('computeMsMax') + return _int_or_none(self._properties.get('computeMsMax')) @property def compute_ratio_avg(self): @@ -2640,14 +2656,14 @@ def write_ms_avg(self): """Union[int, None]: Milliseconds the average worker spent writing output data. """ - return self._properties.get('writeMsAvg') + return _int_or_none(self._properties.get('writeMsAvg')) @property def write_ms_max(self): """Union[int, None]: Milliseconds the slowest worker spent writing output data. """ - return self._properties.get('writeMsMax') + return _int_or_none(self._properties.get('writeMsMax')) @property def write_ratio_avg(self): @@ -2668,12 +2684,12 @@ def write_ratio_max(self): @property def records_read(self): """Union[int, None]: Number of records read by this stage.""" - return self._properties.get('recordsRead') + return _int_or_none(self._properties.get('recordsRead')) @property def records_written(self): """Union[int, None]: Number of records written by this stage.""" - return self._properties.get('recordsWritten') + return _int_or_none(self._properties.get('recordsWritten')) @property def status(self): @@ -2685,14 +2701,14 @@ def shuffle_output_bytes(self): """Union[int, None]: Number of bytes written by this stage to intermediate shuffle. """ - return self._properties.get('shuffleOutputBytes') + return _int_or_none(self._properties.get('shuffleOutputBytes')) @property def shuffle_output_bytes_spilled(self): """Union[int, None]: Number of bytes written by this stage to intermediate shuffle and spilled to disk. """ - return self._properties.get('shuffleOutputBytesSpilled') + return _int_or_none(self._properties.get('shuffleOutputBytesSpilled')) @property def steps(self): @@ -2703,6 +2719,66 @@ def steps(self): for step in self._properties.get('steps', [])] +class TimelineEntry(object): + """TimelineEntry represents progress of a query job at a particular + point in time. + + See + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs + for the underlying API representation within query statistics. + + """ + + def __init__(self): + self._properties = {} + + @classmethod + def from_api_repr(cls, resource): + """Factory: construct instance from the JSON repr. 
+ + Args: + resource(Dict[str: object]): + QueryTimelineSample representation returned from API + + Returns: + google.cloud.bigquery.TimelineEntry: + Timeline sample parsed from ``resource`` + """ + entry = cls() + entry._properties = resource + return entry + + @property + def elapsed_ms(self): + """Union[int, None]: Milliseconds elapsed since start of query + execution.""" + return _int_or_none(self._properties.get('elapsedMs')) + + @property + def active_units(self): + """Union[int, None]: Current number of input units being processed + by workers, reported as largest value since the last sample.""" + return _int_or_none(self._properties.get('activeUnits')) + + @property + def pending_units(self): + """Union[int, None]: Current number of input units remaining for + query stages active at this sample time.""" + return _int_or_none(self._properties.get('pendingUnits')) + + @property + def completed_units(self): + """Union[int, None]: Current number of input units completed by + this query.""" + return _int_or_none(self._properties.get('completedUnits')) + + @property + def slot_millis(self): + """Union[int, None]: Cumulative slot-milliseconds consumed by + this query.""" + return _int_or_none(self._properties.get('totalSlotMs')) + + class UnknownJob(_AsyncJob): """A job whose type cannot be determined.""" diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py index 177f944ace3e..72ff31a4d133 100644 --- a/bigquery/tests/system.py +++ b/bigquery/tests/system.py @@ -1087,6 +1087,82 @@ def test_query_w_timeout(self): # 1 second is much too short for this query. query_job.result(timeout=1) + def test_query_statistics(self): + """ + A system test to exercise some of the extended query statistics. + + Note: We construct a query that should need at least three stages by + specifying a JOIN query. Exact plan and stats are effectively + non-deterministic, so we're largely interested in confirming values + are present. + """ + + job_config = bigquery.QueryJobConfig() + job_config.use_query_cache = False + + query_job = Config.CLIENT.query( + """ + SELECT + COUNT(1) + FROM + ( + SELECT + year, + wban_number + FROM `bigquery-public-data.samples.gsod` + LIMIT 1000 + ) lside + INNER JOIN + ( + SELECT + year, + state + FROM `bigquery-public-data.samples.natality` + LIMIT 1000 + ) rside + ON + lside.year = rside.year + """, + location='US', + job_config=job_config) + + # run the job to completion + query_job.result() + + # Assert top-level stats + self.assertFalse(query_job.cache_hit) + self.assertIsNotNone(query_job.destination) + self.assertTrue(query_job.done) + self.assertFalse(query_job.dry_run) + self.assertIsNone(query_job.num_dml_affected_rows) + self.assertEqual(query_job.priority, 'INTERACTIVE') + self.assertGreater(query_job.total_bytes_billed, 1) + self.assertGreater(query_job.total_bytes_processed, 1) + self.assertEqual(query_job.statement_type, 'SELECT') + self.assertGreater(query_job.slot_millis, 1) + + # Make assertions on the shape of the query plan. + plan = query_job.query_plan + self.assertGreaterEqual(len(plan), 3) + first_stage = plan[0] + self.assertIsNotNone(first_stage.start) + self.assertIsNotNone(first_stage.end) + self.assertIsNotNone(first_stage.entry_id) + self.assertIsNotNone(first_stage.name) + self.assertGreater(first_stage.parallel_inputs, 0) + self.assertGreater(first_stage.completed_parallel_inputs, 0) + self.assertGreater(first_stage.shuffle_output_bytes, 0) + self.assertEqual(first_stage.status, 'COMPLETE') + + # Query plan is a digraph. 
Ensure it has inter-stage links, + # but not every stage has inputs. + stages_with_inputs = 0 + for entry in plan: + if len(entry.input_stages) > 0: + stages_with_inputs = stages_with_inputs + 1 + self.assertGreater(stages_with_inputs, 0) + self.assertGreater(len(plan), stages_with_inputs) + def test_dbapi_w_standard_sql_types(self): examples = self._generate_standard_sql_types_examples() for example in examples: diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py index a531a0976942..d381c31895f3 100644 --- a/bigquery/tests/unit/test_job.py +++ b/bigquery/tests/unit/test_job.py @@ -2289,33 +2289,33 @@ def test_query_plan(self): plan_entries = [{ 'name': 'NAME', - 'id': 1234, - 'inputStages': [88, 101], - 'startMs': 1522540800000, - 'endMs': 1522540804000, - 'parallelInputs': 1000, - 'completedParallelInputs': 5, - 'waitMsAvg': 33, - 'waitMsMax': 400, + 'id': '1234', + 'inputStages': ['88', '101'], + 'startMs': '1522540800000', + 'endMs': '1522540804000', + 'parallelInputs': '1000', + 'completedParallelInputs': '5', + 'waitMsAvg': '33', + 'waitMsMax': '400', 'waitRatioAvg': 2.71828, 'waitRatioMax': 3.14159, - 'readMsAvg': 45, - 'readMsMax': 90, + 'readMsAvg': '45', + 'readMsMax': '90', 'readRatioAvg': 1.41421, 'readRatioMax': 1.73205, - 'computeMsAvg': 55, - 'computeMsMax': 99, + 'computeMsAvg': '55', + 'computeMsMax': '99', 'computeRatioAvg': 0.69315, 'computeRatioMax': 1.09861, - 'writeMsAvg': 203, - 'writeMsMax': 340, + 'writeMsAvg': '203', + 'writeMsMax': '340', 'writeRatioAvg': 3.32193, 'writeRatioMax': 2.30258, 'recordsRead': '100', 'recordsWritten': '1', 'status': 'STATUS', - 'shuffleOutputBytes': 1024, - 'shuffleOutputBytesSpilled': 1, + 'shuffleOutputBytes': '1024', + 'shuffleOutputBytesSpilled': '1', 'steps': [{ 'kind': 'KIND', 'substeps': ['SUBSTEP1', 'SUBSTEP2'], @@ -2342,7 +2342,7 @@ def test_query_plan(self): len(found.input_stages), len(expected['inputStages'])) for f_id in found.input_stages: - self.assertIn(f_id, expected['inputStages']) + self.assertIn(f_id, [int(e) for e in expected['inputStages']]) self.assertEqual( found.start.strftime(_RFC3339_MICROS), '2018-04-01T00:00:00.000000Z') @@ -2351,39 +2351,43 @@ def test_query_plan(self): '2018-04-01T00:00:04.000000Z') self.assertEqual( found.parallel_inputs, - expected['parallelInputs']) + int(expected['parallelInputs'])) self.assertEqual( found.completed_parallel_inputs, - expected['completedParallelInputs']) - self.assertEqual(found.wait_ms_avg, expected['waitMsAvg']) - self.assertEqual(found.wait_ms_max, expected['waitMsMax']) + int(expected['completedParallelInputs'])) + self.assertEqual(found.wait_ms_avg, int(expected['waitMsAvg'])) + self.assertEqual(found.wait_ms_max, int(expected['waitMsMax'])) self.assertEqual(found.wait_ratio_avg, expected['waitRatioAvg']) self.assertEqual(found.wait_ratio_max, expected['waitRatioMax']) - self.assertEqual(found.read_ms_avg, expected['readMsAvg']) - self.assertEqual(found.read_ms_max, expected['readMsMax']) + self.assertEqual(found.read_ms_avg, int(expected['readMsAvg'])) + self.assertEqual(found.read_ms_max, int(expected['readMsMax'])) self.assertEqual(found.read_ratio_avg, expected['readRatioAvg']) self.assertEqual(found.read_ratio_max, expected['readRatioMax']) - self.assertEqual(found.compute_ms_avg, expected['computeMsAvg']) - self.assertEqual(found.compute_ms_max, expected['computeMsMax']) + self.assertEqual( + found.compute_ms_avg, + int(expected['computeMsAvg'])) + self.assertEqual( + found.compute_ms_max, + int(expected['computeMsMax'])) 
self.assertEqual( found.compute_ratio_avg, expected['computeRatioAvg']) self.assertEqual( found.compute_ratio_max, expected['computeRatioMax']) - self.assertEqual(found.write_ms_avg, expected['writeMsAvg']) - self.assertEqual(found.write_ms_max, expected['writeMsMax']) + self.assertEqual(found.write_ms_avg, int(expected['writeMsAvg'])) + self.assertEqual(found.write_ms_max, int(expected['writeMsMax'])) self.assertEqual(found.write_ratio_avg, expected['writeRatioAvg']) self.assertEqual(found.write_ratio_max, expected['writeRatioMax']) self.assertEqual( - found.records_read, expected['recordsRead']) + found.records_read, int(expected['recordsRead'])) self.assertEqual( - found.records_written, expected['recordsWritten']) + found.records_written, int(expected['recordsWritten'])) self.assertEqual(found.status, expected['status']) self.assertEqual( found.shuffle_output_bytes, - expected['shuffleOutputBytes']) + int(expected['shuffleOutputBytes'])) self.assertEqual( found.shuffle_output_bytes_spilled, - expected['shuffleOutputBytesSpilled']) + int(expected['shuffleOutputBytesSpilled'])) self.assertEqual(len(found.steps), len(expected['steps'])) for f_step, e_step in zip(found.steps, expected['steps']): @@ -2465,6 +2469,21 @@ def test_num_dml_affected_rows(self): query_stats['numDmlAffectedRows'] = str(num_rows) self.assertEqual(job.num_dml_affected_rows, num_rows) + def test_slot_millis(self): + millis = 1234 + client = _make_client(project=self.PROJECT) + job = self._make_one(self.JOB_ID, self.QUERY, client) + self.assertIsNone(job.slot_millis) + + statistics = job._properties['statistics'] = {} + self.assertIsNone(job.slot_millis) + + query_stats = statistics['query'] = {} + self.assertIsNone(job.slot_millis) + + query_stats['totalSlotMs'] = millis + self.assertEqual(job.slot_millis, millis) + def test_statement_type(self): statement_type = 'SELECT' client = _make_client(project=self.PROJECT) @@ -2527,6 +2546,34 @@ def test_referenced_tables(self): self.assertEqual(remote.dataset_id, 'other-dataset') self.assertEqual(remote.project, 'other-project-123') + def test_timeline(self): + timeline_resource = [{ + 'elapsedMs': 1, + 'activeUnits': 22, + 'pendingUnits': 33, + 'completedUnits': 44, + 'totalSlotMs': 101, + }] + + client = _make_client(project=self.PROJECT) + job = self._make_one(self.JOB_ID, self.QUERY, client) + self.assertEqual(job.timeline, []) + + statistics = job._properties['statistics'] = {} + self.assertEqual(job.timeline, []) + + query_stats = statistics['query'] = {} + self.assertEqual(job.timeline, []) + + query_stats['timeline'] = timeline_resource + + self.assertEqual(len(job.timeline), len(timeline_resource)) + self.assertEqual(job.timeline[0].elapsed_ms, 1) + self.assertEqual(job.timeline[0].active_units, 22) + self.assertEqual(job.timeline[0].pending_units, 33) + self.assertEqual(job.timeline[0].completed_units, 44) + self.assertEqual(job.timeline[0].slot_millis, 101) + def test_undeclared_query_parameters(self): from google.cloud.bigquery.query import ArrayQueryParameter from google.cloud.bigquery.query import ScalarQueryParameter @@ -3455,3 +3502,42 @@ def test_end(self): self.assertEqual( entry.end.strftime(_RFC3339_MICROS), self.END_RFC3339_MICROS) + + +class TestTimelineEntry(unittest.TestCase, _Base): + ELAPSED_MS = 101 + ACTIVE_UNITS = 50 + PENDING_UNITS = 98 + COMPLETED_UNITS = 520 + SLOT_MILLIS = 12029 + + @staticmethod + def _get_target_class(): + from google.cloud.bigquery.job import TimelineEntry + return TimelineEntry + + def test_from_api_repr_empty(self): + 
klass = self._get_target_class() + entry = klass.from_api_repr({}) + self.assertIsNone(entry.elapsed_ms) + self.assertIsNone(entry.active_units) + self.assertIsNone(entry.pending_units) + self.assertIsNone(entry.completed_units) + self.assertIsNone(entry.slot_millis) + + def test_from_api_repr_normal(self): + resource = { + 'elapsedMs': self.ELAPSED_MS, + 'activeUnits': self.ACTIVE_UNITS, + 'pendingUnits': self.PENDING_UNITS, + 'completedUnits': self.COMPLETED_UNITS, + 'totalSlotMs': self.SLOT_MILLIS, + } + klass = self._get_target_class() + + entry = klass.from_api_repr(resource) + self.assertEqual(entry.elapsed_ms, self.ELAPSED_MS) + self.assertEqual(entry.active_units, self.ACTIVE_UNITS) + self.assertEqual(entry.pending_units, self.PENDING_UNITS) + self.assertEqual(entry.completed_units, self.COMPLETED_UNITS) + self.assertEqual(entry.slot_millis, self.SLOT_MILLIS) From 1b76727be16bc4335276f793340bb72d32be7166 Mon Sep 17 00:00:00 2001 From: shollyman Date: Thu, 10 May 2018 13:40:56 -0700 Subject: [PATCH 06/75] BigQuery: improve system test performance (#5319) * improve system test perf * Correct comment --- bigquery/tests/system.py | 120 +++++++++++++++++++-------------------- 1 file changed, 57 insertions(+), 63 deletions(-) diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py index 72ff31a4d133..7d79541c2729 100644 --- a/bigquery/tests/system.py +++ b/bigquery/tests/system.py @@ -385,58 +385,54 @@ def _fetch_single_page(table, selected_fields=None): page = six.next(iterator.pages) return list(page) - def _create_table_many_columns(self, rows): - # Load a table with many columns - dataset = self.temp_dataset(_make_dataset_id('list_rows')) + def _create_table_many_columns(self, rowcount): + # Generate a table of maximum width via CREATE TABLE AS SELECT. + # first column is named 'rowval', and has a value from 1..rowcount + # Subsequent column is named col_ and contains the value N*rowval, + # where N is between 1 and 9999 inclusive. 
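+        # (Columns are thus col_1 .. col_9999; e.g. with rowcount=2 the
+        # generated statement is roughly:
+        #   CREATE TABLE dsname.many_columns AS
+        #   SELECT r as rowval, r * 1 as col_1, ..., r * 9999 as col_9999
+        #   FROM UNNEST(GENERATE_ARRAY(1,2,1)) as r
+        # yielding 10,000 columns per row.)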
+ dsname = _make_dataset_id('wide_schema') + dataset = self.temp_dataset(dsname) table_id = 'many_columns' table_ref = dataset.table(table_id) self.to_delete.insert(0, table_ref) - schema = [ - bigquery.SchemaField( - 'column_{}_with_long_name'.format(col_i), - 'INTEGER') - for col_i in range(len(rows[0]))] - body = '' - for row in rows: - body += ','.join([str(item) for item in row]) - body += '\n' - config = bigquery.LoadJobConfig() - config.schema = schema - job = Config.CLIENT.load_table_from_file( - six.BytesIO(body.encode('ascii')), table_ref, job_config=config) - job.result() - return bigquery.Table(table_ref, schema=schema) - - def test_list_rows_many_columns(self): - rows = [[], []] - # BigQuery tables can have max 10,000 columns - for col_i in range(9999): - rows[0].append(col_i) - rows[1].append(10000 - col_i) - expected_rows = frozenset([tuple(row) for row in rows]) - table = self._create_table_many_columns(rows) - - rows = list(Config.CLIENT.list_rows(table)) - - assert len(rows) == 2 - rows_set = frozenset([tuple(row.values()) for row in rows]) - assert rows_set == expected_rows + colprojections = ','.join( + ['r * {} as col_{}'.format(n, n) for n in range(1, 10000)]) + sql = """ + CREATE TABLE {}.{} + AS + SELECT + r as rowval, + {} + FROM + UNNEST(GENERATE_ARRAY(1,{},1)) as r + """.format(dsname, table_id, colprojections, rowcount) + query_job = Config.CLIENT.query(sql) + query_job.result() + return table_ref def test_query_many_columns(self): - rows = [[], []] - # BigQuery tables can have max 10,000 columns - for col_i in range(9999): - rows[0].append(col_i) - rows[1].append(10000 - col_i) - expected_rows = frozenset([tuple(row) for row in rows]) - table = self._create_table_many_columns(rows) - + # Test working with the widest schema BigQuery supports, 10k columns. + row_count = 2 + table_ref = self._create_table_many_columns(row_count) rows = list(Config.CLIENT.query( - 'SELECT * FROM `{}.many_columns`'.format(table.dataset_id))) - - assert len(rows) == 2 - rows_set = frozenset([tuple(row.values()) for row in rows]) - assert rows_set == expected_rows + 'SELECT * FROM `{}.{}`'.format( + table_ref.dataset_id, table_ref.table_id))) + + self.assertEqual(len(rows), row_count) + + # check field representations adhere to expected values. + correctwidth = 0 + badvals = 0 + for r in rows: + vals = r._xxx_values + rowval = vals[0] + if len(vals) == 10000: + correctwidth = correctwidth + 1 + for n in range(1, 10000): + if vals[n] != rowval * (n): + badvals = badvals + 1 + self.assertEqual(correctwidth, row_count) + self.assertEqual(badvals, 0) def test_insert_rows_then_dump_table(self): NOW_SECONDS = 1448911495.484366 @@ -692,20 +688,25 @@ def test_load_table_from_file_w_explicit_location(self): self.assertEqual( list(sorted(rows)), [('a', 3), ('b', 2), ('c', 1)]) - # Can query from EU. - query_string = 'SELECT MAX(value) FROM `{}.letters`'.format( + # Verify location behavior with queries + query_config = bigquery.QueryJobConfig() + query_config.dry_run = True + + query_string = 'SELECT * FROM `{}.letters` LIMIT 1'.format( dataset.dataset_id) - max_value = list(client.query(query_string, location='EU'))[0][0] - self.assertEqual(max_value, 3) + + eu_query = client.query( + query_string, + location='EU', + job_config=query_config) + self.assertTrue(eu_query.done) # Cannot query from US. with self.assertRaises(BadRequest): - list(client.query(query_string, location='US')) - - # Can copy from EU. 
- copy_job = client.copy_table( - table_ref, dataset.table('letters2'), location='EU') - copy_job.result() + list(client.query( + query_string, + location='US', + job_config=query_config)) # Cannot copy from US. with self.assertRaises(BadRequest): @@ -713,13 +714,6 @@ def test_load_table_from_file_w_explicit_location(self): table_ref, dataset.table('letters2_us'), location='US').result() - # Can extract from EU. - extract_job = client.extract_table( - table_ref, - 'gs://{}/letters.csv'.format(bucket_name), - location='EU') - extract_job.result() - # Cannot extract from US. with self.assertRaises(BadRequest): client.extract_table( From 5644f15c269f3b1d557bc15d7b9961272b757eb1 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 16 May 2018 10:14:30 -0700 Subject: [PATCH 07/75] Modify system tests to use prerelease versions of grpcio (#5304) --- bigquery/nox.py | 3 +++ bigquery_datatransfer/nox.py | 3 +++ bigtable/nox.py | 3 +++ container/nox.py | 3 +++ dataproc/nox.py | 3 +++ datastore/google/cloud/datastore/__init__.py | 2 +- datastore/nox.py | 3 +++ dlp/nox.py | 3 +++ error_reporting/nox.py | 3 +++ firestore/nox.py | 3 +++ language/nox.py | 3 +++ logging/nox.py | 3 +++ monitoring/nox.py | 3 +++ pubsub/nox.py | 3 +++ spanner/nox.py | 3 +++ speech/nox.py | 3 +++ storage/nox.py | 3 +++ translate/nox.py | 3 +++ vision/nox.py | 3 +++ 19 files changed, 55 insertions(+), 1 deletion(-) diff --git a/bigquery/nox.py b/bigquery/nox.py index bcf20c077f53..a286b8651462 100644 --- a/bigquery/nox.py +++ b/bigquery/nox.py @@ -93,6 +93,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) diff --git a/bigquery_datatransfer/nox.py b/bigquery_datatransfer/nox.py index c59f64b19e2f..5058fabc8067 100644 --- a/bigquery_datatransfer/nox.py +++ b/bigquery_datatransfer/nox.py @@ -86,6 +86,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest') diff --git a/bigtable/nox.py b/bigtable/nox.py index 3b9a2aad12f4..fe446a4a3a81 100644 --- a/bigtable/nox.py +++ b/bigtable/nox.py @@ -81,6 +81,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) diff --git a/container/nox.py b/container/nox.py index 2da528928e6c..fefeec276993 100644 --- a/container/nox.py +++ b/container/nox.py @@ -77,6 +77,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. 
session.install('mock', 'pytest') diff --git a/dataproc/nox.py b/dataproc/nox.py index 298196b30399..d7c6a3f3e5c5 100644 --- a/dataproc/nox.py +++ b/dataproc/nox.py @@ -74,6 +74,9 @@ def system(session, py): session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + session.install('pytest') session.install('-e', '.') diff --git a/datastore/google/cloud/datastore/__init__.py b/datastore/google/cloud/datastore/__init__.py index 53494f9f8785..8ceee43f0f74 100644 --- a/datastore/google/cloud/datastore/__init__.py +++ b/datastore/google/cloud/datastore/__init__.py @@ -28,7 +28,7 @@ >>> entity['question'] = u'Life, universe?' # Explicit unicode for text >>> entity['answer'] = 42 >>> entity - + >>> query = client.query(kind='EntityKind') The main concepts with this API are: diff --git a/datastore/nox.py b/datastore/nox.py index f6a123835a5f..e59206d485fc 100644 --- a/datastore/nox.py +++ b/datastore/nox.py @@ -82,6 +82,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) diff --git a/dlp/nox.py b/dlp/nox.py index 896c6a2363be..f7e6cb760ce0 100644 --- a/dlp/nox.py +++ b/dlp/nox.py @@ -75,6 +75,9 @@ def system(session, py): session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + session.install('pytest') session.install('-e', '.') diff --git a/error_reporting/nox.py b/error_reporting/nox.py index c736b1676673..e734e928392c 100644 --- a/error_reporting/nox.py +++ b/error_reporting/nox.py @@ -108,6 +108,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) diff --git a/firestore/nox.py b/firestore/nox.py index d841a999caab..87aa2d1a8157 100644 --- a/firestore/nox.py +++ b/firestore/nox.py @@ -83,6 +83,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) diff --git a/language/nox.py b/language/nox.py index 6b070d9c2e5b..dd071c038292 100644 --- a/language/nox.py +++ b/language/nox.py @@ -81,6 +81,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) diff --git a/logging/nox.py b/logging/nox.py index c02870ba753b..e977fe299748 100644 --- a/logging/nox.py +++ b/logging/nox.py @@ -100,6 +100,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. 
session.install('mock', 'pytest', *LOCAL_DEPS) diff --git a/monitoring/nox.py b/monitoring/nox.py index a22dd18a7a5e..468572b7590a 100644 --- a/monitoring/nox.py +++ b/monitoring/nox.py @@ -86,6 +86,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) diff --git a/pubsub/nox.py b/pubsub/nox.py index 7deabf7dbfd0..f772b2b33d57 100644 --- a/pubsub/nox.py +++ b/pubsub/nox.py @@ -81,6 +81,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) diff --git a/spanner/nox.py b/spanner/nox.py index 360333449c3f..98f009c30b2c 100644 --- a/spanner/nox.py +++ b/spanner/nox.py @@ -83,6 +83,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) diff --git a/speech/nox.py b/speech/nox.py index 9932ac3e295f..ee5edd578ef0 100644 --- a/speech/nox.py +++ b/speech/nox.py @@ -79,6 +79,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) diff --git a/storage/nox.py b/storage/nox.py index 64967185e939..8c3a55def220 100644 --- a/storage/nox.py +++ b/storage/nox.py @@ -82,6 +82,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) diff --git a/translate/nox.py b/translate/nox.py index c8e3a6e48c0a..a5e985f89f4b 100644 --- a/translate/nox.py +++ b/translate/nox.py @@ -76,6 +76,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package into the # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) diff --git a/vision/nox.py b/vision/nox.py index 73434d095b2c..bd9161231f50 100644 --- a/vision/nox.py +++ b/vision/nox.py @@ -81,6 +81,9 @@ def system(session, py): # Set the virtualenv dirname. session.virtualenv_dirname = 'sys-' + py + # Use pre-release gRPC for system tests. + session.install('--pre', 'grpcio') + # Install all test dependencies, then install this package in-place. 
session.install('pytest', '../core/', '../storage/') session.install('../test_utils/') From 1b4cf400d8a01ca5a0d19111c4938ff06800163b Mon Sep 17 00:00:00 2001 From: Aneep Tandel Date: Thu, 17 May 2018 01:16:54 +0530 Subject: [PATCH 08/75] BigTable: Modify system test for new GAPIC code (#5302) * Provide new auto-generated layer for Bigtable. * Change bigtable_pb2 imports to use from gapic library. * Add retry for read rows * Add parameter start_inclusive to _create_row_request * Add retry for Deadline Exceeded on read rows * Refactor yield_rows retry * Add grpc google iam v1 on setup.py on bigtable * Change routing_header to use to_grpc_metadata --- bigtable/google/cloud/bigtable/client.py | 65 ++-- bigtable/google/cloud/bigtable/cluster.py | 10 +- bigtable/google/cloud/bigtable/instance.py | 49 ++- bigtable/tests/system.py | 16 +- bigtable/tests/unit/_testing.py | 14 + bigtable/tests/unit/test_client.py | 86 ++++-- bigtable/tests/unit/test_cluster.py | 67 ++-- bigtable/tests/unit/test_column_family.py | 41 +-- bigtable/tests/unit/test_instance.py | 132 ++++++-- bigtable/tests/unit/test_row.py | 63 ++-- bigtable/tests/unit/test_table.py | 342 ++++++++++++++------- 11 files changed, 601 insertions(+), 284 deletions(-) diff --git a/bigtable/google/cloud/bigtable/client.py b/bigtable/google/cloud/bigtable/client.py index 0e6ed808e102..7d509170f8b9 100644 --- a/bigtable/google/cloud/bigtable/client.py +++ b/bigtable/google/cloud/bigtable/client.py @@ -29,13 +29,21 @@ """ -from google.cloud.bigtable.instance import Instance -from google.cloud.bigtable.instance import _EXISTING_INSTANCE_LOCATION_ID +from google.api_core.gapic_v1 import client_info from google.cloud import bigtable_v2 from google.cloud import bigtable_admin_v2 +from google.cloud.bigtable import __version__ +from google.cloud.bigtable.instance import Instance +from google.cloud.bigtable.instance import _EXISTING_INSTANCE_LOCATION_ID + +from google.cloud.client import ClientWithProject + +_CLIENT_INFO = client_info.ClientInfo( + client_library_version=__version__) +SPANNER_ADMIN_SCOPE = 'https://www.googleapis.com/auth/spanner.admin' ADMIN_SCOPE = 'https://www.googleapis.com/auth/bigtable.admin' """Scope for interacting with the Cluster Admin and Table Admin APIs.""" DATA_SCOPE = 'https://www.googleapis.com/auth/bigtable.data' @@ -44,7 +52,7 @@ """Scope for reading table data.""" -class Client(object): +class Client(ClientWithProject): """Client for interacting with Google Cloud Bigtable API. .. note:: @@ -81,6 +89,9 @@ class Client(object): :raises: :class:`ValueError ` if both ``read_only`` and ``admin`` are :data:`True` """ + _table_data_client = None + _table_admin_client = None + _instance_admin_client = None def __init__(self, project=None, credentials=None, read_only=False, admin=False, channel=None): @@ -90,13 +101,11 @@ def __init__(self, project=None, credentials=None, # NOTE: We set the scopes **before** calling the parent constructor. # It **may** use those scopes in ``with_scopes_if_required``. - self.project = project self._read_only = bool(read_only) self._admin = bool(admin) self._channel = channel - self._credentials = credentials self.SCOPE = self._get_scopes() - super(Client, self).__init__() + super(Client, self).__init__(project=project, credentials=credentials) def _get_scopes(self): """Get the scopes corresponding to admin / read-only state. @@ -130,21 +139,27 @@ def project_path(self): :rtype: str :returns: Return a fully-qualified project string. 
""" - instance_client = self._instance_admin_client + instance_client = self.instance_admin_client return instance_client.project_path(self.project) @property - def _table_data_client(self): + def table_data_client(self): """Getter for the gRPC stub used for the Table Admin API. :rtype: :class:`.bigtable_v2.BigtableClient` :returns: A BigtableClient object. """ - return bigtable_v2.BigtableClient(channel=self._channel, - credentials=self._credentials) + if self._table_data_client is None: + if not self._admin: + raise ValueError('Client is not an admin client.') + self._table_data_client = ( + bigtable_v2.BigtableClient(credentials=self._credentials, + client_info=_CLIENT_INFO)) + + return self._table_data_client @property - def _table_admin_client(self): + def table_admin_client(self): """Getter for the gRPC stub used for the Table Admin API. :rtype: :class:`.bigtable_admin_pb2.BigtableTableAdmin` @@ -153,13 +168,17 @@ def _table_admin_client(self): client is not an admin client or if it has not been :meth:`start`-ed. """ - if not self._admin: - raise ValueError('Client is not an admin client.') - return bigtable_admin_v2.BigtableTableAdminClient( - channel=self._channel, credentials=self._credentials) + if self._table_admin_client is None: + if not self._admin: + raise ValueError('Client is not an admin client.') + self._table_admin_client = ( + bigtable_admin_v2.BigtableTableAdminClient( + credentials=self._credentials, client_info=_CLIENT_INFO)) + + return self._table_admin_client @property - def _instance_admin_client(self): + def instance_admin_client(self): """Getter for the gRPC stub used for the Table Admin API. :rtype: :class:`.bigtable_admin_pb2.BigtableInstanceAdmin` @@ -168,10 +187,14 @@ def _instance_admin_client(self): client is not an admin client or if it has not been :meth:`start`-ed. """ - if not self._admin: - raise ValueError('Client is not an admin client.') - return bigtable_admin_v2.BigtableInstanceAdminClient( - channel=self._channel, credentials=self._credentials) + if self._instance_admin_client is None: + if not self._admin: + raise ValueError('Client is not an admin client.') + self._instance_admin_client = ( + bigtable_admin_v2.BigtableInstanceAdminClient( + credentials=self._credentials, client_info=_CLIENT_INFO)) + + return self._instance_admin_client def instance(self, instance_id, location=_EXISTING_INSTANCE_LOCATION_ID, display_name=None): @@ -202,4 +225,4 @@ def list_instances(self): :rtype: :class:`~google.api_core.page_iterator.Iterator` :returns: A list of Instance. """ - return self._instance_admin_client.list_instances(self.project_path) + return self.instance_admin_client.list_instances(self.project_path) diff --git a/bigtable/google/cloud/bigtable/cluster.py b/bigtable/google/cloud/bigtable/cluster.py index 43c200d02512..f5c31d8bc4ce 100644 --- a/bigtable/google/cloud/bigtable/cluster.py +++ b/bigtable/google/cloud/bigtable/cluster.py @@ -69,7 +69,7 @@ def name(self): :rtype: str :returns: The cluster name. """ - return self._instance._client._instance_admin_client.cluster_path( + return self._instance._client.instance_admin_client.cluster_path( self._instance._client.project, self._instance.instance_id, self.cluster_id) @@ -90,7 +90,7 @@ def __ne__(self, other): def reload(self): """Reload the metadata for this cluster.""" - self._instance._client._instance_admin_client.get_cluster(self.name) + self._instance._client.instance_admin_client.get_cluster(self.name) def create(self): """Create this cluster. 
@@ -113,7 +113,7 @@ def create(self): create operation. """ client = self._instance._client - return client._instance_admin_client.create_cluster( + return client.instance_admin_client.create_cluster( self._instance.name, self.cluster_id, {}) def update(self, location='', serve_nodes=0): @@ -147,7 +147,7 @@ def update(self, location='', serve_nodes=0): update operation. """ client = self._instance._client - return client._instance_admin_client.update_cluster( + return client.instance_admin_client.update_cluster( self.name, location, serve_nodes) def delete(self): @@ -171,4 +171,4 @@ def delete(self): permanently deleted. """ client = self._instance._client - client._instance_admin_client.delete_cluster(self.name) + client.instance_admin_client.delete_cluster(self.name) diff --git a/bigtable/google/cloud/bigtable/instance.py b/bigtable/google/cloud/bigtable/instance.py index 1030aab9b939..8543f88409d4 100644 --- a/bigtable/google/cloud/bigtable/instance.py +++ b/bigtable/google/cloud/bigtable/instance.py @@ -18,13 +18,16 @@ import re from google.cloud.bigtable.table import Table +from google.cloud.bigtable.cluster import DEFAULT_SERVE_NODES from google.cloud.bigtable_admin_v2 import enums +from google.cloud.bigtable_admin_v2.types import instance_pb2 _EXISTING_INSTANCE_LOCATION_ID = 'see-existing-cluster' _INSTANCE_NAME_RE = re.compile(r'^projects/(?P[^/]+)/' r'instances/(?P[a-z][-a-z0-9]*)$') +_STORAGE_TYPE_UNSPECIFIED = enums.StorageType.STORAGE_TYPE_UNSPECIFIED class Instance(object): @@ -59,15 +62,31 @@ class Instance(object): Cloud Console UI. (Must be between 4 and 30 characters.) If this value is not set in the constructor, will fall back to the instance ID. + + :type serve_nodes: int + :param serve_nodes: (Optional) The number of nodes in the instance's + cluster; used to set up the instance's cluster. + + :type default_storage_type: int + :param default_storage_type: (Optional) The default values are + STORAGE_TYPE_UNSPECIFIED = 0: The user did + not specify a storage type. + SSD = 1: Flash (SSD) storage should be + used. + HDD = 2: Magnetic drive (HDD) storage + should be used. """ def __init__(self, instance_id, client, location_id=_EXISTING_INSTANCE_LOCATION_ID, - display_name=None): + display_name=None, serve_nodes=DEFAULT_SERVE_NODES, + default_storage_type=_STORAGE_TYPE_UNSPECIFIED): self.instance_id = instance_id self.display_name = display_name or instance_id self._cluster_location_id = location_id + self._cluster_serve_nodes = serve_nodes self._client = client + self._default_storage_type = default_storage_type @classmethod def from_pb(cls, instance_pb, client): @@ -140,6 +159,15 @@ def __eq__(self, other): def __ne__(self, other): return not self == other + def reload(self): + """Reload the metadata for this instance.""" + instance_pb = self._client._instance_admin_client.get_instance( + self.name) + + # NOTE: _update_from_pb does not check that the project and + # instance ID on the response match the request. + self._update_from_pb(instance_pb) + def create(self): """Create this instance. @@ -160,10 +188,25 @@ def create(self): :returns: The long-running operation corresponding to the create operation. 
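+        Note: a single cluster, named after the instance ID
+        (``<instance_id>-cluster``), is created alongside the instance,
+        using the location, serve nodes, and default storage type
+        configured on this instance.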
""" + clusters = {} + cluster_id = '{}-cluster'.format(self.instance_id) + cluster_name = self._client._instance_admin_client.cluster_path( + self._client.project, self.instance_id, cluster_id) + location = self._client._instance_admin_client.location_path( + self._client.project, self._cluster_location_id) + cluster = instance_pb2.Cluster( + name=cluster_name, location=location, + serve_nodes=self._cluster_serve_nodes, + default_storage_type=self._default_storage_type) + instance = instance_pb2.Instance( + display_name=self.display_name + ) + clusters[cluster_id] = cluster parent = self._client.project_path + return self._client._instance_admin_client.create_instance( - parent=parent, instance_id=self.instance_id, instance={}, - clusters={}) + parent=parent, instance_id=self.instance_id, instance=instance, + clusters=clusters) def update(self): """Update this instance. diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py index 12bd102690c0..ff87f1bc9444 100644 --- a/bigtable/tests/system.py +++ b/bigtable/tests/system.py @@ -86,12 +86,13 @@ def setUpModule(): if not Config.IN_EMULATOR: retry = RetryErrors(GrpcRendezvous, error_predicate=_retry_on_unavailable) - instances, failed_locations = retry(Config.CLIENT.list_instances)() - if len(failed_locations) != 0: + instances_response = retry(Config.CLIENT.list_instances)() + + if len(instances_response.failed_locations) != 0: raise ValueError('List instances failed in module set up.') - EXISTING_INSTANCES[:] = instances + EXISTING_INSTANCES[:] = instances_response.instances # After listing, create the test instance. created_op = Config.INSTANCE.create() @@ -116,11 +117,12 @@ def tearDown(self): instance.delete() def test_list_instances(self): - instances, failed_locations = Config.CLIENT.list_instances() - self.assertEqual(failed_locations, []) + instances_response = Config.CLIENT.list_instances() + self.assertEqual(instances_response.failed_locations, []) # We have added one new instance in `setUpModule`. 
- self.assertEqual(len(instances), len(EXISTING_INSTANCES) + 1) - for instance in instances: + self.assertEqual(len(instances_response.instances), + len(EXISTING_INSTANCES) + 1) + for instance in instances_response.instances: instance_existence = (instance in EXISTING_INSTANCES or instance == Config.INSTANCE) self.assertTrue(instance_existence) diff --git a/bigtable/tests/unit/_testing.py b/bigtable/tests/unit/_testing.py index 06881806de1e..3bae0d9ce4a7 100644 --- a/bigtable/tests/unit/_testing.py +++ b/bigtable/tests/unit/_testing.py @@ -15,9 +15,23 @@ """Mocks used to emulate gRPC generated objects.""" +import mock + + class _FakeStub(object): """Acts as a gPRC stub.""" def __init__(self, *results): self.results = results self.method_calls = [] + + +def _make_credentials(): + import google.auth.credentials + + class _CredentialsWithScopes( + google.auth.credentials.Credentials, + google.auth.credentials.Scoped): + pass + + return mock.Mock(spec=_CredentialsWithScopes) diff --git a/bigtable/tests/unit/test_client.py b/bigtable/tests/unit/test_client.py index 3725b20bb8fe..8efbbbba4cdf 100644 --- a/bigtable/tests/unit/test_client.py +++ b/bigtable/tests/unit/test_client.py @@ -17,27 +17,7 @@ import mock - -def _make_credentials(): - import google.auth.credentials - - class _CredentialsWithScopes( - google.auth.credentials.Credentials, - google.auth.credentials.Scoped): - pass - - return mock.Mock(spec=_CredentialsWithScopes) - - -@mock.patch('google.auth.transport.grpc.secure_authorized_channel') -def _make_channel(secure_authorized_channel): - from google.api_core import grpc_helpers - target = 'example.com:443' - - channel = grpc_helpers.create_channel( - target, credentials=mock.sentinel.credentials) - - return channel +from ._testing import _make_credentials class TestClient(unittest.TestCase): @@ -93,13 +73,14 @@ def test_credentials_getter(self): project = 'PROJECT' client = self._make_one( project=project, credentials=credentials) - self.assertIs(client._credentials, credentials) + self.assertIs(client._credentials, + credentials.with_scopes.return_value) def test_project_name_property(self): credentials = _make_credentials() project = 'PROJECT' - client = self._make_one( - project=project, credentials=credentials, admin=True) + client = self._make_one(project=project, credentials=credentials, + admin=True) project_name = 'projects/' + project self.assertEqual(client.project_path, project_name) @@ -141,20 +122,53 @@ def test_instance_factory_w_explicit_serve_nodes(self): self.assertIs(instance._client, client) def test_admin_client_w_value_error(self): - channel = _make_channel() - client = self._make_one(project=self.PROJECT, channel=channel) + credentials = _make_credentials() + client = self._make_one(project=self.PROJECT, credentials=credentials) with self.assertRaises(ValueError): - client._table_admin_client() + client.table_admin_client() + + with self.assertRaises(ValueError): + client.instance_admin_client() + + def test_table_data_client(self): + credentials = _make_credentials() + client = self._make_one(project=self.PROJECT, credentials=credentials, + admin=True) + + table_data_client = client.table_data_client + self.assertEqual(client._table_data_client, table_data_client) + + client._table_data_client = object() + table_data_client = client.table_data_client + self.assertEqual(client.table_data_client, table_data_client) + + def test_table_admin_client(self): + credentials = _make_credentials() + client = self._make_one(project=self.PROJECT, credentials=credentials, 
+ admin=True) + + table_admin_client = client.table_admin_client + self.assertEqual(client._table_admin_client, table_admin_client) + + client._table_admin_client = object() + table_admin_client = client.table_admin_client + self.assertEqual(client._table_admin_client, table_admin_client) + + def test_table_data_client_w_value_error(self): + credentials = _make_credentials() + client = self._make_one(project=self.PROJECT, credentials=credentials) with self.assertRaises(ValueError): - client._instance_admin_client() + client.table_data_client() def test_list_instances(self): from google.cloud.bigtable_admin_v2.proto import ( instance_pb2 as data_v2_pb2) from google.cloud.bigtable_admin_v2.proto import ( bigtable_instance_admin_pb2 as messages_v2_pb2) + from google.cloud.bigtable_admin_v2.gapic import \ + bigtable_instance_admin_client FAILED_LOCATION = 'FAILED' INSTANCE_ID1 = 'instance-id1' @@ -164,8 +178,10 @@ def test_list_instances(self): INSTANCE_NAME2 = ( 'projects/' + self.PROJECT + '/instances/' + INSTANCE_ID2) - channel = _make_channel() - client = self._make_one(project=self.PROJECT, channel=channel, + credentials = _make_credentials() + api = bigtable_instance_admin_client.BigtableInstanceAdminClient( + mock.Mock()) + client = self._make_one(project=self.PROJECT, credentials=credentials, admin=True) # Create response_pb @@ -185,12 +201,14 @@ def test_list_instances(self): ], ) + expected_result = response_pb + # Patch the stub used by the API method. + client._instance_admin_client = api bigtable_instance_stub = ( - client._instance_admin_client.bigtable_instance_admin_stub) + client.instance_admin_client.bigtable_instance_admin_stub) bigtable_instance_stub.ListInstances.side_effect = [response_pb] - expected_result = response_pb # Perform the method and check the result. 
- result = client.list_instances() - self.assertEqual(result, expected_result) + response = client.list_instances() + self.assertEqual(response, expected_result) diff --git a/bigtable/tests/unit/test_cluster.py b/bigtable/tests/unit/test_cluster.py index d520bd504ee1..ec3887fc3ef0 100644 --- a/bigtable/tests/unit/test_cluster.py +++ b/bigtable/tests/unit/test_cluster.py @@ -17,6 +17,8 @@ import mock +from ._testing import _make_credentials + class TestCluster(unittest.TestCase): @@ -27,16 +29,6 @@ class TestCluster(unittest.TestCase): '/instances/' + INSTANCE_ID + '/clusters/' + CLUSTER_ID) - @mock.patch('google.auth.transport.grpc.secure_authorized_channel') - def _make_channel(self, secure_authorized_channel): - from google.api_core import grpc_helpers - target = 'example.com:443' - - channel = grpc_helpers.create_channel( - target, credentials=mock.sentinel.credentials) - - return channel - @staticmethod def _get_target_class(): from google.cloud.bigtable.cluster import Cluster @@ -80,12 +72,11 @@ def test_constructor_non_default(self): def test_name_property(self): from google.cloud.bigtable.instance import Instance - channel = self._make_channel() - client = self._make_client(project=self.PROJECT, channel=channel, - admin=True) + credentials = _make_credentials() + client = self._make_client(project=self.PROJECT, + credentials=credentials, admin=True) instance = Instance(self.INSTANCE_ID, client) cluster = self._make_one(self.CLUSTER_ID, instance) - instance = Instance(self.INSTANCE_ID, client) self.assertEqual(cluster.name, self.CLUSTER_NAME) @@ -121,11 +112,15 @@ def test___ne__(self): def test_reload(self): from google.cloud.bigtable.cluster import DEFAULT_SERVE_NODES from google.cloud.bigtable.instance import Instance + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_instance_admin_client) LOCATION = 'LOCATION' - channel = self._make_channel() - client = self._make_client(project=self.PROJECT, channel=channel, - admin=True) + api = bigtable_instance_admin_client.BigtableInstanceAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project=self.PROJECT, + credentials=credentials, admin=True) instance = Instance(self.INSTANCE_ID, client) cluster = self._make_one(self.CLUSTER_ID, instance) @@ -136,6 +131,7 @@ def test_reload(self): ) # Patch the stub used by the API method. + client._instance_admin_client = api instance_admin_client = client._instance_admin_client instance_stub = instance_admin_client.bigtable_instance_admin_stub instance_stub.GetCluster.side_effect = [response_pb] @@ -154,10 +150,14 @@ def test_create(self): from google.api_core import operation from google.longrunning import operations_pb2 from google.cloud.bigtable.instance import Instance - - channel = self._make_channel() - client = self._make_client(project=self.PROJECT, channel=channel, - admin=True) + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_instance_admin_client) + + api = bigtable_instance_admin_client.BigtableInstanceAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project=self.PROJECT, + credentials=credentials, admin=True) instance = Instance(self.INSTANCE_ID, client) cluster = self._make_one(self.CLUSTER_ID, instance) @@ -169,6 +169,7 @@ def test_create(self): response_pb = operations_pb2.Operation(name=OP_NAME) # Patch the stub used by the API method. 
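+        # (Pre-assigning the GAPIC client built above seeds the lazily
+        # created instance_admin_client property, so the test never opens
+        # a real channel.)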
+ client._instance_admin_client = api instance_admin_client = client._instance_admin_client instance_stub = instance_admin_client.bigtable_instance_admin_stub instance_stub.CreateCluster.side_effect = [response_pb] @@ -191,15 +192,19 @@ def test_update(self): instance_pb2 as data_v2_pb2) from google.cloud.bigtable_admin_v2.proto import ( bigtable_instance_admin_pb2 as messages_v2_pb2) + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_instance_admin_client) NOW = datetime.datetime.utcnow() NOW_PB = _datetime_to_pb_timestamp(NOW) SERVE_NODES = 81 - channel = self._make_channel() - client = self._make_client(project=self.PROJECT, channel=channel, - admin=True) + api = bigtable_instance_admin_client.BigtableInstanceAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project=self.PROJECT, + credentials=credentials, admin=True) instance = Instance(self.INSTANCE_ID, client) cluster = self._make_one(self.CLUSTER_ID, instance, serve_nodes=SERVE_NODES) @@ -227,6 +232,7 @@ def test_update(self): ) # Patch the stub used by the API method. + client._instance_admin_client = api instance_admin_client = client._instance_admin_client instance_stub = instance_admin_client.bigtable_instance_admin_stub instance_stub.UpdateCluster.side_effect = [response_pb] @@ -245,10 +251,14 @@ def test_delete(self): from google.protobuf import empty_pb2 from google.cloud.bigtable.cluster import DEFAULT_SERVE_NODES from google.cloud.bigtable.instance import Instance - - channel = self._make_channel() - client = self._make_client(project=self.PROJECT, channel=channel, - admin=True) + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_instance_admin_client) + + api = bigtable_instance_admin_client.BigtableInstanceAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project=self.PROJECT, + credentials=credentials, admin=True) instance = Instance(self.INSTANCE_ID, client) cluster = self._make_one(self.CLUSTER_ID, instance, serve_nodes=DEFAULT_SERVE_NODES) @@ -257,6 +267,7 @@ def test_delete(self): response_pb = empty_pb2.Empty() # Patch the stub used by the API method. 
+ client._instance_admin_client = api instance_admin_client = client._instance_admin_client instance_stub = instance_admin_client.bigtable_instance_admin_stub instance_stub.DeleteCluster.side_effect = [response_pb] diff --git a/bigtable/tests/unit/test_column_family.py b/bigtable/tests/unit/test_column_family.py index a6178ffd6ba3..9443a198093b 100644 --- a/bigtable/tests/unit/test_column_family.py +++ b/bigtable/tests/unit/test_column_family.py @@ -16,16 +16,7 @@ import mock - -@mock.patch('google.auth.transport.grpc.secure_authorized_channel') -def _make_channel(secure_authorized_channel): - from google.api_core import grpc_helpers - target = 'example.com:443' - - channel = grpc_helpers.create_channel( - target, credentials=mock.sentinel.credentials) - - return channel +from ._testing import _make_credentials class TestMaxVersionsGCRule(unittest.TestCase): @@ -374,6 +365,8 @@ def _create_test_helper(self, gc_rule=None): from google.cloud.bigtable_admin_v2.proto import ( bigtable_table_admin_pb2 as table_admin_v2_pb2) from tests.unit._testing import _FakeStub + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) project_id = 'project-id' zone = 'zone' @@ -383,9 +376,10 @@ def _create_test_helper(self, gc_rule=None): table_name = ('projects/' + project_id + '/zones/' + zone + '/clusters/' + cluster_id + '/tables/' + table_id) - channel = _make_channel() - client = self._make_client(project=project_id, channel=channel, - admin=True) + api = bigtable_table_admin_client.BigtableTableAdminClient(mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project=project_id, + credentials=credentials, admin=True) table = _Table(table_name, client=client) column_family = self._make_one( column_family_id, table, gc_rule=gc_rule) @@ -407,6 +401,7 @@ def _create_test_helper(self, gc_rule=None): # Patch the stub used by the API method. stub = _FakeStub(response_pb) + client._table_admin_client = api client._table_admin_client.bigtable_table_admin_stub = stub # Create expected_result. @@ -430,6 +425,8 @@ def _update_test_helper(self, gc_rule=None): from tests.unit._testing import _FakeStub from google.cloud.bigtable_admin_v2.proto import ( bigtable_table_admin_pb2 as table_admin_v2_pb2) + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) project_id = 'project-id' zone = 'zone' @@ -439,9 +436,10 @@ def _update_test_helper(self, gc_rule=None): table_name = ('projects/' + project_id + '/zones/' + zone + '/clusters/' + cluster_id + '/tables/' + table_id) - channel = _make_channel() - client = self._make_client(project=project_id, channel=channel, - admin=True) + api = bigtable_table_admin_client.BigtableTableAdminClient(mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project=project_id, + credentials=credentials, admin=True) table = _Table(table_name, client=client) column_family = self._make_one( column_family_id, table, gc_rule=gc_rule) @@ -463,6 +461,7 @@ def _update_test_helper(self, gc_rule=None): # Patch the stub used by the API method. stub = _FakeStub(response_pb) + client._table_admin_client = api client._table_admin_client.bigtable_table_admin_stub = stub # Create expected_result. 
@@ -487,6 +486,8 @@ def test_delete(self):
         from google.cloud.bigtable_admin_v2.proto import (
             bigtable_table_admin_pb2 as table_admin_v2_pb2)
         from tests.unit._testing import _FakeStub
+        from google.cloud.bigtable_admin_v2.gapic import (
+            bigtable_table_admin_client)

         project_id = 'project-id'
         zone = 'zone'
@@ -496,9 +497,10 @@ def test_delete(self):
         table_name = ('projects/' + project_id + '/zones/' + zone +
                       '/clusters/' + cluster_id + '/tables/' + table_id)

-        channel = _make_channel()
-        client = self._make_client(project=project_id, channel=channel,
-                                   admin=True)
+        api = bigtable_table_admin_client.BigtableTableAdminClient(mock.Mock())
+        credentials = _make_credentials()
+        client = self._make_client(project=project_id,
+                                   credentials=credentials, admin=True)
         table = _Table(table_name, client=client)
         column_family = self._make_one(column_family_id, table)

@@ -514,6 +516,7 @@ def test_delete(self):

         # Patch the stub used by the API method.
         stub = _FakeStub(response_pb)
+        client._table_admin_client = api
         client._table_admin_client.bigtable_table_admin_stub = stub

         # Create expected_result.
diff --git a/bigtable/tests/unit/test_instance.py b/bigtable/tests/unit/test_instance.py
index f13b38d824a5..b3654ce8091e 100644
--- a/bigtable/tests/unit/test_instance.py
+++ b/bigtable/tests/unit/test_instance.py
@@ -17,16 +17,7 @@

 import mock

-
-@mock.patch('google.auth.transport.grpc.secure_authorized_channel')
-def _make_channel(secure_authorized_channel):
-    from google.api_core import grpc_helpers
-    target = 'example.com:443'
-
-    channel = grpc_helpers.create_channel(
-        target, credentials=mock.sentinel.credentials)
-
-    return channel
+from ._testing import _make_credentials


 class TestInstance(unittest.TestCase):
@@ -162,9 +153,17 @@ def test_from_pb_project_mistmatch(self):
             klass.from_pb(instance_pb, client)

     def test_name_property(self):
-        channel = _make_channel()
-        client = self._make_client(project=self.PROJECT, channel=channel,
-                                   admin=True)
+        from google.cloud.bigtable_admin_v2.gapic import (
+            bigtable_instance_admin_client)
+
+        api = bigtable_instance_admin_client.BigtableInstanceAdminClient(
+            mock.Mock())
+        credentials = _make_credentials()
+        client = self._make_client(project=self.PROJECT,
+                                   credentials=credentials, admin=True)
+
+        # Patch the API method.
+        client._instance_admin_client = api
         instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID)

         self.assertEqual(instance.name, self.INSTANCE_NAME)
@@ -193,6 +192,44 @@ def test___ne__(self):
         instance2 = self._make_one('instance_id2', 'client2',
                                    self.LOCATION_ID)
         self.assertNotEqual(instance1, instance2)

+    def test_reload(self):
+        from google.cloud.bigtable_admin_v2.proto import (
+            instance_pb2 as data_v2_pb2)
+        from google.cloud.bigtable_admin_v2.gapic import (
+            bigtable_instance_admin_client)
+
+        api = bigtable_instance_admin_client.BigtableInstanceAdminClient(
+            mock.Mock())
+        credentials = _make_credentials()
+        client = self._make_client(project=self.PROJECT,
+                                   credentials=credentials, admin=True)
+        instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID)
+
+        # Create response_pb
+        DISPLAY_NAME = u'hey-hi-hello'
+        response_pb = data_v2_pb2.Instance(
+            display_name=DISPLAY_NAME,
+        )
+
+        # Patch the stub used by the API method.
+        client._instance_admin_client = api
+        bigtable_instance_stub = (
+            client._instance_admin_client.bigtable_instance_admin_stub)
+        bigtable_instance_stub.GetInstance.side_effect = [response_pb]
+
+        # Create expected_result.
+        expected_result = None  # reload() has no return value.
+
+        # Check Instance optional config values before.
+        self.assertEqual(instance.display_name, self.INSTANCE_ID)
+
+        # Perform the method and check the result.
+        result = instance.reload()
+        self.assertEqual(result, expected_result)
+
+        # Check Instance optional config values after.
+        self.assertEqual(instance.display_name, DISPLAY_NAME)
+
     def test_create(self):
         import datetime
         from google.api_core import operation
@@ -202,12 +239,16 @@ def test_create(self):
             bigtable_instance_admin_pb2 as messages_v2_pb2)
         from google.cloud._helpers import _datetime_to_pb_timestamp
         from tests.unit._testing import _FakeStub
+        from google.cloud.bigtable_admin_v2.gapic import (
+            bigtable_instance_admin_client)

         NOW = datetime.datetime.utcnow()
         NOW_PB = _datetime_to_pb_timestamp(NOW)
-        channel = _make_channel()
-        client = self._make_client(project=self.PROJECT, channel=channel,
-                                   admin=True)
+        api = bigtable_instance_admin_client.BigtableInstanceAdminClient(
+            mock.Mock())
+        credentials = _make_credentials()
+        client = self._make_client(project=self.PROJECT,
+                                   credentials=credentials, admin=True)
         instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID,
                                   display_name=self.DISPLAY_NAME)

@@ -225,6 +266,7 @@ def test_create(self):

         # Patch the stub used by the API method.
         stub = _FakeStub(response_pb)
+        client._instance_admin_client = api
         client._instance_admin_client.bigtable_instance_admin_stub = stub

         # Perform the method and check the result.
@@ -239,10 +281,14 @@ def test_create_w_explicit_serve_nodes(self):
         from google.api_core import operation
         from google.longrunning import operations_pb2
         from tests.unit._testing import _FakeStub
-
-        channel = _make_channel()
-        client = self._make_client(project=self.PROJECT, channel=channel,
-                                   admin=True)
+        from google.cloud.bigtable_admin_v2.gapic import (
+            bigtable_instance_admin_client)
+
+        api = bigtable_instance_admin_client.BigtableInstanceAdminClient(
+            mock.Mock())
+        credentials = _make_credentials()
+        client = self._make_client(project=self.PROJECT,
+                                   credentials=credentials, admin=True)
         instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID)

         # Create response_pb
@@ -250,6 +296,7 @@ def test_create_w_explicit_serve_nodes(self):

         # Patch the stub used by the API method.
         stub = _FakeStub(response_pb)
+        client._instance_admin_client = api
         client._instance_admin_client.bigtable_instance_admin_stub = stub

         # Perform the method and check the result.
@@ -258,12 +305,20 @@ def test_create_w_explicit_serve_nodes(self):
         self.assertIsInstance(result, operation.Operation)

     def test_update(self):
-        channel = _make_channel()
-        client = self._make_client(project=self.PROJECT, channel=channel,
-                                   admin=True)
+        from google.cloud.bigtable_admin_v2.gapic import (
+            bigtable_instance_admin_client)
+
+        api = bigtable_instance_admin_client.BigtableInstanceAdminClient(
+            mock.Mock())
+        credentials = _make_credentials()
+        client = self._make_client(project=self.PROJECT,
+                                   credentials=credentials, admin=True)
         instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID,
                                   display_name=self.DISPLAY_NAME)

+        # Mock api calls
+        client._instance_admin_client = api
+
         # Create expected_result.
expected_result = None @@ -273,11 +328,19 @@ def test_update(self): self.assertEqual(result, expected_result) def test_delete(self): - channel = _make_channel() - client = self._make_client(project=self.PROJECT, channel=channel, - admin=True) + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_instance_admin_client) + + api = bigtable_instance_admin_client.BigtableInstanceAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project=self.PROJECT, + credentials=credentials, admin=True) instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID) + # Mock api calls + client._instance_admin_client = api + # Create expected_result. expected_result = None # delete() has no return value. @@ -291,10 +354,17 @@ def _list_tables_helper(self, table_name=None): table_pb2 as table_data_v2_pb2) from google.cloud.bigtable_admin_v2.proto import ( bigtable_table_admin_pb2 as table_messages_v1_pb2) - - channel = _make_channel() - client = self._make_client(project=self.PROJECT, channel=channel, - admin=True) + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client, bigtable_instance_admin_client) + + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + instance_api = ( + bigtable_instance_admin_client.BigtableInstanceAdminClient( + mock.Mock())) + credentials = _make_credentials() + client = self._make_client(project=self.PROJECT, + credentials=credentials, admin=True) instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID) # Create response_pb @@ -308,6 +378,8 @@ def _list_tables_helper(self, table_name=None): ) # Patch the stub used by the API method. + client._table_admin_client = table_api + client._instance_admin_client = instance_api bigtable_table_stub = ( client._table_admin_client.bigtable_table_admin_stub) bigtable_table_stub.ListTables.side_effect = [response_pb] diff --git a/bigtable/tests/unit/test_row.py b/bigtable/tests/unit/test_row.py index c17ed03cb3d6..39e701adfb26 100644 --- a/bigtable/tests/unit/test_row.py +++ b/bigtable/tests/unit/test_row.py @@ -17,16 +17,7 @@ import mock - -@mock.patch('google.auth.transport.grpc.secure_authorized_channel') -def _make_channel(secure_authorized_channel): - from google.api_core import grpc_helpers - target = 'example.com:443' - - channel = grpc_helpers.create_channel( - target, credentials=mock.sentinel.credentials) - - return channel +from ._testing import _make_credentials class TestRow(unittest.TestCase): @@ -343,6 +334,7 @@ def test_delete_cells_with_string_columns(self): def test_commit(self): from google.protobuf import empty_pb2 + from google.cloud.bigtable_v2.gapic import bigtable_client project_id = 'project-id' row_key = b'row_key' @@ -350,9 +342,10 @@ def test_commit(self): column_family_id = u'column_family_id' column = b'column' - channel = _make_channel() - client = self._make_client(project=project_id, channel=channel, - admin=True) + api = bigtable_client.BigtableClient(mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project=project_id, + credentials=credentials, admin=True) table = _Table(table_name, client=client) row = self._make_one(row_key, table) @@ -363,6 +356,7 @@ def test_commit(self): response_pb = empty_pb2.Empty() # Patch the stub used by the API method. 
+ client._table_data_client = api bigtable_stub = client._table_data_client.bigtable_stub bigtable_stub.MutateRow.side_effect = [response_pb] @@ -411,11 +405,12 @@ def test_commit_too_many_mutations(self): def test_commit_no_mutations(self): from tests.unit._testing import _FakeStub + project_id = 'project-id' row_key = b'row_key' - from google.cloud.bigtable_v2 import BigtableClient - channel = _make_channel() - client = BigtableClient(channel=channel) + credentials = _make_credentials() + client = self._make_client(project=project_id, + credentials=credentials, admin=True) table = _Table(None, client=client) row = self._make_one(row_key, table) self.assertEqual(row._pb_mutations, []) @@ -475,6 +470,7 @@ def test__get_mutations(self): def test_commit(self): from google.cloud.bigtable.row_filters import RowSampleFilter + from google.cloud.bigtable_v2.gapic import bigtable_client project_id = 'project-id' row_key = b'row_key' @@ -485,8 +481,9 @@ def test_commit(self): column1 = b'column1' column2 = b'column2' - channel = _make_channel() - client = self._make_client(project=project_id, channel=channel, + api = bigtable_client.BigtableClient(mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project=project_id, credentials=credentials, admin=True) table = _Table(table_name, client=client) row_filter = RowSampleFilter(0.33) @@ -501,6 +498,7 @@ def test_commit(self): predicate_matched=predicate_matched) # Patch the stub used by the API method. + client._table_data_client = api bigtable_stub = client._table_data_client.bigtable_stub bigtable_stub.CheckAndMutateRow.side_effect = [[response_pb]] @@ -534,11 +532,12 @@ def test_commit_too_many_mutations(self): def test_commit_no_mutations(self): from tests.unit._testing import _FakeStub + project_id = 'project-id' row_key = b'row_key' - from google.cloud.bigtable_v2 import BigtableClient - channel = _make_channel() - client = BigtableClient(channel=channel) + credentials = _make_credentials() + client = self._make_client(project=project_id, credentials=credentials, + admin=True) table = _Table(None, client=client) filter_ = object() row = self._make_one(row_key, table, filter_=filter_) @@ -625,14 +624,17 @@ def test_increment_cell_value(self): def test_commit(self): from google.cloud._testing import _Monkey from google.cloud.bigtable import row as MUT + from google.cloud.bigtable_v2.gapic import bigtable_client project_id = 'project-id' row_key = b'row_key' table_name = 'projects/more-stuff' column_family_id = u'column_family_id' column = b'column' - channel = _make_channel() - client = self._make_client(project=project_id, channel=channel, + + api = bigtable_client.BigtableClient(mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project=project_id, credentials=credentials, admin=True) table = _Table(table_name, client=client) row = self._make_one(row_key, table) @@ -644,6 +646,9 @@ def test_commit(self): row_responses = [] expected_result = object() + # Patch API calls + client._table_data_client = api + def mock_parse_rmw_row_response(row_response): row_responses.append(row_response) return expected_result @@ -661,8 +666,9 @@ def test_commit_no_rules(self): project_id = 'project-id' row_key = b'row_key' - channel = _make_channel() - client = self._make_client(project=project_id, channel=channel, + + credentials = _make_credentials() + client = self._make_client(project=project_id, credentials=credentials, admin=True) table = _Table(None, client=client) row = self._make_one(row_key, table) @@ 
-921,13 +927,6 @@ def _ReadModifyWriteRulePB(*args, **kw): return data_v2_pb2.ReadModifyWriteRule(*args, **kw) -class _Client(object): - - data_stub = None - _table_data_client = None - _table_admin_client = None - - class _Instance(object): def __init__(self, client=None): diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index 9b8de28935ee..037a57235391 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -18,6 +18,8 @@ import grpc import mock +from ._testing import _make_credentials + class Test___mutate_rows_request(unittest.TestCase): @@ -136,16 +138,6 @@ class TestTable(unittest.TestCase): VALUE = b'value' _json_tests = None - @mock.patch('google.auth.transport.grpc.secure_authorized_channel') - def _make_channel(self, secure_authorized_channel): - from google.api_core import grpc_helpers - target = 'example.com:443' - - channel = grpc_helpers.create_channel( - target, credentials=mock.sentinel.credentials) - - return channel - @staticmethod def _get_target_class(): from google.cloud.bigtable.table import Table @@ -165,9 +157,9 @@ def _make_client(self, *args, **kwargs): return self._get_target_client_class()(*args, **kwargs) def test_constructor(self): - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) table_id = 'table-id' instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_one(self.TABLE_ID, instance) @@ -177,9 +169,9 @@ def test_constructor(self): def test_row_factory_direct(self): from google.cloud.bigtable.row import DirectRow - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_one(self.TABLE_ID, instance) row_key = b'row_key' @@ -192,9 +184,9 @@ def test_row_factory_direct(self): def test_row_factory_conditional(self): from google.cloud.bigtable.row import ConditionalRow - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_one(self.TABLE_ID, instance) row_key = b'row_key' @@ -208,9 +200,9 @@ def test_row_factory_conditional(self): def test_row_factory_append(self): from google.cloud.bigtable.row import AppendRow - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_one(self.TABLE_ID, instance) row_key = b'row_key' @@ -221,36 +213,36 @@ def test_row_factory_append(self): self.assertEqual(row._table, table) def test_row_factory_failure(self): - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) instance = 
client.instance(instance_id=self.INSTANCE_ID) table = self._make_one(self.TABLE_ID, instance) with self.assertRaises(ValueError): table.row(b'row_key', filter_=object(), append=True) def test___eq__(self): - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) instance = client.instance(instance_id=self.INSTANCE_ID) table1 = self._make_one(self.TABLE_ID, instance) table2 = self._make_one(self.TABLE_ID, instance) self.assertEqual(table1, table2) def test___eq__type_differ(self): - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) instance = client.instance(instance_id=self.INSTANCE_ID) table1 = self._make_one(self.TABLE_ID, instance) table2 = object() self.assertNotEqual(table1, table2) def test___ne__same_value(self): - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) instance = client.instance(instance_id=self.INSTANCE_ID) table1 = self._make_one(self.TABLE_ID, instance) table2 = self._make_one(self.TABLE_ID, instance) @@ -263,12 +255,24 @@ def test___ne__(self): self.assertNotEqual(table1, table2) def _create_test_helper(self): - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_instance_admin_client, bigtable_table_admin_client) + + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + instance_api = ( + bigtable_instance_admin_client.BigtableInstanceAdminClient( + mock.Mock())) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_one(self.TABLE_ID, instance) + # Patch API calls + client._table_admin_client = table_api + client._instance_admin_client = instance_api + # Create expected_result. expected_result = None # create() has no return value. @@ -280,12 +284,20 @@ def test_create(self): self._create_test_helper() def test_delete(self): - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) + + api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_one(self.TABLE_ID, instance) + # Patch API calls + client._table_admin_client = api + # Create expected_result. expected_result = None # delete() has no return value. 
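Throughout these tests a bare `mock.Mock()` stands in for the gRPC channel when constructing the GAPIC clients. A sketch of why this works (an illustration of the mechanism, not code from this patch): the generated client only uses the channel to build its stub, so a Mock channel yields Mock RPC methods whose responses a test can queue with `side_effect`:

    import mock
    from google.cloud.bigtable_admin_v2.gapic import (
        bigtable_table_admin_client)

    # Every channel.unary_unary(...) call made while building the stub
    # simply returns another Mock.
    api = bigtable_table_admin_client.BigtableTableAdminClient(mock.Mock())
    stub = api.bigtable_table_admin_stub

    # Queue a canned response for any RPC the code under test will invoke.
    stub.GetTable.side_effect = [mock.sentinel.response_pb]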
@@ -294,9 +306,14 @@ def test_delete(self): self.assertEqual(result, expected_result) def _list_column_families_helper(self): - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) + + api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_one(self.TABLE_ID, instance) @@ -307,6 +324,8 @@ def _list_column_families_helper(self): column_families={COLUMN_FAMILY_ID: column_family}, ) + # Patch the stub used by the API method. + client._table_admin_client = api bigtable_table_stub = ( client._table_admin_client.bigtable_table_admin_stub) bigtable_table_stub.GetTable.side_effect = [response_pb] @@ -326,10 +345,16 @@ def test_list_column_families(self): def _read_row_helper(self, chunks, expected_result): from google.cloud._testing import _Monkey from google.cloud.bigtable import table as MUT - - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + from google.cloud.bigtable_v2.gapic import bigtable_client + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) + + data_api = bigtable_client.BigtableClient(mock.Mock()) + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_one(self.TABLE_ID, instance) @@ -349,6 +374,8 @@ def mock_create_row_request(table_name, row_key, filter_): response_iterator = iter([response_pb]) # Patch the stub used by the API method. 
+ client._table_data_client = data_api + client._table_admin_client = table_api bigtable_stub = client._table_data_client.bigtable_stub bigtable_stub.ReadRows.side_effect = [response_iterator] @@ -402,11 +429,16 @@ def test_read_row_still_partial(self): def test_mutate_rows(self): from google.rpc.status_pb2 import Status - - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) + + api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) instance = client.instance(instance_id=self.INSTANCE_ID) + client._table_admin_client = api table = self._make_one(self.TABLE_ID, instance) response = [Status(code=0), Status(code=1)] @@ -425,10 +457,18 @@ def test_read_rows(self): from google.cloud._testing import _Monkey from google.cloud.bigtable.row_data import PartialRowsData from google.cloud.bigtable import table as MUT - - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + from google.cloud.bigtable_v2.gapic import bigtable_client + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) + + data_api = bigtable_client.BigtableClient(mock.Mock()) + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) + client._table_data_client = data_api + client._table_admin_client = table_api instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_one(self.TABLE_ID, instance) @@ -466,9 +506,18 @@ def mock_create_row_request(table_name, **kwargs): self.assertEqual(mock_created, [(table.name, created_kwargs)]) def test_yield_retry_rows(self): - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + from google.cloud.bigtable_v2.gapic import bigtable_client + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) + + data_api = bigtable_client.BigtableClient(mock.Mock()) + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) + client._table_data_client = data_api + client._table_admin_client = table_api instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_one(self.TABLE_ID, instance) @@ -511,9 +560,18 @@ def test_yield_retry_rows(self): self.assertEqual(result.row_key, self.ROW_KEY_2) def test_sample_row_keys(self): - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + from google.cloud.bigtable_v2.gapic import bigtable_client + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) + + data_api = bigtable_client.BigtableClient(mock.Mock()) + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) + client._table_data_client = data_api + client._table_admin_client = table_api instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_one(self.TABLE_ID, instance) 
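The same client setup recurs in nearly every remaining test: build the data and table admin APIs over Mock channels, create the client with mock credentials, then patch both private attributes. A helper along these lines (hypothetical, not part of this patch; imports as in the tests above) would capture the pattern:

    def _make_patched_client(self, project='project-id'):
        data_api = bigtable_client.BigtableClient(mock.Mock())
        table_api = bigtable_table_admin_client.BigtableTableAdminClient(
            mock.Mock())
        client = self._make_client(
            project=project, credentials=_make_credentials(), admin=True)
        # Bypass lazy construction of the real API objects.
        client._table_data_client = data_api
        client._table_admin_client = table_api
        return client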
@@ -546,16 +604,6 @@ class Test__RetryableMutateRowsWorker(unittest.TestCase): RETRYABLE_2 = StatusCode.ABORTED.value[0] NON_RETRYABLE = StatusCode.CANCELLED.value[0] - @mock.patch('google.auth.transport.grpc.secure_authorized_channel') - def _make_channel(self, secure_authorized_channel): - from google.api_core import grpc_helpers - target = 'example.com:443' - - channel = grpc_helpers.create_channel( - target, credentials=mock.sentinel.credentials) - - return channel - @staticmethod def _get_target_class_for_worker(): from google.cloud.bigtable.table import _RetryableMutateRowsWorker @@ -601,9 +649,18 @@ def _make_responses(self, codes): return MutateRowsResponse(entries=entries) def test_callable_empty_rows(self): - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + from google.cloud.bigtable_v2.gapic import bigtable_client + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) + + data_api = bigtable_client.BigtableClient(mock.Mock()) + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) + client._table_data_client = data_api + client._table_admin_client = table_api instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_table(self.TABLE_ID, instance) @@ -614,6 +671,9 @@ def test_callable_empty_rows(self): def test_callable_no_retry_strategy(self): from google.cloud.bigtable.row import DirectRow + from google.cloud.bigtable_v2.gapic import bigtable_client + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) # Setup: # - Mutate 3 rows. @@ -625,9 +685,14 @@ def test_callable_no_retry_strategy(self): # - State of responses_statuses should be # [success, retryable, non-retryable] - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + data_api = bigtable_client.BigtableClient(mock.Mock()) + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) + client._table_data_client = data_api + client._table_admin_client = table_api instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_table(self.TABLE_ID, instance) @@ -659,6 +724,9 @@ def test_callable_no_retry_strategy(self): def test_callable_retry(self): from google.cloud.bigtable.row import DirectRow from google.cloud.bigtable.table import DEFAULT_RETRY + from google.cloud.bigtable_v2.gapic import bigtable_client + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) # Setup: # - Mutate 3 rows. 
@@ -671,9 +739,14 @@ def test_callable_retry(self): # - State of responses_statuses should be # [success, success, non-retryable] - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + data_api = bigtable_client.BigtableClient(mock.Mock()) + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) + client._table_data_client = data_api + client._table_admin_client = table_api instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_table(self.TABLE_ID, instance) @@ -708,6 +781,9 @@ def test_callable_retry(self): def test_callable_retry_timeout(self): from google.cloud.bigtable.row import DirectRow from google.cloud.bigtable.table import DEFAULT_RETRY + from google.cloud.bigtable_v2.gapic import bigtable_client + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) # Setup: # - Mutate 2 rows. @@ -720,9 +796,14 @@ def test_callable_retry_timeout(self): # - By the time deadline is reached, statuses should be # [retryable, retryable] - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + data_api = bigtable_client.BigtableClient(mock.Mock()) + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) + client._table_data_client = data_api + client._table_admin_client = table_api instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_table(self.TABLE_ID, instance) @@ -750,9 +831,15 @@ def test_callable_retry_timeout(self): self.assertEqual(result, expected_result) def test_do_mutate_retryable_rows_empty_rows(self): - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) + + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) + client._table_admin_client = table_api instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_table(self.TABLE_ID, instance) @@ -763,6 +850,9 @@ def test_do_mutate_retryable_rows_empty_rows(self): def test_do_mutate_retryable_rows(self): from google.cloud.bigtable.row import DirectRow + from google.cloud.bigtable_v2.gapic import bigtable_client + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) # Setup: # - Mutate 2 rows. 
@@ -771,9 +861,14 @@ def test_do_mutate_retryable_rows(self): # Expectation: # - Expect [success, non-retryable] - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + data_api = bigtable_client.BigtableClient(mock.Mock()) + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) + client._table_data_client = data_api + client._table_admin_client = table_api instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_table(self.TABLE_ID, instance) @@ -799,6 +894,9 @@ def test_do_mutate_retryable_rows(self): def test_do_mutate_retryable_rows_retry(self): from google.cloud.bigtable.row import DirectRow from google.cloud.bigtable.table import _BigtableRetryableError + from google.cloud.bigtable_v2.gapic import bigtable_client + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) # Setup: # - Mutate 3 rows. @@ -809,9 +907,14 @@ def test_do_mutate_retryable_rows_retry(self): # - State of responses_statuses should be # [success, retryable, non-retryable] - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + data_api = bigtable_client.BigtableClient(mock.Mock()) + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) + client._table_data_client = data_api + client._table_admin_client = table_api instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_table(self.TABLE_ID, instance) @@ -845,6 +948,9 @@ def test_do_mutate_retryable_rows_retry(self): def test_do_mutate_retryable_rows_second_retry(self): from google.cloud.bigtable.row import DirectRow from google.cloud.bigtable.table import _BigtableRetryableError + from google.cloud.bigtable_v2.gapic import bigtable_client + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) # Setup: # - Mutate 4 rows. @@ -860,9 +966,14 @@ def test_do_mutate_retryable_rows_second_retry(self): # - Exception contains response whose index should be '3' even though # only two rows were retried. - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + data_api = bigtable_client.BigtableClient(mock.Mock()) + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) + client._table_data_client = data_api + client._table_admin_client = table_api instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_table(self.TABLE_ID, instance) @@ -903,6 +1014,9 @@ def test_do_mutate_retryable_rows_second_retry(self): def test_do_mutate_retryable_rows_second_try(self): from google.cloud.bigtable.row import DirectRow + from google.cloud.bigtable_v2.gapic import bigtable_client + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) # Setup: # - Mutate 4 rows. 
@@ -914,9 +1028,14 @@ def test_do_mutate_retryable_rows_second_try(self): # - After second try: # [success, non-retryable, non-retryable, success] - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + data_api = bigtable_client.BigtableClient(mock.Mock()) + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) + client._table_data_client = data_api + client._table_admin_client = table_api instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_table(self.TABLE_ID, instance) @@ -955,6 +1074,8 @@ def test_do_mutate_retryable_rows_second_try(self): def test_do_mutate_retryable_rows_second_try_no_retryable(self): from google.cloud.bigtable.row import DirectRow + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) # Setup: # - Mutate 2 rows. @@ -964,9 +1085,12 @@ def test_do_mutate_retryable_rows_second_try_no_retryable(self): # Expectation: # - After second try: [success, non-retryable] - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) + client._table_admin_client = table_api instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_table(self.TABLE_ID, instance) @@ -988,10 +1112,18 @@ def test_do_mutate_retryable_rows_second_try_no_retryable(self): def test_do_mutate_retryable_rows_mismatch_num_responses(self): from google.cloud.bigtable.row import DirectRow - - channel = self._make_channel() - client = self._make_client(project='project-id', channel=channel, - admin=True) + from google.cloud.bigtable_v2.gapic import bigtable_client + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_table_admin_client) + + data_api = bigtable_client.BigtableClient(mock.Mock()) + table_api = bigtable_table_admin_client.BigtableTableAdminClient( + mock.Mock()) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) + client._table_data_client = data_api + client._table_admin_client = table_api instance = client.instance(instance_id=self.INSTANCE_ID) table = self._make_table(self.TABLE_ID, instance) From 3376f6edaf5a618546ad96b2eedb2001fae22d91 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 16 May 2018 12:58:51 -0700 Subject: [PATCH 09/75] Fix tests after grpcio update (#5333) * Fix tests after grpcio update * add test for close stub for coverage --- api_core/google/api_core/grpc_helpers.py | 4 ++++ api_core/tests/unit/test_grpc_helpers.py | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/api_core/google/api_core/grpc_helpers.py b/api_core/google/api_core/grpc_helpers.py index 329971e69336..6a2f052f8403 100644 --- a/api_core/google/api_core/grpc_helpers.py +++ b/api_core/google/api_core/grpc_helpers.py @@ -358,3 +358,7 @@ def subscribe(self, callback, try_to_connect=False): def unsubscribe(self, callback): """grpc.Channel.unsubscribe implementation.""" pass + + def close(self): + """grpc.Channel.close implementation.""" + pass diff --git a/api_core/tests/unit/test_grpc_helpers.py b/api_core/tests/unit/test_grpc_helpers.py index a5bcced16614..e5e4311ee732 100644 --- 
a/api_core/tests/unit/test_grpc_helpers.py
+++ b/api_core/tests/unit/test_grpc_helpers.py
@@ -375,3 +375,7 @@ def test_subscribe_unsubscribe(self):
         channel = grpc_helpers.ChannelStub()
         assert channel.subscribe(None) is None
         assert channel.unsubscribe(None) is None
+
+    def test_close(self):
+        channel = grpc_helpers.ChannelStub()
+        assert channel.close() is None

From d2cc789886b609127398a561625d33cb9ddb3d3c Mon Sep 17 00:00:00 2001
From: Julian Early
Date: Wed, 16 May 2018 14:09:09 -0700
Subject: [PATCH 10/75] Add '{Blob,Bucket}.make_private' method (#5336)

Reverses the effect of 'make_public' by removing read permissions for the
'allusers' principal
---
 storage/google/cloud/storage/blob.py    |  12 ++
 storage/google/cloud/storage/bucket.py  |  50 ++++++++-
 storage/tests/unit/test_blob.py         |  22 +++-
 storage/tests/unit/test_bucket.py       | 139 ++++++++++++++++++++++
 storage/tests/unit/test_notification.py |   2 +-
 5 files changed, 221 insertions(+), 4 deletions(-)

diff --git a/storage/google/cloud/storage/blob.py b/storage/google/cloud/storage/blob.py
index 51359bc4ded1..5d3ebe9af75f 100644
--- a/storage/google/cloud/storage/blob.py
+++ b/storage/google/cloud/storage/blob.py
@@ -1346,6 +1346,18 @@ def make_public(self, client=None):
         self.acl.all().grant_read()
         self.acl.save(client=client)

+    def make_private(self, client=None):
+        """Make this blob private by removing `allusers` read access.
+        Does not revoke access for anything other than the `allusers` group.
+
+        :type client: :class:`~google.cloud.storage.client.Client` or
+                      ``NoneType``
+        :param client: Optional. The client to use. If not passed, falls back
+                       to the ``client`` stored on the blob's bucket.
+        """
+        self.acl.all().revoke_read()
+        self.acl.save(client=client)
+
     def compose(self, sources, client=None):
         """Concatenate source blobs into this one.

diff --git a/storage/google/cloud/storage/bucket.py b/storage/google/cloud/storage/bucket.py
index af3af4e9925a..41ec9e6890c8 100644
--- a/storage/google/cloud/storage/bucket.py
+++ b/storage/google/cloud/storage/bucket.py
@@ -709,7 +709,7 @@ def copy_blob(self, blob, destination_bucket, new_name=None,
             path=api_path,
             query_params=query_params,
             _target_object=new_blob,
-            )
+        )

         if not preserve_acl:
             new_blob.acl.save(acl={}, client=client)
@@ -1328,6 +1328,54 @@ def make_public(self, recursive=False, future=False, client=None):
                 blob.acl.all().grant_read()
                 blob.acl.save(client=client)

+    def make_private(self, recursive=False, future=False, client=None):
+        """Undo the `make_public` method and make the bucket private.
+
+        If ``recursive=True`` and the bucket contains more than 256
+        objects / blobs this will cowardly refuse to make the objects private.
+        This is to prevent extremely long runtime of this method.
+
+        :type recursive: bool
+        :param recursive: If True, this will make all blobs inside the bucket
+                          private as well.
+
+        :type future: bool
+        :param future: If True, this will make all objects created in the
+                       future private as well.
+
+        :type client: :class:`~google.cloud.storage.client.Client` or
+                      ``NoneType``
+        :param client: Optional. The client to use. If not passed, falls back
+                       to the ``client`` stored on the current bucket.
+ """ + self.acl.all().revoke_read() + self.acl.save(client=client) + + if future: + doa = self.default_object_acl + if not doa.loaded: + doa.reload(client=client) + doa.all().revoke_read() + doa.save(client=client) + + if recursive: + blobs = list(self.list_blobs( + projection='full', + max_results=self._MAX_OBJECTS_FOR_ITERATION + 1, + client=client)) + if len(blobs) > self._MAX_OBJECTS_FOR_ITERATION: + message = ( + 'Refusing to make private recursively with more than ' + '%d objects. If you actually want to make every object ' + 'in this bucket private, please do it on the objects ' + 'yourself.' + ) % (self._MAX_OBJECTS_FOR_ITERATION,) + raise ValueError(message) + + for blob in blobs: + blob.acl.all().revoke_read() + blob.acl.save(client=client) + def generate_upload_policy( self, conditions, expiration=None, client=None): """Create a signed upload policy for uploading objects. diff --git a/storage/tests/unit/test_blob.py b/storage/tests/unit/test_blob.py index 6122b40a844d..3af16ee99663 100644 --- a/storage/tests/unit/test_blob.py +++ b/storage/tests/unit/test_blob.py @@ -2062,6 +2062,24 @@ def test_make_public(self): self.assertEqual(kw[0]['data'], {'acl': permissive}) self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + def test_make_private(self): + BLOB_NAME = 'blob-name' + no_permissions = [] + after = ({'status': http_client.OK}, {'acl': no_permissions}) + connection = _Connection(after) + client = _Client(connection) + bucket = _Bucket(client=client) + blob = self._make_one(BLOB_NAME, bucket=bucket) + blob.acl.loaded = True + blob.make_private() + self.assertEqual(list(blob.acl), no_permissions) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/b/name/o/%s' % BLOB_NAME) + self.assertEqual(kw[0]['data'], {'acl': no_permissions}) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + def test_compose_wo_content_type_set(self): SOURCE_1 = 'source-1' SOURCE_2 = 'source-2' @@ -2103,7 +2121,7 @@ def test_compose_minimal_w_user_project(self): 'method': 'POST', 'path': '/b/name/o/%s/compose' % DESTINATION, 'query_params': {'userProject': USER_PROJECT}, - 'data': { + 'data': { 'sourceObjects': [ {'name': source_1.name}, {'name': source_2.name}, @@ -2143,7 +2161,7 @@ def test_compose_w_additional_property_changes(self): 'method': 'POST', 'path': '/b/name/o/%s/compose' % DESTINATION, 'query_params': {}, - 'data': { + 'data': { 'sourceObjects': [ {'name': source_1.name}, {'name': source_2.name}, diff --git a/storage/tests/unit/test_bucket.py b/storage/tests/unit/test_bucket.py index 92ca3bbf8700..1ce3522d1290 100644 --- a/storage/tests/unit/test_bucket.py +++ b/storage/tests/unit/test_bucket.py @@ -1665,6 +1665,145 @@ def test_make_public_recursive_too_many(self): bucket._MAX_OBJECTS_FOR_ITERATION = 1 self.assertRaises(ValueError, bucket.make_public, recursive=True) + def test_make_private_defaults(self): + NAME = 'name' + no_permissions = [] + after = {'acl': no_permissions, 'defaultObjectAcl': []} + connection = _Connection(after) + client = _Client(connection) + bucket = self._make_one(client=client, name=NAME) + bucket.acl.loaded = True + bucket.default_object_acl.loaded = True + bucket.make_private() + self.assertEqual(list(bucket.acl), no_permissions) + self.assertEqual(list(bucket.default_object_acl), []) + kw = connection._requested + self.assertEqual(len(kw), 1) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + 
self.assertEqual(kw[0]['data'], {'acl': after['acl']}) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + + def _make_private_w_future_helper(self, default_object_acl_loaded=True): + NAME = 'name' + no_permissions = [] + after1 = {'acl': no_permissions, 'defaultObjectAcl': []} + after2 = {'acl': no_permissions, 'defaultObjectAcl': no_permissions} + if default_object_acl_loaded: + num_requests = 2 + connection = _Connection(after1, after2) + else: + num_requests = 3 + # We return the same value for default_object_acl.reload() + # to consume. + connection = _Connection(after1, after1, after2) + client = _Client(connection) + bucket = self._make_one(client=client, name=NAME) + bucket.acl.loaded = True + bucket.default_object_acl.loaded = default_object_acl_loaded + bucket.make_private(future=True) + self.assertEqual(list(bucket.acl), no_permissions) + self.assertEqual(list(bucket.default_object_acl), no_permissions) + kw = connection._requested + self.assertEqual(len(kw), num_requests) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + self.assertEqual(kw[0]['data'], {'acl': no_permissions}) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + if not default_object_acl_loaded: + self.assertEqual(kw[1]['method'], 'GET') + self.assertEqual(kw[1]['path'], '/b/%s/defaultObjectAcl' % NAME) + # Last could be 1 or 2 depending on `default_object_acl_loaded`. + self.assertEqual(kw[-1]['method'], 'PATCH') + self.assertEqual(kw[-1]['path'], '/b/%s' % NAME) + self.assertEqual(kw[-1]['data'], {'defaultObjectAcl': no_permissions}) + self.assertEqual(kw[-1]['query_params'], {'projection': 'full'}) + + def test_make_private_w_future(self): + self._make_private_w_future_helper(default_object_acl_loaded=True) + + def test_make_private_w_future_reload_default(self): + self._make_private_w_future_helper(default_object_acl_loaded=False) + + def test_make_private_recursive(self): + _saved = [] + + class _Blob(object): + _granted = True + + def __init__(self, bucket, name): + self._bucket = bucket + self._name = name + + @property + def acl(self): + return self + + # Faux ACL methods + def all(self): + return self + + def revoke_read(self): + self._granted = False + + def save(self, client=None): + _saved.append( + (self._bucket, self._name, self._granted, client)) + + def item_to_blob(self, item): + return _Blob(self.bucket, item['name']) + + NAME = 'name' + BLOB_NAME = 'blob-name' + no_permissions = [] + after = {'acl': no_permissions, 'defaultObjectAcl': []} + connection = _Connection(after, {'items': [{'name': BLOB_NAME}]}) + client = _Client(connection) + bucket = self._make_one(client=client, name=NAME) + bucket.acl.loaded = True + bucket.default_object_acl.loaded = True + + with mock.patch('google.cloud.storage.bucket._item_to_blob', + new=item_to_blob): + bucket.make_private(recursive=True) + self.assertEqual(list(bucket.acl), no_permissions) + self.assertEqual(list(bucket.default_object_acl), []) + self.assertEqual(_saved, [(bucket, BLOB_NAME, False, None)]) + kw = connection._requested + self.assertEqual(len(kw), 2) + self.assertEqual(kw[0]['method'], 'PATCH') + self.assertEqual(kw[0]['path'], '/b/%s' % NAME) + self.assertEqual(kw[0]['data'], {'acl': no_permissions}) + self.assertEqual(kw[0]['query_params'], {'projection': 'full'}) + self.assertEqual(kw[1]['method'], 'GET') + self.assertEqual(kw[1]['path'], '/b/%s/o' % NAME) + max_results = bucket._MAX_OBJECTS_FOR_ITERATION + 1 + self.assertEqual(kw[1]['query_params'], + 
{'maxResults': max_results, 'projection': 'full'}) + + def test_make_private_recursive_too_many(self): + NO_PERMISSIONS = [] + AFTER = {'acl': NO_PERMISSIONS, 'defaultObjectAcl': []} + + NAME = 'name' + BLOB_NAME1 = 'blob-name1' + BLOB_NAME2 = 'blob-name2' + GET_BLOBS_RESP = { + 'items': [ + {'name': BLOB_NAME1}, + {'name': BLOB_NAME2}, + ], + } + connection = _Connection(AFTER, GET_BLOBS_RESP) + client = _Client(connection) + bucket = self._make_one(client=client, name=NAME) + bucket.acl.loaded = True + bucket.default_object_acl.loaded = True + + # Make the Bucket refuse to make_private with 2 objects. + bucket._MAX_OBJECTS_FOR_ITERATION = 1 + self.assertRaises(ValueError, bucket.make_private, recursive=True) + def test_page_empty_response(self): from google.api_core import page_iterator diff --git a/storage/tests/unit/test_notification.py b/storage/tests/unit/test_notification.py index 32b7dca1c4dd..9004decc94dc 100644 --- a/storage/tests/unit/test_notification.py +++ b/storage/tests/unit/test_notification.py @@ -36,7 +36,7 @@ class TestBucketNotification(unittest.TestCase): ETAG = 'DEADBEEF' CREATE_PATH = '/b/{}/notificationConfigs'.format(BUCKET_NAME) NOTIFICATION_PATH = '/b/{}/notificationConfigs/{}'.format( - BUCKET_NAME, NOTIFICATION_ID) + BUCKET_NAME, NOTIFICATION_ID) @staticmethod def event_types(): From 9586c98e88cb51e4e852f36ba2eb5ee3c2e1305f Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 16 May 2018 14:55:02 -0700 Subject: [PATCH 11/75] Release 1.2.0 (#5337) --- api_core/CHANGELOG.md | 9 +++++++++ api_core/setup.py | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/api_core/CHANGELOG.md b/api_core/CHANGELOG.md index d1a281f88b79..ab397270e1f6 100644 --- a/api_core/CHANGELOG.md +++ b/api_core/CHANGELOG.md @@ -4,6 +4,15 @@ [1]: https://pypi.org/project/google-api-core/#history +## 1.2.0 + +### Implementation Changes +- Add close method to grpc Channel (#5333) + +### Internal / Testing Changes +- Fix tests after grpcio update (#5333) +- Add Test runs for Python 3.7 and remove 3.4 (#5295) + ## 1.1.2 ### Packaging diff --git a/api_core/setup.py b/api_core/setup.py index 13309e6a1be6..56ffd6c69f25 100644 --- a/api_core/setup.py +++ b/api_core/setup.py @@ -22,7 +22,7 @@ name = 'google-api-core' description = 'Google API client core library' -version = '1.1.2' +version = '1.2.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 28653c06d7c5961ca93f701e81e92e90110eb363 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 17 May 2018 12:41:33 -0700 Subject: [PATCH 12/75] BigQuery: fix typo in Client docstrings (#5342) Was missing a comma between types in get_job and cancel_job functions. --- bigquery/google/cloud/bigquery/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py index a74a2c90c20c..5ac988984617 100644 --- a/bigquery/google/cloud/bigquery/client.py +++ b/bigquery/google/cloud/bigquery/client.py @@ -603,7 +603,7 @@ def get_job( Returns: Union[google.cloud.bigquery.job.LoadJob, \ google.cloud.bigquery.job.CopyJob, \ - google.cloud.bigquery.job.ExtractJob \ + google.cloud.bigquery.job.ExtractJob, \ google.cloud.bigquery.job.QueryJob]: Job instance, based on the resource returned by the API. 
""" @@ -642,7 +642,7 @@ def cancel_job( Returns: Union[google.cloud.bigquery.job.LoadJob, \ google.cloud.bigquery.job.CopyJob, \ - google.cloud.bigquery.job.ExtractJob \ + google.cloud.bigquery.job.ExtractJob, \ google.cloud.bigquery.job.QueryJob]: Job instance, based on the resource returned by the API. """ From 4cea94970a13590f9b79ecc278606e72ff529c8e Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 17 May 2018 12:42:59 -0700 Subject: [PATCH 13/75] BigQuery: NUMERIC type support (#5331) * Support for BigQuery's NUMERIC type, which is currently in alpha. (#4874) * Support for BigQuery's NUMERIC type, which is currently in alpha. * Remove unused import from test_query.py. * Fix newly-added system tests; all system and unit tests pass now. Add unit tests to reach what should be 100% coverage for new code. * Fix lint warning and rename shadowed unit test. * Add unit test that NUMERIC types are encoded correctly in insert_rows * Convert numeric unit test to new mocked connection style. * use var for numeric type in SQL system test --- bigquery/google/cloud/bigquery/_helpers.py | 16 +++++++ .../google/cloud/bigquery/dbapi/_helpers.py | 3 ++ bigquery/google/cloud/bigquery/dbapi/types.py | 2 +- bigquery/google/cloud/bigquery/query.py | 17 ++++--- bigquery/google/cloud/bigquery/schema.py | 7 +-- bigquery/tests/data/characters.json | 2 + bigquery/tests/data/characters.jsonl | 6 +-- bigquery/tests/data/schema.json | 5 ++ bigquery/tests/system.py | 17 +++++++ bigquery/tests/unit/test__helpers.py | 42 +++++++++++++++++ bigquery/tests/unit/test_client.py | 47 +++++++++++++++++++ bigquery/tests/unit/test_dbapi__helpers.py | 2 + bigquery/tests/unit/test_query.py | 14 ++++++ 13 files changed, 166 insertions(+), 14 deletions(-) diff --git a/bigquery/google/cloud/bigquery/_helpers.py b/bigquery/google/cloud/bigquery/_helpers.py index 1733f8e360b9..f67d9802a6bf 100644 --- a/bigquery/google/cloud/bigquery/_helpers.py +++ b/bigquery/google/cloud/bigquery/_helpers.py @@ -16,6 +16,7 @@ import base64 import datetime +import decimal from google.api_core import retry from google.cloud._helpers import UTC @@ -46,6 +47,12 @@ def _float_from_json(value, field): return float(value) +def _decimal_from_json(value, field): + """Coerce 'value' to a Decimal, if set or not nullable.""" + if _not_null(value, field): + return decimal.Decimal(value) + + def _bool_from_json(value, field): """Coerce 'value' to a bool, if set or not nullable.""" if _not_null(value, field): @@ -160,6 +167,7 @@ def _record_from_json(value, field): 'INT64': _int_from_json, 'FLOAT': _float_from_json, 'FLOAT64': _float_from_json, + 'NUMERIC': _decimal_from_json, 'BOOLEAN': _bool_from_json, 'BOOL': _bool_from_json, 'STRING': _string_from_json, @@ -228,6 +236,13 @@ def _float_to_json(value): return value +def _decimal_to_json(value): + """Coerce 'value' to a JSON-compatible representation.""" + if isinstance(value, decimal.Decimal): + value = str(value) + return value + + def _bool_to_json(value): """Coerce 'value' to an JSON-compatible representation.""" if isinstance(value, bool): @@ -293,6 +308,7 @@ def _time_to_json(value): 'INT64': _int_to_json, 'FLOAT': _float_to_json, 'FLOAT64': _float_to_json, + 'NUMERIC': _decimal_to_json, 'BOOLEAN': _bool_to_json, 'BOOL': _bool_to_json, 'BYTES': _bytes_to_json, diff --git a/bigquery/google/cloud/bigquery/dbapi/_helpers.py b/bigquery/google/cloud/bigquery/dbapi/_helpers.py index a5a1add1f3a4..56c6a088672f 100644 --- a/bigquery/google/cloud/bigquery/dbapi/_helpers.py +++ 
b/bigquery/google/cloud/bigquery/dbapi/_helpers.py @@ -14,6 +14,7 @@ import collections import datetime +import decimal import numbers import six @@ -46,6 +47,8 @@ def scalar_to_query_parameter(value, name=None): parameter_type = 'INT64' elif isinstance(value, numbers.Real): parameter_type = 'FLOAT64' + elif isinstance(value, decimal.Decimal): + parameter_type = 'NUMERIC' elif isinstance(value, six.text_type): parameter_type = 'STRING' elif isinstance(value, six.binary_type): diff --git a/bigquery/google/cloud/bigquery/dbapi/types.py b/bigquery/google/cloud/bigquery/dbapi/types.py index 9636ce68bfc1..feb3e320bcca 100644 --- a/bigquery/google/cloud/bigquery/dbapi/types.py +++ b/bigquery/google/cloud/bigquery/dbapi/types.py @@ -79,6 +79,6 @@ def __eq__(self, other): STRING = 'STRING' BINARY = _DBAPITypeObject('BYTES', 'RECORD', 'STRUCT') NUMBER = _DBAPITypeObject( - 'INTEGER', 'INT64', 'FLOAT', 'FLOAT64', 'BOOLEAN', 'BOOL') + 'INTEGER', 'INT64', 'FLOAT', 'FLOAT64', 'NUMERIC', 'BOOLEAN', 'BOOL') DATETIME = _DBAPITypeObject('TIMESTAMP', 'DATE', 'TIME', 'DATETIME') ROWID = 'ROWID' diff --git a/bigquery/google/cloud/bigquery/query.py b/bigquery/google/cloud/bigquery/query.py index e3bd5c196bec..a3991173f9df 100644 --- a/bigquery/google/cloud/bigquery/query.py +++ b/bigquery/google/cloud/bigquery/query.py @@ -81,10 +81,11 @@ class ScalarQueryParameter(_AbstractQueryParameter): :type type_: str :param type_: name of parameter type. One of 'STRING', 'INT64', - 'FLOAT64', 'BOOL', 'TIMESTAMP', 'DATETIME', or 'DATE'. + 'FLOAT64', 'NUMERIC', 'BOOL', 'TIMESTAMP', 'DATETIME', or + 'DATE'. - :type value: str, int, float, bool, :class:`datetime.datetime`, or - :class:`datetime.date`. + :type value: str, int, float, :class:`decimal.Decimal`, bool, + :class:`datetime.datetime`, or :class:`datetime.date`. :param value: the scalar parameter value. """ def __init__(self, name, type_, value): @@ -99,9 +100,11 @@ def positional(cls, type_, value): :type type_: str :param type_: name of parameter type. One of 'STRING', 'INT64', - 'FLOAT64', 'BOOL', 'TIMESTAMP', 'DATETIME', or 'DATE'. + 'FLOAT64', 'NUMERIC', 'BOOL', 'TIMESTAMP', 'DATETIME', or + 'DATE'. - :type value: str, int, float, bool, :class:`datetime.datetime`, or + :type value: str, int, float, :class:`decimal.Decimal`, bool, + :class:`datetime.datetime`, or :class:`datetime.date`. :param value: the scalar parameter value. @@ -185,7 +188,7 @@ class ArrayQueryParameter(_AbstractQueryParameter): :type array_type: str :param array_type: name of type of array elements. One of `'STRING'`, `'INT64'`, - `'FLOAT64'`, `'BOOL'`, `'TIMESTAMP'`, or `'DATE'`. + `'FLOAT64'`, `'NUMERIC'`, `'BOOL'`, `'TIMESTAMP'`, or `'DATE'`. :type values: list of appropriate scalar type. :param values: the parameter array values. @@ -202,7 +205,7 @@ def positional(cls, array_type, values): :type array_type: str :param array_type: name of type of array elements. One of `'STRING'`, `'INT64'`, - `'FLOAT64'`, `'BOOL'`, `'TIMESTAMP'`, or `'DATE'`. + `'FLOAT64'`, `'NUMERIC'`, `'BOOL'`, `'TIMESTAMP'`, or `'DATE'`. :type values: list of appropriate scalar type :param values: the parameter array values. 
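With the changes above, NUMERIC values are sent to the API as strings and read back as `decimal.Decimal`. A usage sketch (the query and parameter name are illustrative; the client calls are the existing public API):

    import decimal
    from google.cloud import bigquery

    client = bigquery.Client()
    config = bigquery.QueryJobConfig()
    config.query_parameters = [
        bigquery.ScalarQueryParameter(
            'num', 'NUMERIC', decimal.Decimal('3.141592654')),
    ]
    job = client.query('SELECT @num AS num', job_config=config)
    for row in job.result():
        # NUMERIC columns come back as decimal.Decimal values.
        assert isinstance(row.num, decimal.Decimal)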
diff --git a/bigquery/google/cloud/bigquery/schema.py b/bigquery/google/cloud/bigquery/schema.py index 5f566025750c..cc1b4a5ff024 100644 --- a/bigquery/google/cloud/bigquery/schema.py +++ b/bigquery/google/cloud/bigquery/schema.py @@ -23,7 +23,8 @@ class SchemaField(object): :type field_type: str :param field_type: the type of the field (one of 'STRING', 'INTEGER', - 'FLOAT', 'BOOLEAN', 'TIMESTAMP' or 'RECORD'). + 'FLOAT', 'NUMERIC', 'BOOLEAN', 'TIMESTAMP' or + 'RECORD'). :type mode: str :param mode: the mode of the field (one of 'NULLABLE', 'REQUIRED', @@ -77,8 +78,8 @@ def name(self): def field_type(self): """str: The type of the field. - Will be one of 'STRING', 'INTEGER', 'FLOAT', 'BOOLEAN', - 'TIMESTAMP' or 'RECORD'. + Will be one of 'STRING', 'INTEGER', 'FLOAT', 'NUMERIC', + 'BOOLEAN', 'TIMESTAMP' or 'RECORD'. """ return self._field_type diff --git a/bigquery/tests/data/characters.json b/bigquery/tests/data/characters.json index ac854fb812d4..d38636810196 100644 --- a/bigquery/tests/data/characters.json +++ b/bigquery/tests/data/characters.json @@ -38,6 +38,7 @@ "TeaTime" : "15:00:00", "Weight" : 198.6, "FavoriteTime" : "2001-12-19T23:59:59", + "FavoriteNumber" : "3.141592654", "IsMagic" : true }, { @@ -47,6 +48,7 @@ "IsMagic" : true, "FavoriteTime" : "2000-10-31T23:27:46", "Age" : "17", + "FavoriteNumber" : "13", "Spells" : [ { "LastUsed" : "2017-02-14 12:07:23 UTC", diff --git a/bigquery/tests/data/characters.jsonl b/bigquery/tests/data/characters.jsonl index 1da3f2309cae..42b5bdc6a152 100644 --- a/bigquery/tests/data/characters.jsonl +++ b/bigquery/tests/data/characters.jsonl @@ -1,3 +1,3 @@ -{"Name":"Bilbo","Age":"111","Weight":67.2,"IsMagic":false,"Spells":[],"TeaTime":"10:00:00","NextVacation":"2017-09-22","FavoriteTime":"2031-04-01T05:09:27"} -{"Name":"Gandalf","Age":"1000","Weight":198.6,"IsMagic":true,"Spells":[{"Name": "Skydragon", "Icon":"iVBORw0KGgoAAAANSUhEUgAAAB4AAAAgCAYAAAAFQMh/AAAAAXNSR0IArs4c6QAAA9lJREFUSA21lk9OVEEQxvsRDImoiMG9mLjjCG5mEg7gEfQGsIcF7p0EDsBBSJiNO7ZsFRZqosb/QkSj7fer7ur33sw8GDFUUq+7q6vqq6qu7pkQzqG4EeI521e7FePVgM9cGPYwhCi6UO8qFOK+YY+Br66ujsmmxb84Yzwp6zCsxjJfWVkxnMsEMGuWHZ9Wcz11cM48hkq0vLwc1tbW4mAwqDpcdIqnMmgF0JMv2CiGnZ2dcHR0FA4PD8Pe3t5U/tx6bCSlb+JT8XfxT3HsUek0Li0tRdjWl+z6iRF+FNA1hXPDQ/IMNyRg3s8bD/OaZS+VP+9cOLSa64cA34oXZWagDkRzAaJxXaE+ufc4rCN7LrazZ2+8+STtpAL8WYDvpTaHKlkB2iQARMvb2+H27m4YaL7zaDtUw1BZAASi6T8T2UZnPZV2pvnJfCH5p8bewcGB6TrIfz8wBZgHQ83kjpuj6RBYQpuo09Tvmpd7TPe+ktZN8cKwS92KWXGuaqWowlYEwthtMcWOZUNJc8at+zuF/Xkqo69baS7P+AvWjYwJ4jyHXXsEnd74ZO/Pq+uXUuv6WNlso6cvnDsZB1V/unJab3D1/KrJDw9NCM9wHf2FK2ejTKMejnBHfGtfH7LGGCdQDqaqJgfgzWjXK1nYV4jRbPGnxUT7cqUaZfJrVZeOm9QmB21L6xXgbu/ScsYusJFMoU0x2fsamRJOd6kOYDRLUxv94ENZe8+0gM+0dyz+KgU7X8rLHHCIOZyrna4y6ykIu0YCs02TBXmk3PZssmEgaTxTo83xjCIjoE21h0Yah3MrV4+9kR8MaabGze+9NEILGAFE5nMOiiA32KnAr/sb7tED3nzlzC4dB38WMC+EjaqHfqvUKHi2gJPdWQ6AbH8hgyQ7QY6jvjj3QZWvX6pUAtduTX5Dss96Q7NI9RQRJeeKvRFbt0v2gb1Gx/PooJsztn1c1DqpAU3Hde2dB2aEHBhjgOFjMeDvxLafjQ3YZQSgOcHJZX611H45sGLHWvYTz9hiURlpNoBZvxb/Ft9lAQ1DmBfUiR+j1hAPkMBTE9L9+zLva1QvGFHurRBaZ5xLVitoBviiRkD/sIMDztKA5FA0b9/0OclzO2/XAQymJ0TcghZwEo9/AX8gMeAJMOvIsWWt5bwCoiFhVSllrdH0t5Q1JHAFlKJNkvTVdn2GHb9KdmacMT+d/Os05imJUccRX2YuZ93Sxf0Ilc4DPDeAq5SAvFEAY94cQc6BA26dzb4HWAJI4DPmQE5KCVUyvb2FcDZem7JdT2ggKUP3xX6n9XNq1DpzSf4Cy4ZqSlmM8d8AAAAASUVORK5CYII=","DiscoveredBy":"Firebreather","Properties":[{"Name":"Flying","Power":1},{"Name":"Creature","Power":1},{"Name":"Explodey","Power":11}],"LastUsed":"2015-10-31 23:59:56 
UTC"}],"TeaTime":"15:00:00","NextVacation":"2666-06-06","FavoriteTime":"2001-12-19T23:59:59"} -{"Name":"Sabrina","Age":"17","Weight":128.3,"IsMagic":true,"Spells":[{"Name": "Talking cats", "Icon":"iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAAAXNSR0IArs4c6QAAABxpRE9UAAAAAgAAAAAAAAAgAAAAKAAAACAAAAAgAAABxj2CfowAAAGSSURBVHgB7Jc9TsNAEIX3JDkCPUV6KlpKFHEGlD4nyA04ACUXQKTgCEipUnKGNEbP0otentayicZ24SlWs7tjO/N9u/5J2b2+NUtuZcnwYE8BuQPyGZAPwXwLLPk5kG+BJa9+fgfkh1B+CeancL4F8i2Q/wWm/S/w+XFoTseftn0dvhu0OXfhpM+AGvzcEiYVAFisPqE9zrETJhHAlXfg2lglMK9z0f3RBfB+ZyRUV3x+erzsEIjjOBqc1xtNAIrvguybV3A9lkVHxlEE6GrrPb/ZvAySwlUnfCmlPQ+R8JCExvGtcRQBLFwj4FGkznX1VYDKPG/f2/MjwCksXACgdNUxJjwK9xwl4JihOwTFR0kIF+CABEPRnvsvPFctMoYKqAFSAFaMwB4pp3Y+bodIYL9WmIAaIOHxo7W8wiHvAjTvhUeNwwSgeAeAABbqOewC5hBdwFD4+9+7puzXV9fS6/b1wwT4tsaYAhwOOQdUQch5vgZCeAhAv3ZM31yYAAUgvApQQQ6n5w6FB/RVe1jdJOAPAAD//1eMQwoAAAGQSURBVO1UMU4DQQy8X9AgWopIUINEkS4VlJQo4gvwAV7AD3gEH4iSgidESpWSXyyZExP5lr0c7K5PsXBhec/2+jzjuWtent9CLdtu1mG5+gjz+WNr7IsY7eH+tvO+xfuqk4vz7CH91edFaF5v9nb6dBKm13edvrL+0Lk5lMzJkQDeJSkkgHF6mR8CHwMHCQR/NAQQGD0BAlwK4FCefQiefq+A2Vn29tG7igLAfmwcnJu/nJy3BMQkMN9HEPr8AL3bfBv7Bp+7/SoExMDjZwKEJwmyhnnmQIQEBIlz2x0iKoAvJkAC6TsTIH6MqRrEWUMSZF2zAwqT4Eu/e6pzFAIkmNSZ4OFT+VYBIIF//UqbJwnF/4DU0GwOn8r/JQYCpPGufEfJuZiA37ycQw/5uFeqPq4pfR6FADmkBCXjfWdZj3NfXW58dAJyB9W65wRoMWulryvAyqa05nQFaDFrpa8rwMqmtOZ0BWgxa6WvK8DKprTmdAVoMWulryvAyqa05nQFaDFrpa8rwMqmtOb89wr4AtQ4aPoL6yVpAAAAAElFTkSuQmCC","DiscoveredBy":"Salem","Properties":[{"Name":"Makes you look crazy","Power":1}],"LastUsed":"2017-02-14 12:07:23 UTC"}],"TeaTime":"12:00:00","NextVacation":"2017-03-14","FavoriteTime":"2000-10-31T23:27:46"} +{"Name":"Bilbo","Age":"111","Weight":67.2,"IsMagic":false,"Spells":[],"TeaTime":"10:00:00","NextVacation":"2017-09-22","FavoriteTime":"2031-04-01T05:09:27","FavoriteNumber":"111"} +{"Name":"Gandalf","Age":"1000","Weight":198.6,"IsMagic":true,"Spells":[{"Name": "Skydragon", "Icon":"iVBORw0KGgoAAAANSUhEUgAAAB4AAAAgCAYAAAAFQMh/AAAAAXNSR0IArs4c6QAAA9lJREFUSA21lk9OVEEQxvsRDImoiMG9mLjjCG5mEg7gEfQGsIcF7p0EDsBBSJiNO7ZsFRZqosb/QkSj7fer7ur33sw8GDFUUq+7q6vqq6qu7pkQzqG4EeI521e7FePVgM9cGPYwhCi6UO8qFOK+YY+Br66ujsmmxb84Yzwp6zCsxjJfWVkxnMsEMGuWHZ9Wcz11cM48hkq0vLwc1tbW4mAwqDpcdIqnMmgF0JMv2CiGnZ2dcHR0FA4PD8Pe3t5U/tx6bCSlb+JT8XfxT3HsUek0Li0tRdjWl+z6iRF+FNA1hXPDQ/IMNyRg3s8bD/OaZS+VP+9cOLSa64cA34oXZWagDkRzAaJxXaE+ufc4rCN7LrazZ2+8+STtpAL8WYDvpTaHKlkB2iQARMvb2+H27m4YaL7zaDtUw1BZAASi6T8T2UZnPZV2pvnJfCH5p8bewcGB6TrIfz8wBZgHQ83kjpuj6RBYQpuo09Tvmpd7TPe+ktZN8cKwS92KWXGuaqWowlYEwthtMcWOZUNJc8at+zuF/Xkqo69baS7P+AvWjYwJ4jyHXXsEnd74ZO/Pq+uXUuv6WNlso6cvnDsZB1V/unJab3D1/KrJDw9NCM9wHf2FK2ejTKMejnBHfGtfH7LGGCdQDqaqJgfgzWjXK1nYV4jRbPGnxUT7cqUaZfJrVZeOm9QmB21L6xXgbu/ScsYusJFMoU0x2fsamRJOd6kOYDRLUxv94ENZe8+0gM+0dyz+KgU7X8rLHHCIOZyrna4y6ykIu0YCs02TBXmk3PZssmEgaTxTo83xjCIjoE21h0Yah3MrV4+9kR8MaabGze+9NEILGAFE5nMOiiA32KnAr/sb7tED3nzlzC4dB38WMC+EjaqHfqvUKHi2gJPdWQ6AbH8hgyQ7QY6jvjj3QZWvX6pUAtduTX5Dss96Q7NI9RQRJeeKvRFbt0v2gb1Gx/PooJsztn1c1DqpAU3Hde2dB2aEHBhjgOFjMeDvxLafjQ3YZQSgOcHJZX611H45sGLHWvYTz9hiURlpNoBZvxb/Ft9lAQ1DmBfUiR+j1hAPkMBTE9L9+zLva1QvGFHurRBaZ5xLVitoBviiRkD/sIMDztKA5FA0b9/0OclzO2/XAQymJ0TcghZwEo9/AX8gMeAJMOvIsWWt5bwCoiFhVSllrdH0t5Q1JHAFlKJNkvTVdn2GHb9KdmacMT+d/Os05imJUccRX2YuZ93Sxf0Ilc4DPDeAq5SAvFEAY94cQc6BA26dzb4HWAJI4DPmQE5KCVUyvb2FcDZem7JdT2ggKUP3xX6n9XNq1DpzSf4Cy4ZqSlmM8d8AAAAASUVORK5CYII=","DiscoveredBy":"Firebreather","Properties":[{"Name":"Flying","Power":1},{"Name":"Creature","Power":1},{"Name":"Explodey","Power":11}],"LastUsed":"2015-10-31 23:59:56 
UTC"}],"TeaTime":"15:00:00","NextVacation":"2666-06-06","FavoriteTime":"2001-12-19T23:59:59","FavoriteNumber":"1.618033989"} +{"Name":"Sabrina","Age":"17","Weight":128.3,"IsMagic":true,"Spells":[{"Name": "Talking cats", "Icon":"iVBORw0KGgoAAAANSUhEUgAAAEAAAABACAYAAACqaXHeAAAAAXNSR0IArs4c6QAAABxpRE9UAAAAAgAAAAAAAAAgAAAAKAAAACAAAAAgAAABxj2CfowAAAGSSURBVHgB7Jc9TsNAEIX3JDkCPUV6KlpKFHEGlD4nyA04ACUXQKTgCEipUnKGNEbP0otentayicZ24SlWs7tjO/N9u/5J2b2+NUtuZcnwYE8BuQPyGZAPwXwLLPk5kG+BJa9+fgfkh1B+CeancL4F8i2Q/wWm/S/w+XFoTseftn0dvhu0OXfhpM+AGvzcEiYVAFisPqE9zrETJhHAlXfg2lglMK9z0f3RBfB+ZyRUV3x+erzsEIjjOBqc1xtNAIrvguybV3A9lkVHxlEE6GrrPb/ZvAySwlUnfCmlPQ+R8JCExvGtcRQBLFwj4FGkznX1VYDKPG/f2/MjwCksXACgdNUxJjwK9xwl4JihOwTFR0kIF+CABEPRnvsvPFctMoYKqAFSAFaMwB4pp3Y+bodIYL9WmIAaIOHxo7W8wiHvAjTvhUeNwwSgeAeAABbqOewC5hBdwFD4+9+7puzXV9fS6/b1wwT4tsaYAhwOOQdUQch5vgZCeAhAv3ZM31yYAAUgvApQQQ6n5w6FB/RVe1jdJOAPAAD//1eMQwoAAAGQSURBVO1UMU4DQQy8X9AgWopIUINEkS4VlJQo4gvwAV7AD3gEH4iSgidESpWSXyyZExP5lr0c7K5PsXBhec/2+jzjuWtent9CLdtu1mG5+gjz+WNr7IsY7eH+tvO+xfuqk4vz7CH91edFaF5v9nb6dBKm13edvrL+0Lk5lMzJkQDeJSkkgHF6mR8CHwMHCQR/NAQQGD0BAlwK4FCefQiefq+A2Vn29tG7igLAfmwcnJu/nJy3BMQkMN9HEPr8AL3bfBv7Bp+7/SoExMDjZwKEJwmyhnnmQIQEBIlz2x0iKoAvJkAC6TsTIH6MqRrEWUMSZF2zAwqT4Eu/e6pzFAIkmNSZ4OFT+VYBIIF//UqbJwnF/4DU0GwOn8r/JQYCpPGufEfJuZiA37ycQw/5uFeqPq4pfR6FADmkBCXjfWdZj3NfXW58dAJyB9W65wRoMWulryvAyqa05nQFaDFrpa8rwMqmtOZ0BWgxa6WvK8DKprTmdAVoMWulryvAyqa05nQFaDFrpa8rwMqmtOb89wr4AtQ4aPoL6yVpAAAAAElFTkSuQmCC","DiscoveredBy":"Salem","Properties":[{"Name":"Makes you look crazy","Power":1}],"LastUsed":"2017-02-14 12:07:23 UTC"}],"TeaTime":"12:00:00","NextVacation":"2017-03-14","FavoriteTime":"2000-10-31T23:27:46","FavoriteNumber":"13"} diff --git a/bigquery/tests/data/schema.json b/bigquery/tests/data/schema.json index 303076123dd9..6a36e55e579a 100644 --- a/bigquery/tests/data/schema.json +++ b/bigquery/tests/data/schema.json @@ -78,6 +78,11 @@ "mode" : "NULLABLE", "name" : "FavoriteTime", "type" : "DATETIME" + }, + { + "mode" : "NULLABLE", + "name" : "FavoriteNumber", + "type" : "NUMERIC" } ] } diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py index 7d79541c2729..4ab6c8b8c9b4 100644 --- a/bigquery/tests/system.py +++ b/bigquery/tests/system.py @@ -16,6 +16,7 @@ import concurrent.futures import csv import datetime +import decimal import json import operator import os @@ -959,6 +960,7 @@ def _generate_standard_sql_types_examples(self): stamp_microseconds = stamp + '.250000' zoned = naive.replace(tzinfo=UTC) zoned_microseconds = naive_microseconds.replace(tzinfo=UTC) + numeric = decimal.Decimal('123456789.123456789') return [ { 'sql': 'SELECT 1', @@ -1005,6 +1007,10 @@ def _generate_standard_sql_types_examples(self): 'sql': 'SELECT TIME(TIMESTAMP "%s")' % (stamp,), 'expected': naive.time(), }, + { + 'sql': 'SELECT NUMERIC "%s"' % (numeric,), + 'expected': numeric, + }, { 'sql': 'SELECT (1, 2)', 'expected': {'_field_1': 1, '_field_2': 2}, @@ -1257,6 +1263,10 @@ def test_query_w_query_params(self): pi = 3.1415926 pi_param = ScalarQueryParameter( name='pi', type_='FLOAT64', value=pi) + pi_numeric = decimal.Decimal('3.141592654') + pi_numeric_param = ScalarQueryParameter( + name='pi_numeric_param', type_='NUMERIC', + value=pi_numeric) truthy = True truthy_param = ScalarQueryParameter( name='truthy', type_='BOOL', value=truthy) @@ -1332,6 +1342,11 @@ def test_query_w_query_params(self): 'expected': pi, 'query_parameters': [pi_param], }, + { + 'sql': 'SELECT @pi_numeric_param', + 'expected': pi_numeric, + 'query_parameters': [pi_numeric_param], + }, { 
'sql': 'SELECT @truthy', 'expected': truthy, @@ -1771,6 +1786,8 @@ def test_create_table_rows_fetch_nested_schema(self): '%Y-%m-%dT%H:%M:%S') e_favtime = datetime.datetime(*parts[0:6]) self.assertEqual(found[7], e_favtime) + self.assertEqual(found[8], + decimal.Decimal(expected['FavoriteNumber'])) def _fetch_dataframe(self, query): return Config.CLIENT.query(query).result().to_dataframe() diff --git a/bigquery/tests/unit/test__helpers.py b/bigquery/tests/unit/test__helpers.py index 0e407551f395..b4899094dc93 100644 --- a/bigquery/tests/unit/test__helpers.py +++ b/bigquery/tests/unit/test__helpers.py @@ -14,6 +14,7 @@ import base64 import datetime +import decimal import unittest @@ -80,6 +81,30 @@ def test_w_float_value(self): self.assertEqual(coerced, 3.1415) +class Test_decimal_from_json(unittest.TestCase): + + def _call_fut(self, value, field): + from google.cloud.bigquery._helpers import _decimal_from_json + + return _decimal_from_json(value, field) + + def test_w_none_nullable(self): + self.assertIsNone(self._call_fut(None, _Field('NULLABLE'))) + + def test_w_none_required(self): + with self.assertRaises(TypeError): + self._call_fut(None, _Field('REQUIRED')) + + def test_w_string_value(self): + coerced = self._call_fut('3.1415', object()) + self.assertEqual(coerced, decimal.Decimal('3.1415')) + + def test_w_float_value(self): + coerced = self._call_fut(3.1415, object()) + # There is no exact float representation of 3.1415. + self.assertEqual(coerced, decimal.Decimal(3.1415)) + + class Test_bool_from_json(unittest.TestCase): def _call_fut(self, value, field): @@ -585,6 +610,23 @@ def test_w_float(self): self.assertEqual(self._call_fut(1.23), 1.23) +class Test_decimal_to_json(unittest.TestCase): + + def _call_fut(self, value): + from google.cloud.bigquery._helpers import _decimal_to_json + + return _decimal_to_json(value) + + def test_w_float(self): + self.assertEqual(self._call_fut(1.23), 1.23) + + def test_w_string(self): + self.assertEqual(self._call_fut('1.23'), '1.23') + + def test_w_decimal(self): + self.assertEqual(self._call_fut(decimal.Decimal('1.23')), '1.23') + + class Test_bool_to_json(unittest.TestCase): def _call_fut(self, value): diff --git a/bigquery/tests/unit/test_client.py b/bigquery/tests/unit/test_client.py index 89f738a3ccb7..427f403e6be5 100644 --- a/bigquery/tests/unit/test_client.py +++ b/bigquery/tests/unit/test_client.py @@ -13,6 +13,7 @@ # limitations under the License. 
import copy +import decimal import email import io import json @@ -2825,6 +2826,52 @@ def test_insert_rows_errors(self): with self.assertRaises(TypeError): client.insert_rows(1, ROWS) + def test_insert_rows_w_numeric(self): + from google.cloud.bigquery import table + + project = 'PROJECT' + ds_id = 'DS_ID' + table_id = 'TABLE_ID' + creds = _make_credentials() + http = object() + client = self._make_one(project=project, credentials=creds, _http=http) + conn = client._connection = _make_connection({}) + table_ref = DatasetReference(project, ds_id).table(table_id) + schema = [ + table.SchemaField('account', 'STRING'), + table.SchemaField('balance', 'NUMERIC'), + ] + insert_table = table.Table(table_ref, schema=schema) + rows = [ + ('Savings', decimal.Decimal('23.47')), + ('Checking', decimal.Decimal('1.98')), + ('Mortgage', decimal.Decimal('-12345678909.87654321')), + ] + + with mock.patch('uuid.uuid4', side_effect=map(str, range(len(rows)))): + errors = client.insert_rows(insert_table, rows) + + self.assertEqual(len(errors), 0) + rows_json = [ + {'account': 'Savings', 'balance': '23.47'}, + {'account': 'Checking', 'balance': '1.98'}, + { + 'account': 'Mortgage', + 'balance': '-12345678909.87654321', + }, + ] + sent = { + 'rows': [{ + 'json': row, + 'insertId': str(i), + } for i, row in enumerate(rows_json)], + } + conn.api_request.assert_called_once_with( + method='POST', + path='/projects/{}/datasets/{}/tables/{}/insertAll'.format( + project, ds_id, table_id), + data=sent) + def test_insert_rows_json(self): from google.cloud.bigquery.table import Table, SchemaField from google.cloud.bigquery.dataset import DatasetReference diff --git a/bigquery/tests/unit/test_dbapi__helpers.py b/bigquery/tests/unit/test_dbapi__helpers.py index 78c5ea1ca18a..f0430f06a1e5 100644 --- a/bigquery/tests/unit/test_dbapi__helpers.py +++ b/bigquery/tests/unit/test_dbapi__helpers.py @@ -13,6 +13,7 @@ # limitations under the License. 
 import datetime
+import decimal
 import math
 import unittest
@@ -30,6 +31,7 @@ def test_scalar_to_query_parameter(self):
         (123, 'INT64'),
         (-123456789, 'INT64'),
         (1.25, 'FLOAT64'),
+        (decimal.Decimal('1.25'), 'NUMERIC'),
         (b'I am some bytes', 'BYTES'),
         (u'I am a string', 'STRING'),
         (datetime.date(2017, 4, 1), 'DATE'),
diff --git a/bigquery/tests/unit/test_query.py b/bigquery/tests/unit/test_query.py
index bce6d2cd726a..c262132f8e0c 100644
--- a/bigquery/tests/unit/test_query.py
+++ b/bigquery/tests/unit/test_query.py
@@ -172,6 +172,20 @@ def test_to_api_repr_w_float(self):
         param = klass.positional(type_='FLOAT64', value=12.345)
         self.assertEqual(param.to_api_repr(), EXPECTED)
 
+    def test_to_api_repr_w_numeric(self):
+        EXPECTED = {
+            'parameterType': {
+                'type': 'NUMERIC',
+            },
+            'parameterValue': {
+                'value': '123456789.123456789',
+            },
+        }
+        klass = self._get_target_class()
+        param = klass.positional(type_='NUMERIC',
+                                 value='123456789.123456789')
+        self.assertEqual(param.to_api_repr(), EXPECTED)
+
     def test_to_api_repr_w_bool(self):
         EXPECTED = {
             'parameterType': {

From b7b844412ec4b415ffc7b4f31671641fcd75a237 Mon Sep 17 00:00:00 2001
From: Thomas
Date: Fri, 18 May 2018 19:21:35 +0100
Subject: [PATCH 14/75] Make dependency on logging less restrictive in error_reporting (#5345)

---
 error_reporting/setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/error_reporting/setup.py b/error_reporting/setup.py
index fa76d5166238..8fc1638fd35f 100644
--- a/error_reporting/setup.py
+++ b/error_reporting/setup.py
@@ -29,7 +29,7 @@
 # 'Development Status :: 5 - Production/Stable'
 release_status = 'Development Status :: 3 - Alpha'
 dependencies = [
-    'google-cloud-logging<1.5dev,>=1.4.0',
+    'google-cloud-logging>=1.4.0, <2.0dev',
 ]
 extras = {
 }

From 1785b6e5ae73dfb470bf0d9faad714ef3cd4e733 Mon Sep 17 00:00:00 2001
From: Christopher Wilcox
Date: Fri, 18 May 2018 15:17:23 -0400
Subject: [PATCH 15/75] Release 1.10.0 (#5346)

---
 storage/CHANGELOG.md | 9 +++++++++
 storage/setup.py | 2 +-
 2 files changed, 10 insertions(+), 1 deletion(-)

diff --git a/storage/CHANGELOG.md b/storage/CHANGELOG.md
index f3ca7ce65fed..002e92db7c09 100644
--- a/storage/CHANGELOG.md
+++ b/storage/CHANGELOG.md
@@ -4,6 +4,15 @@
 
 [1]: https://pypi.org/project/google-cloud-storage/#history
 
+## 1.10.0
+
+### New Features
+- Add support for KMS keys (#5259)
+- Add '{Blob,Bucket}.make_private' method (#5336)
+
+### Internal / Testing Changes
+- Modify system tests to use prerelease versions of grpcio (#5304)
+
 ## 1.9.0
 
 ### Implementation Changes
diff --git a/storage/setup.py b/storage/setup.py
index 910a6d83ee22..a0ce64d4aca7 100644
--- a/storage/setup.py
+++ b/storage/setup.py
@@ -22,7 +22,7 @@
 
 name = 'google-cloud-storage'
 description = 'Google Cloud Storage API client library'
-version = '1.9.0'
+version = '1.10.0'
 # Should be one of:
 # 'Development Status :: 3 - Alpha'
 # 'Development Status :: 4 - Beta'

From 3261043ea1c46e8b2a006ef5422565f780bec5fa Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Mon, 21 May 2018 12:27:03 -0400
Subject: [PATCH 16/75] Normalize overflow handling for max count and bytes (#5343)

* Remove exceptions for oversize messages. Toward #4608.

* Normalize overflow handling for max count and bytes. Closes #4608.
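Sketch of the new acceptance rule (not part of this patch), paraphrasing the Batch.publish() change below; the BatchSettings fields are as in pubsub_v1.types:

    def batch_accepts(current_size, current_count, message_size, settings):
        """Mirror the overflow test now performed in Batch.publish()."""
        new_size = current_size + message_size
        new_count = current_count + 1
        overflow = (
            new_size > settings.max_bytes or
            new_count >= settings.max_messages
        )
        # An empty batch accepts even an oversize message, which is then
        # committed on its own; otherwise an overflowing message is
        # refused and Batch.publish() returns None instead of a future.
        return current_count == 0 or not overflow

The net effect at the client level is that publishing an oversize message no longer raises ValueError up front; the size check now lives entirely in the batch.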
--- .../cloud/pubsub_v1/publisher/batch/base.py | 5 --- .../cloud/pubsub_v1/publisher/batch/thread.py | 36 +++++++++++-------- .../cloud/pubsub_v1/publisher/client.py | 6 ---- .../pubsub_v1/publisher/batch/test_base.py | 14 ++++---- .../pubsub_v1/publisher/batch/test_thread.py | 4 +-- .../publisher/test_publisher_client.py | 13 ------- 6 files changed, 31 insertions(+), 47 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py index dae0dafb9fd5..ac1f7ef7fe0e 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/base.py @@ -122,11 +122,6 @@ def will_accept(self, message): if self.status != BatchStatus.ACCEPTING_MESSAGES: return False - # If this message will make the batch exceed the ``max_bytes`` - # setting, return False. - if self.size + message.ByteSize() > self.settings.max_bytes: - return False - # If this message will make the batch exceed the ``max_messages`` # setting, return False. if len(self.messages) >= self.settings.max_messages: diff --git a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py index 64186b130e94..3b11bf32ef25 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/batch/thread.py @@ -280,25 +280,33 @@ def publish(self, message): if not isinstance(message, types.PubsubMessage): message = types.PubsubMessage(**message) + future = None + with self._state_lock: if not self.will_accept(message): - return None - - # Add the size to the running total of the size, so we know - # if future messages need to be rejected. - self._size += message.ByteSize() - # Store the actual message in the batch's message queue. - self._messages.append(message) - # Track the future on this batch (so that the result of the - # future can be set). - future = futures.Future(completed=threading.Event()) - self._futures.append(future) - # Determine the number of messages before releasing the lock. - num_messages = len(self._messages) + return future + + new_size = self._size + message.ByteSize() + new_count = len(self._messages) + 1 + overflow = ( + new_size > self.settings.max_bytes or + new_count >= self._settings.max_messages + ) + + if not self._messages or not overflow: + + # Store the actual message in the batch's message queue. + self._messages.append(message) + self._size = new_size + + # Track the future on this batch (so that the result of the + # future can be set). + future = futures.Future(completed=threading.Event()) + self._futures.append(future) # Try to commit, but it must be **without** the lock held, since # ``commit()`` will try to obtain the lock. - if num_messages >= self._settings.max_messages: + if overflow: self.commit() return future diff --git a/pubsub/google/cloud/pubsub_v1/publisher/client.py b/pubsub/google/cloud/pubsub_v1/publisher/client.py index 8fd91bcc9153..1aa1a279d393 100644 --- a/pubsub/google/cloud/pubsub_v1/publisher/client.py +++ b/pubsub/google/cloud/pubsub_v1/publisher/client.py @@ -199,12 +199,6 @@ def publish(self, topic, data, **attrs): # Create the Pub/Sub message object. message = types.PubsubMessage(data=data, attributes=attrs) - if message.ByteSize() > self.batch_settings.max_bytes: - raise ValueError( - 'Message being published is too large for the ' - 'batch settings with max bytes {}.'. - format(self.batch_settings.max_bytes) - ) # Delegate the publishing to the batch. 
batch = self.batch(topic) diff --git a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py index d4177e2f7d55..1c5dd7cfdaa3 100644 --- a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py +++ b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_base.py @@ -54,18 +54,18 @@ def test_will_accept(): assert batch.will_accept(message) is True -def test_will_not_accept_status(): - batch = create_batch(status='talk to the hand') - message = types.PubsubMessage() - assert batch.will_accept(message) is False - - -def test_will_not_accept_size(): +def test_will_accept_oversize(): batch = create_batch( settings=types.BatchSettings(max_bytes=10), status=BatchStatus.ACCEPTING_MESSAGES, ) message = types.PubsubMessage(data=b'abcdefghijklmnopqrstuvwxyz') + assert batch.will_accept(message) is True + + +def test_will_not_accept_status(): + batch = create_batch(status='talk to the hand') + message = types.PubsubMessage() assert batch.will_accept(message) is False diff --git a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py index fb62dbc6e550..bb50de5e0ec2 100644 --- a/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py +++ b/pubsub/tests/unit/pubsub_v1/publisher/batch/test_thread.py @@ -304,12 +304,12 @@ def test_publish_exceed_max_messages(): assert commit.call_count == 0 # When a fourth message is published, commit should be called. + # No future will be returned in this case. future = batch.publish(types.PubsubMessage(data=b'last one')) commit.assert_called_once_with() - futures.append(future) + assert future is None assert batch._futures == futures - assert len(futures) == max_messages def test_publish_dict(): diff --git a/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py b/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py index 4fa144aa54b5..188d1c09950d 100644 --- a/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py +++ b/pubsub/tests/unit/pubsub_v1/publisher/test_publisher_client.py @@ -131,19 +131,6 @@ def test_publish_data_not_bytestring_error(): client.publish(topic, 42) -def test_publish_data_too_large(): - creds = mock.Mock(spec=credentials.Credentials) - client = publisher.Client(credentials=creds) - topic = 'topic/path' - client.batch_settings = types.BatchSettings( - 0, - client.batch_settings.max_latency, - client.batch_settings.max_messages - ) - with pytest.raises(ValueError): - client.publish(topic, b'This is a text string.') - - def test_publish_attrs_bytestring(): creds = mock.Mock(spec=credentials.Credentials) client = publisher.Client(credentials=creds) From 53ef8a2dbdc9dc54492c224d3b1c9a23eb75378b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Mon, 21 May 2018 15:46:47 -0400 Subject: [PATCH 17/75] Re-sync with .proto / .textproto files from google-cloud-common. (#5351) Regenerated 'test_pb2.py' by hacking the 'Makefile' to work around machine- specific bits (those changes not committed). Copied in additional '.textproto' files, bumping the count of testcases from 83 to 236. Still doesn't include updates to run tests for 'listen-*.textproto'. 
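Loading sketch (not part of this patch): each '.textproto' fixture appears to hold one tests.Test message in protobuf text format, so a harness can parse it through the regenerated module roughly like this (the path below is illustrative):

    from google.protobuf import text_format

    from google.cloud.firestore_v1beta1.proto import test_pb2

    def load_fixture(path):
        # Parse a single conformance fixture into the generated
        # tests.Test message; text_format.Parse returns the message.
        with open(path) as handle:
            return text_format.Parse(handle.read(), test_pb2.Test())

    test = load_fixture('firestore/tests/unit/testdata/create-basic.textproto')
    print(test.description)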
--- .../cloud/firestore_v1beta1/proto/test_pb2.py | 816 ++++++++++++++++-- .../unit/testdata/create-basic.textproto | 27 + .../unit/testdata/create-complex.textproto | 61 ++ .../create-del-noarray-nested.textproto | 13 + .../testdata/create-del-noarray.textproto | 13 + .../unit/testdata/create-empty.textproto | 20 + .../unit/testdata/create-nodel.textproto | 11 + .../unit/testdata/create-nosplit.textproto | 40 + .../testdata/create-special-chars.textproto | 41 + .../unit/testdata/create-st-alone.textproto | 26 + .../unit/testdata/create-st-multi.textproto | 41 + .../unit/testdata/create-st-nested.textproto | 38 + .../create-st-noarray-nested.textproto | 12 + .../unit/testdata/create-st-noarray.textproto | 12 + .../tests/unit/testdata/create-st.textproto | 39 + .../testdata/delete-exists-precond.textproto | 21 + .../unit/testdata/delete-no-precond.textproto | 15 + .../testdata/delete-time-precond.textproto | 25 + .../tests/unit/testdata/get-basic.textproto | 12 + .../testdata/listen-add-mod-del-add.textproto | 246 ++++++ .../unit/testdata/listen-add-one.textproto | 79 ++ .../unit/testdata/listen-add-three.textproto | 190 ++++ .../unit/testdata/listen-doc-remove.textproto | 115 +++ .../unit/testdata/listen-empty.textproto | 25 + .../unit/testdata/listen-filter-nop.textproto | 247 ++++++ .../unit/testdata/listen-multi-docs.textproto | 524 +++++++++++ .../unit/testdata/listen-nocurrent.textproto | 141 +++ .../unit/testdata/listen-nomod.textproto | 143 +++ .../listen-removed-target-ids.textproto | 131 +++ .../unit/testdata/listen-reset.textproto | 382 ++++++++ .../testdata/listen-target-add-nop.textproto | 88 ++ .../listen-target-add-wrong-id.textproto | 50 ++ .../testdata/listen-target-remove.textproto | 46 + .../unit/testdata/query-bad-NaN.textproto | 19 + .../unit/testdata/query-bad-null.textproto | 19 + .../query-cursor-docsnap-order.textproto | 68 ++ ...uery-cursor-docsnap-orderby-name.textproto | 76 ++ .../query-cursor-docsnap-where-eq.textproto | 53 ++ ...cursor-docsnap-where-neq-orderby.textproto | 72 ++ .../query-cursor-docsnap-where-neq.textproto | 64 ++ .../testdata/query-cursor-docsnap.textproto | 34 + .../testdata/query-cursor-no-order.textproto | 16 + .../testdata/query-cursor-vals-1a.textproto | 50 ++ .../testdata/query-cursor-vals-1b.textproto | 48 ++ .../testdata/query-cursor-vals-2.textproto | 71 ++ .../query-cursor-vals-docid.textproto | 50 ++ .../query-cursor-vals-last-wins.textproto | 60 ++ .../unit/testdata/query-del-cursor.textproto | 23 + .../unit/testdata/query-del-where.textproto | 19 + .../testdata/query-invalid-operator.textproto | 19 + .../query-invalid-path-order.textproto | 19 + .../query-invalid-path-select.textproto | 18 + .../query-invalid-path-where.textproto | 20 + .../query-offset-limit-last-wins.textproto | 30 + .../testdata/query-offset-limit.textproto | 24 + .../tests/unit/testdata/query-order.textproto | 42 + .../testdata/query-select-empty.textproto | 23 + .../testdata/query-select-last-wins.textproto | 36 + .../unit/testdata/query-select.textproto | 32 + .../unit/testdata/query-st-cursor.textproto | 23 + .../unit/testdata/query-st-where.textproto | 19 + .../unit/testdata/query-where-2.textproto | 59 ++ .../unit/testdata/query-where-NaN.textproto | 31 + .../unit/testdata/query-where-null.textproto | 31 + .../tests/unit/testdata/query-where.textproto | 34 + .../testdata/query-wrong-collection.textproto | 19 + .../tests/unit/testdata/set-basic.textproto | 24 + .../tests/unit/testdata/set-complex.textproto | 58 ++ .../testdata/set-del-merge-alone.textproto 
| 28 + .../unit/testdata/set-del-merge.textproto | 37 + .../unit/testdata/set-del-mergeall.textproto | 31 + .../testdata/set-del-noarray-nested.textproto | 13 + .../unit/testdata/set-del-noarray.textproto | 13 + .../unit/testdata/set-del-nomerge.textproto | 17 + .../unit/testdata/set-del-nonleaf.textproto | 19 + .../unit/testdata/set-del-wo-merge.textproto | 12 + .../tests/unit/testdata/set-empty.textproto | 17 + .../unit/testdata/set-merge-fp.textproto | 40 + .../unit/testdata/set-merge-nested.textproto | 41 + .../unit/testdata/set-merge-nonleaf.textproto | 46 + .../unit/testdata/set-merge-prefix.textproto | 21 + .../unit/testdata/set-merge-present.textproto | 20 + .../tests/unit/testdata/set-merge.textproto | 32 + .../testdata/set-mergeall-empty.textproto | 23 + .../testdata/set-mergeall-nested.textproto | 45 + .../unit/testdata/set-mergeall.textproto | 37 + .../tests/unit/testdata/set-nodel.textproto | 11 + .../tests/unit/testdata/set-nosplit.textproto | 37 + .../unit/testdata/set-special-chars.textproto | 38 + .../testdata/set-st-alone-mergeall.textproto | 26 + .../unit/testdata/set-st-alone.textproto | 28 + .../unit/testdata/set-st-merge-both.textproto | 45 + .../set-st-merge-nonleaf-alone.textproto | 37 + .../testdata/set-st-merge-nonleaf.textproto | 49 ++ .../testdata/set-st-merge-nowrite.textproto | 28 + .../unit/testdata/set-st-mergeall.textproto | 40 + .../unit/testdata/set-st-multi.textproto | 38 + .../unit/testdata/set-st-nested.textproto | 35 + .../testdata/set-st-noarray-nested.textproto | 12 + .../unit/testdata/set-st-noarray.textproto | 12 + .../unit/testdata/set-st-nomerge.textproto | 33 + .../tests/unit/testdata/set-st.textproto | 36 + .../tests/unit/testdata/test-suite.binproto | Bin 0 -> 38337 bytes .../unit/testdata/update-badchar.textproto | 12 + .../unit/testdata/update-basic.textproto | 30 + .../unit/testdata/update-complex.textproto | 65 ++ .../unit/testdata/update-del-alone.textproto | 25 + .../unit/testdata/update-del-dot.textproto | 46 + .../unit/testdata/update-del-nested.textproto | 11 + .../update-del-noarray-nested.textproto | 13 + .../testdata/update-del-noarray.textproto | 13 + .../tests/unit/testdata/update-del.textproto | 32 + .../testdata/update-exists-precond.textproto | 14 + .../update-fp-empty-component.textproto | 11 + .../unit/testdata/update-no-paths.textproto | 11 + .../testdata/update-paths-basic.textproto | 33 + .../testdata/update-paths-complex.textproto | 72 ++ .../testdata/update-paths-del-alone.textproto | 28 + .../update-paths-del-nested.textproto | 14 + .../update-paths-del-noarray-nested.textproto | 16 + .../update-paths-del-noarray.textproto | 16 + .../unit/testdata/update-paths-del.textproto | 39 + .../update-paths-exists-precond.textproto | 17 + .../testdata/update-paths-fp-del.textproto | 47 + .../testdata/update-paths-fp-dup.textproto | 22 + .../update-paths-fp-empty-component.textproto | 15 + .../testdata/update-paths-fp-empty.textproto | 13 + .../testdata/update-paths-fp-multi.textproto | 42 + .../update-paths-fp-nosplit.textproto | 48 ++ .../testdata/update-paths-no-paths.textproto | 10 + .../testdata/update-paths-prefix-1.textproto | 19 + .../testdata/update-paths-prefix-2.textproto | 19 + .../testdata/update-paths-prefix-3.textproto | 20 + .../update-paths-special-chars.textproto | 53 ++ .../testdata/update-paths-st-alone.textproto | 29 + .../testdata/update-paths-st-multi.textproto | 56 ++ .../testdata/update-paths-st-nested.textproto | 49 ++ .../update-paths-st-noarray-nested.textproto | 15 + 
.../update-paths-st-noarray.textproto | 15 + .../unit/testdata/update-paths-st.textproto | 49 ++ .../testdata/update-paths-uptime.textproto | 40 + .../unit/testdata/update-prefix-1.textproto | 11 + .../unit/testdata/update-prefix-2.textproto | 11 + .../unit/testdata/update-prefix-3.textproto | 12 + .../unit/testdata/update-quoting.textproto | 45 + .../testdata/update-split-top-level.textproto | 45 + .../unit/testdata/update-split.textproto | 44 + .../unit/testdata/update-st-alone.textproto | 26 + .../unit/testdata/update-st-dot.textproto | 27 + .../unit/testdata/update-st-multi.textproto | 49 ++ .../unit/testdata/update-st-nested.textproto | 42 + .../update-st-noarray-nested.textproto | 12 + .../unit/testdata/update-st-noarray.textproto | 12 + .../tests/unit/testdata/update-st.textproto | 42 + .../unit/testdata/update-uptime.textproto | 37 + 155 files changed, 7426 insertions(+), 58 deletions(-) create mode 100644 firestore/tests/unit/testdata/create-basic.textproto create mode 100644 firestore/tests/unit/testdata/create-complex.textproto create mode 100644 firestore/tests/unit/testdata/create-del-noarray-nested.textproto create mode 100644 firestore/tests/unit/testdata/create-del-noarray.textproto create mode 100644 firestore/tests/unit/testdata/create-empty.textproto create mode 100644 firestore/tests/unit/testdata/create-nodel.textproto create mode 100644 firestore/tests/unit/testdata/create-nosplit.textproto create mode 100644 firestore/tests/unit/testdata/create-special-chars.textproto create mode 100644 firestore/tests/unit/testdata/create-st-alone.textproto create mode 100644 firestore/tests/unit/testdata/create-st-multi.textproto create mode 100644 firestore/tests/unit/testdata/create-st-nested.textproto create mode 100644 firestore/tests/unit/testdata/create-st-noarray-nested.textproto create mode 100644 firestore/tests/unit/testdata/create-st-noarray.textproto create mode 100644 firestore/tests/unit/testdata/create-st.textproto create mode 100644 firestore/tests/unit/testdata/delete-exists-precond.textproto create mode 100644 firestore/tests/unit/testdata/delete-no-precond.textproto create mode 100644 firestore/tests/unit/testdata/delete-time-precond.textproto create mode 100644 firestore/tests/unit/testdata/get-basic.textproto create mode 100644 firestore/tests/unit/testdata/listen-add-mod-del-add.textproto create mode 100644 firestore/tests/unit/testdata/listen-add-one.textproto create mode 100644 firestore/tests/unit/testdata/listen-add-three.textproto create mode 100644 firestore/tests/unit/testdata/listen-doc-remove.textproto create mode 100644 firestore/tests/unit/testdata/listen-empty.textproto create mode 100644 firestore/tests/unit/testdata/listen-filter-nop.textproto create mode 100644 firestore/tests/unit/testdata/listen-multi-docs.textproto create mode 100644 firestore/tests/unit/testdata/listen-nocurrent.textproto create mode 100644 firestore/tests/unit/testdata/listen-nomod.textproto create mode 100644 firestore/tests/unit/testdata/listen-removed-target-ids.textproto create mode 100644 firestore/tests/unit/testdata/listen-reset.textproto create mode 100644 firestore/tests/unit/testdata/listen-target-add-nop.textproto create mode 100644 firestore/tests/unit/testdata/listen-target-add-wrong-id.textproto create mode 100644 firestore/tests/unit/testdata/listen-target-remove.textproto create mode 100644 firestore/tests/unit/testdata/query-bad-NaN.textproto create mode 100644 firestore/tests/unit/testdata/query-bad-null.textproto create mode 100644 
firestore/tests/unit/testdata/query-cursor-docsnap-order.textproto create mode 100644 firestore/tests/unit/testdata/query-cursor-docsnap-orderby-name.textproto create mode 100644 firestore/tests/unit/testdata/query-cursor-docsnap-where-eq.textproto create mode 100644 firestore/tests/unit/testdata/query-cursor-docsnap-where-neq-orderby.textproto create mode 100644 firestore/tests/unit/testdata/query-cursor-docsnap-where-neq.textproto create mode 100644 firestore/tests/unit/testdata/query-cursor-docsnap.textproto create mode 100644 firestore/tests/unit/testdata/query-cursor-no-order.textproto create mode 100644 firestore/tests/unit/testdata/query-cursor-vals-1a.textproto create mode 100644 firestore/tests/unit/testdata/query-cursor-vals-1b.textproto create mode 100644 firestore/tests/unit/testdata/query-cursor-vals-2.textproto create mode 100644 firestore/tests/unit/testdata/query-cursor-vals-docid.textproto create mode 100644 firestore/tests/unit/testdata/query-cursor-vals-last-wins.textproto create mode 100644 firestore/tests/unit/testdata/query-del-cursor.textproto create mode 100644 firestore/tests/unit/testdata/query-del-where.textproto create mode 100644 firestore/tests/unit/testdata/query-invalid-operator.textproto create mode 100644 firestore/tests/unit/testdata/query-invalid-path-order.textproto create mode 100644 firestore/tests/unit/testdata/query-invalid-path-select.textproto create mode 100644 firestore/tests/unit/testdata/query-invalid-path-where.textproto create mode 100644 firestore/tests/unit/testdata/query-offset-limit-last-wins.textproto create mode 100644 firestore/tests/unit/testdata/query-offset-limit.textproto create mode 100644 firestore/tests/unit/testdata/query-order.textproto create mode 100644 firestore/tests/unit/testdata/query-select-empty.textproto create mode 100644 firestore/tests/unit/testdata/query-select-last-wins.textproto create mode 100644 firestore/tests/unit/testdata/query-select.textproto create mode 100644 firestore/tests/unit/testdata/query-st-cursor.textproto create mode 100644 firestore/tests/unit/testdata/query-st-where.textproto create mode 100644 firestore/tests/unit/testdata/query-where-2.textproto create mode 100644 firestore/tests/unit/testdata/query-where-NaN.textproto create mode 100644 firestore/tests/unit/testdata/query-where-null.textproto create mode 100644 firestore/tests/unit/testdata/query-where.textproto create mode 100644 firestore/tests/unit/testdata/query-wrong-collection.textproto create mode 100644 firestore/tests/unit/testdata/set-basic.textproto create mode 100644 firestore/tests/unit/testdata/set-complex.textproto create mode 100644 firestore/tests/unit/testdata/set-del-merge-alone.textproto create mode 100644 firestore/tests/unit/testdata/set-del-merge.textproto create mode 100644 firestore/tests/unit/testdata/set-del-mergeall.textproto create mode 100644 firestore/tests/unit/testdata/set-del-noarray-nested.textproto create mode 100644 firestore/tests/unit/testdata/set-del-noarray.textproto create mode 100644 firestore/tests/unit/testdata/set-del-nomerge.textproto create mode 100644 firestore/tests/unit/testdata/set-del-nonleaf.textproto create mode 100644 firestore/tests/unit/testdata/set-del-wo-merge.textproto create mode 100644 firestore/tests/unit/testdata/set-empty.textproto create mode 100644 firestore/tests/unit/testdata/set-merge-fp.textproto create mode 100644 firestore/tests/unit/testdata/set-merge-nested.textproto create mode 100644 firestore/tests/unit/testdata/set-merge-nonleaf.textproto create mode 100644 
firestore/tests/unit/testdata/set-merge-prefix.textproto create mode 100644 firestore/tests/unit/testdata/set-merge-present.textproto create mode 100644 firestore/tests/unit/testdata/set-merge.textproto create mode 100644 firestore/tests/unit/testdata/set-mergeall-empty.textproto create mode 100644 firestore/tests/unit/testdata/set-mergeall-nested.textproto create mode 100644 firestore/tests/unit/testdata/set-mergeall.textproto create mode 100644 firestore/tests/unit/testdata/set-nodel.textproto create mode 100644 firestore/tests/unit/testdata/set-nosplit.textproto create mode 100644 firestore/tests/unit/testdata/set-special-chars.textproto create mode 100644 firestore/tests/unit/testdata/set-st-alone-mergeall.textproto create mode 100644 firestore/tests/unit/testdata/set-st-alone.textproto create mode 100644 firestore/tests/unit/testdata/set-st-merge-both.textproto create mode 100644 firestore/tests/unit/testdata/set-st-merge-nonleaf-alone.textproto create mode 100644 firestore/tests/unit/testdata/set-st-merge-nonleaf.textproto create mode 100644 firestore/tests/unit/testdata/set-st-merge-nowrite.textproto create mode 100644 firestore/tests/unit/testdata/set-st-mergeall.textproto create mode 100644 firestore/tests/unit/testdata/set-st-multi.textproto create mode 100644 firestore/tests/unit/testdata/set-st-nested.textproto create mode 100644 firestore/tests/unit/testdata/set-st-noarray-nested.textproto create mode 100644 firestore/tests/unit/testdata/set-st-noarray.textproto create mode 100644 firestore/tests/unit/testdata/set-st-nomerge.textproto create mode 100644 firestore/tests/unit/testdata/set-st.textproto create mode 100644 firestore/tests/unit/testdata/test-suite.binproto create mode 100644 firestore/tests/unit/testdata/update-badchar.textproto create mode 100644 firestore/tests/unit/testdata/update-basic.textproto create mode 100644 firestore/tests/unit/testdata/update-complex.textproto create mode 100644 firestore/tests/unit/testdata/update-del-alone.textproto create mode 100644 firestore/tests/unit/testdata/update-del-dot.textproto create mode 100644 firestore/tests/unit/testdata/update-del-nested.textproto create mode 100644 firestore/tests/unit/testdata/update-del-noarray-nested.textproto create mode 100644 firestore/tests/unit/testdata/update-del-noarray.textproto create mode 100644 firestore/tests/unit/testdata/update-del.textproto create mode 100644 firestore/tests/unit/testdata/update-exists-precond.textproto create mode 100644 firestore/tests/unit/testdata/update-fp-empty-component.textproto create mode 100644 firestore/tests/unit/testdata/update-no-paths.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-basic.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-complex.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-del-alone.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-del-nested.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-del-noarray-nested.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-del-noarray.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-del.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-exists-precond.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-fp-del.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-fp-dup.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-fp-empty-component.textproto create mode 
100644 firestore/tests/unit/testdata/update-paths-fp-empty.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-fp-multi.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-fp-nosplit.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-no-paths.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-prefix-1.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-prefix-2.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-prefix-3.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-special-chars.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-st-alone.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-st-multi.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-st-nested.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-st-noarray-nested.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-st-noarray.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-st.textproto create mode 100644 firestore/tests/unit/testdata/update-paths-uptime.textproto create mode 100644 firestore/tests/unit/testdata/update-prefix-1.textproto create mode 100644 firestore/tests/unit/testdata/update-prefix-2.textproto create mode 100644 firestore/tests/unit/testdata/update-prefix-3.textproto create mode 100644 firestore/tests/unit/testdata/update-quoting.textproto create mode 100644 firestore/tests/unit/testdata/update-split-top-level.textproto create mode 100644 firestore/tests/unit/testdata/update-split.textproto create mode 100644 firestore/tests/unit/testdata/update-st-alone.textproto create mode 100644 firestore/tests/unit/testdata/update-st-dot.textproto create mode 100644 firestore/tests/unit/testdata/update-st-multi.textproto create mode 100644 firestore/tests/unit/testdata/update-st-nested.textproto create mode 100644 firestore/tests/unit/testdata/update-st-noarray-nested.textproto create mode 100644 firestore/tests/unit/testdata/update-st-noarray.textproto create mode 100644 firestore/tests/unit/testdata/update-st.textproto create mode 100644 firestore/tests/unit/testdata/update-uptime.textproto diff --git a/firestore/google/cloud/firestore_v1beta1/proto/test_pb2.py b/firestore/google/cloud/firestore_v1beta1/proto/test_pb2.py index e7359d1e26bb..fb451d0031ef 100644 --- a/firestore/google/cloud/firestore_v1beta1/proto/test_pb2.py +++ b/firestore/google/cloud/firestore_v1beta1/proto/test_pb2.py @@ -13,20 +13,83 @@ _sym_db = _symbol_database.Default() -from google.cloud.firestore_v1beta1.proto import firestore_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2 from google.cloud.firestore_v1beta1.proto import common_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2 +from google.cloud.firestore_v1beta1.proto import document_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2 +from google.cloud.firestore_v1beta1.proto import firestore_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2 +from google.cloud.firestore_v1beta1.proto import query_pb2 as google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='test.proto', package='tests', syntax='proto3', - 
serialized_pb=_b('\n\ntest.proto\x12\x05tests\x1a\x34google/cloud/firestore_v1beta1/proto/firestore.proto\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\"\x80\x02\n\x04Test\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12\x1d\n\x03get\x18\x02 \x01(\x0b\x32\x0e.tests.GetTestH\x00\x12#\n\x06\x63reate\x18\x03 \x01(\x0b\x32\x11.tests.CreateTestH\x00\x12\x1d\n\x03set\x18\x04 \x01(\x0b\x32\x0e.tests.SetTestH\x00\x12#\n\x06update\x18\x05 \x01(\x0b\x32\x11.tests.UpdateTestH\x00\x12.\n\x0cupdate_paths\x18\x06 \x01(\x0b\x32\x16.tests.UpdatePathsTestH\x00\x12#\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x11.tests.DeleteTestH\x00\x42\x06\n\x04test\"^\n\x07GetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12=\n\x07request\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.GetDocumentRequest\"\x81\x01\n\nCreateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08\"\xa0\x01\n\x07SetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12 \n\x06option\x18\x02 \x01(\x0b\x32\x10.tests.SetOption\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08\"\xbf\x01\n\nUpdateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08\"\xed\x01\n\x0fUpdatePathsTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12%\n\x0b\x66ield_paths\x18\x03 \x03(\x0b\x32\x10.tests.FieldPath\x12\x13\n\x0bjson_values\x18\x04 \x03(\t\x12\x38\n\x07request\x18\x05 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x06 \x01(\x08\"\xac\x01\n\nDeleteTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08\":\n\tSetOption\x12\x0b\n\x03\x61ll\x18\x01 \x01(\x08\x12 \n\x06\x66ields\x18\x02 \x03(\x0b\x32\x10.tests.FieldPath\"\x1a\n\tFieldPath\x12\r\n\x05\x66ield\x18\x01 \x03(\tb\x06proto3') + serialized_pb=_b('\n\ntest.proto\x12\x05tests\x1a\x31google/cloud/firestore_v1beta1/proto/common.proto\x1a\x33google/cloud/firestore_v1beta1/proto/document.proto\x1a\x34google/cloud/firestore_v1beta1/proto/firestore.proto\x1a\x30google/cloud/firestore_v1beta1/proto/query.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\'\n\tTestSuite\x12\x1a\n\x05tests\x18\x01 \x03(\x0b\x32\x0b.tests.Test\"\xc8\x02\n\x04Test\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12\x1d\n\x03get\x18\x02 \x01(\x0b\x32\x0e.tests.GetTestH\x00\x12#\n\x06\x63reate\x18\x03 \x01(\x0b\x32\x11.tests.CreateTestH\x00\x12\x1d\n\x03set\x18\x04 \x01(\x0b\x32\x0e.tests.SetTestH\x00\x12#\n\x06update\x18\x05 \x01(\x0b\x32\x11.tests.UpdateTestH\x00\x12.\n\x0cupdate_paths\x18\x06 \x01(\x0b\x32\x16.tests.UpdatePathsTestH\x00\x12#\n\x06\x64\x65lete\x18\x07 \x01(\x0b\x32\x11.tests.DeleteTestH\x00\x12!\n\x05query\x18\x08 \x01(\x0b\x32\x10.tests.QueryTestH\x00\x12#\n\x06listen\x18\t \x01(\x0b\x32\x11.tests.ListenTestH\x00\x42\x06\n\x04test\"^\n\x07GetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 
\x01(\t\x12=\n\x07request\x18\x02 \x01(\x0b\x32,.google.firestore.v1beta1.GetDocumentRequest\"\x81\x01\n\nCreateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08\"\xa0\x01\n\x07SetTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12 \n\x06option\x18\x02 \x01(\x0b\x32\x10.tests.SetOption\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08\"\xbf\x01\n\nUpdateTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x11\n\tjson_data\x18\x03 \x01(\t\x12\x38\n\x07request\x18\x04 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x05 \x01(\x08\"\xed\x01\n\x0fUpdatePathsTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12%\n\x0b\x66ield_paths\x18\x03 \x03(\x0b\x32\x10.tests.FieldPath\x12\x13\n\x0bjson_values\x18\x04 \x03(\t\x12\x38\n\x07request\x18\x05 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x06 \x01(\x08\"\xac\x01\n\nDeleteTest\x12\x14\n\x0c\x64oc_ref_path\x18\x01 \x01(\t\x12<\n\x0cprecondition\x18\x02 \x01(\x0b\x32&.google.firestore.v1beta1.Precondition\x12\x38\n\x07request\x18\x03 \x01(\x0b\x32\'.google.firestore.v1beta1.CommitRequest\x12\x10\n\x08is_error\x18\x04 \x01(\x08\":\n\tSetOption\x12\x0b\n\x03\x61ll\x18\x01 \x01(\x08\x12 \n\x06\x66ields\x18\x02 \x03(\x0b\x32\x10.tests.FieldPath\"\x8a\x01\n\tQueryTest\x12\x11\n\tcoll_path\x18\x01 \x01(\t\x12\x1e\n\x07\x63lauses\x18\x02 \x03(\x0b\x32\r.tests.Clause\x12\x38\n\x05query\x18\x03 \x01(\x0b\x32).google.firestore.v1beta1.StructuredQuery\x12\x10\n\x08is_error\x18\x04 \x01(\x08\"\xa8\x02\n\x06\x43lause\x12\x1f\n\x06select\x18\x01 \x01(\x0b\x32\r.tests.SelectH\x00\x12\x1d\n\x05where\x18\x02 \x01(\x0b\x32\x0c.tests.WhereH\x00\x12\"\n\x08order_by\x18\x03 \x01(\x0b\x32\x0e.tests.OrderByH\x00\x12\x10\n\x06offset\x18\x04 \x01(\x05H\x00\x12\x0f\n\x05limit\x18\x05 \x01(\x05H\x00\x12!\n\x08start_at\x18\x06 \x01(\x0b\x32\r.tests.CursorH\x00\x12$\n\x0bstart_after\x18\x07 \x01(\x0b\x32\r.tests.CursorH\x00\x12\x1f\n\x06\x65nd_at\x18\x08 \x01(\x0b\x32\r.tests.CursorH\x00\x12#\n\nend_before\x18\t \x01(\x0b\x32\r.tests.CursorH\x00\x42\x08\n\x06\x63lause\"*\n\x06Select\x12 \n\x06\x66ields\x18\x01 \x03(\x0b\x32\x10.tests.FieldPath\"G\n\x05Where\x12\x1e\n\x04path\x18\x01 \x01(\x0b\x32\x10.tests.FieldPath\x12\n\n\x02op\x18\x02 \x01(\t\x12\x12\n\njson_value\x18\x03 \x01(\t\"<\n\x07OrderBy\x12\x1e\n\x04path\x18\x01 \x01(\x0b\x32\x10.tests.FieldPath\x12\x11\n\tdirection\x18\x02 \x01(\t\"G\n\x06\x43ursor\x12(\n\x0c\x64oc_snapshot\x18\x01 \x01(\x0b\x32\x12.tests.DocSnapshot\x12\x13\n\x0bjson_values\x18\x02 \x03(\t\".\n\x0b\x44ocSnapshot\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x11\n\tjson_data\x18\x02 \x01(\t\"\x1a\n\tFieldPath\x12\r\n\x05\x66ield\x18\x01 \x03(\t\"\x7f\n\nListenTest\x12;\n\tresponses\x18\x01 \x03(\x0b\x32(.google.firestore.v1beta1.ListenResponse\x12\"\n\tsnapshots\x18\x02 \x03(\x0b\x32\x0f.tests.Snapshot\x12\x10\n\x08is_error\x18\x03 \x01(\x08\"\x8e\x01\n\x08Snapshot\x12\x30\n\x04\x64ocs\x18\x01 \x03(\x0b\x32\".google.firestore.v1beta1.Document\x12!\n\x07\x63hanges\x18\x02 \x03(\x0b\x32\x10.tests.DocChange\x12-\n\tread_time\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xcb\x01\n\tDocChange\x12#\n\x04kind\x18\x01 \x01(\x0e\x32\x15.tests.DocChange.Kind\x12/\n\x03\x64oc\x18\x02 \x01(\x0b\x32\".google.firestore.v1beta1.Document\x12\x11\n\told_index\x18\x03 \x01(\x05\x12\x11\n\tnew_index\x18\x04 \x01(\x05\"B\n\x04Kind\x12\x14\n\x10KIND_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x44\x44\x45\x44\x10\x01\x12\x0b\n\x07REMOVED\x10\x02\x12\x0c\n\x08MODIFIED\x10\x03\x42x\n&com.google.cloud.firestore.conformance\xaa\x02\"Google.Cloud.Firestore.Tests.Proto\xca\x02(Google\\Cloud\\Firestore\\Tests\\Conformanceb\x06proto3') , - dependencies=[google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) + dependencies=[google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2.DESCRIPTOR,google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) + +_DOCCHANGE_KIND = _descriptor.EnumDescriptor( + name='Kind', + full_name='tests.DocChange.Kind', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='KIND_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ADDED', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REMOVED', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MODIFIED', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2874, + serialized_end=2940, +) +_sym_db.RegisterEnumDescriptor(_DOCCHANGE_KIND) + + +_TESTSUITE = _descriptor.Descriptor( + name='TestSuite', + full_name='tests.TestSuite', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='tests', full_name='tests.TestSuite.tests', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=262, + serialized_end=301, +) _TEST = _descriptor.Descriptor( @@ -42,49 +105,63 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='get', full_name='tests.Test.get', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='create', full_name='tests.Test.create', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='set', 
full_name='tests.Test.set', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='update', full_name='tests.Test.update', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='update_paths', full_name='tests.Test.update_paths', index=5, number=6, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='delete', full_name='tests.Test.delete', index=6, number=7, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='query', full_name='tests.Test.query', index=7, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='listen', full_name='tests.Test.listen', index=8, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -100,8 +177,8 @@ name='test', full_name='tests.Test.test', index=0, containing_type=None, fields=[]), ], - serialized_start=127, - serialized_end=383, + serialized_start=304, + serialized_end=632, ) @@ -118,14 +195,14 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='request', full_name='tests.GetTest.request', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -138,8 +215,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=385, - serialized_end=479, + serialized_start=634, + serialized_end=728, ) @@ -156,28 +233,28 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='json_data', full_name='tests.CreateTest.json_data', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='request', full_name='tests.CreateTest.request', index=2, number=3, type=11, cpp_type=10, label=1, 
has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='is_error', full_name='tests.CreateTest.is_error', index=3, number=4, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -190,8 +267,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=482, - serialized_end=611, + serialized_start=731, + serialized_end=860, ) @@ -208,35 +285,35 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='option', full_name='tests.SetTest.option', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='json_data', full_name='tests.SetTest.json_data', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='request', full_name='tests.SetTest.request', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='is_error', full_name='tests.SetTest.is_error', index=4, number=5, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -249,8 +326,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=614, - serialized_end=774, + serialized_start=863, + serialized_end=1023, ) @@ -267,35 +344,35 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='precondition', full_name='tests.UpdateTest.precondition', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='json_data', full_name='tests.UpdateTest.json_data', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='request', full_name='tests.UpdateTest.request', index=3, number=4, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='is_error', full_name='tests.UpdateTest.is_error', index=4, number=5, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -308,8 +385,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=777, - serialized_end=968, + serialized_start=1026, + serialized_end=1217, ) @@ -326,42 +403,42 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='precondition', full_name='tests.UpdatePathsTest.precondition', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='field_paths', full_name='tests.UpdatePathsTest.field_paths', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='json_values', full_name='tests.UpdatePathsTest.json_values', index=3, number=4, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='request', full_name='tests.UpdatePathsTest.request', index=4, number=5, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='is_error', full_name='tests.UpdatePathsTest.is_error', index=5, number=6, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -374,8 +451,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=971, - serialized_end=1208, + serialized_start=1220, + serialized_end=1457, ) @@ -392,28 +469,28 @@ has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='precondition', full_name='tests.DeleteTest.precondition', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='request', full_name='tests.DeleteTest.request', index=2, number=3, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - 
options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='is_error', full_name='tests.DeleteTest.is_error', index=3, number=4, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -426,8 +503,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1211, - serialized_end=1383, + serialized_start=1460, + serialized_end=1632, ) @@ -444,14 +521,66 @@ has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='fields', full_name='tests.SetOption.fields', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1634, + serialized_end=1692, +) + + +_QUERYTEST = _descriptor.Descriptor( + name='QueryTest', + full_name='tests.QueryTest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='coll_path', full_name='tests.QueryTest.coll_path', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='clauses', full_name='tests.QueryTest.clauses', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='query', full_name='tests.QueryTest.query', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='is_error', full_name='tests.QueryTest.is_error', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -464,8 +593,288 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1385, - serialized_end=1443, + serialized_start=1695, + serialized_end=1833, +) + + +_CLAUSE = _descriptor.Descriptor( + name='Clause', + full_name='tests.Clause', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='select', full_name='tests.Clause.select', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='where', full_name='tests.Clause.where', index=1, + number=2, type=11, cpp_type=10, label=1, + 
has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='order_by', full_name='tests.Clause.order_by', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='offset', full_name='tests.Clause.offset', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='limit', full_name='tests.Clause.limit', index=4, + number=5, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='start_at', full_name='tests.Clause.start_at', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='start_after', full_name='tests.Clause.start_after', index=6, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='end_at', full_name='tests.Clause.end_at', index=7, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='end_before', full_name='tests.Clause.end_before', index=8, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='clause', full_name='tests.Clause.clause', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1836, + serialized_end=2132, +) + + +_SELECT = _descriptor.Descriptor( + name='Select', + full_name='tests.Select', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='fields', full_name='tests.Select.fields', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2134, + serialized_end=2176, +) + + +_WHERE = _descriptor.Descriptor( + name='Where', + full_name='tests.Where', + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='tests.Where.path', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='op', full_name='tests.Where.op', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='json_value', full_name='tests.Where.json_value', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2178, + serialized_end=2249, +) + + +_ORDERBY = _descriptor.Descriptor( + name='OrderBy', + full_name='tests.OrderBy', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='tests.OrderBy.path', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='direction', full_name='tests.OrderBy.direction', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2251, + serialized_end=2311, +) + + +_CURSOR = _descriptor.Descriptor( + name='Cursor', + full_name='tests.Cursor', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='doc_snapshot', full_name='tests.Cursor.doc_snapshot', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='json_values', full_name='tests.Cursor.json_values', index=1, + number=2, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2313, + serialized_end=2384, +) + + +_DOCSNAPSHOT = _descriptor.Descriptor( + name='DocSnapshot', + full_name='tests.DocSnapshot', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='path', full_name='tests.DocSnapshot.path', index=0, + number=1, type=9, 
cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='json_data', full_name='tests.DocSnapshot.json_data', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2386, + serialized_end=2432, ) @@ -482,7 +891,97 @@ has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - options=None), + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2434, + serialized_end=2460, +) + + +_LISTENTEST = _descriptor.Descriptor( + name='ListenTest', + full_name='tests.ListenTest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='responses', full_name='tests.ListenTest.responses', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='snapshots', full_name='tests.ListenTest.snapshots', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='is_error', full_name='tests.ListenTest.is_error', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2462, + serialized_end=2589, +) + + +_SNAPSHOT = _descriptor.Descriptor( + name='Snapshot', + full_name='tests.Snapshot', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='docs', full_name='tests.Snapshot.docs', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='changes', full_name='tests.Snapshot.changes', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='read_time', full_name='tests.Snapshot.read_time', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -495,16 +994,72 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1445, - serialized_end=1471, + serialized_start=2592, + serialized_end=2734, ) + +_DOCCHANGE = _descriptor.Descriptor( + name='DocChange', + full_name='tests.DocChange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='kind', full_name='tests.DocChange.kind', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='doc', full_name='tests.DocChange.doc', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='old_index', full_name='tests.DocChange.old_index', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='new_index', full_name='tests.DocChange.new_index', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DOCCHANGE_KIND, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2737, + serialized_end=2940, +) + +_TESTSUITE.fields_by_name['tests'].message_type = _TEST _TEST.fields_by_name['get'].message_type = _GETTEST _TEST.fields_by_name['create'].message_type = _CREATETEST _TEST.fields_by_name['set'].message_type = _SETTEST _TEST.fields_by_name['update'].message_type = _UPDATETEST _TEST.fields_by_name['update_paths'].message_type = _UPDATEPATHSTEST _TEST.fields_by_name['delete'].message_type = _DELETETEST +_TEST.fields_by_name['query'].message_type = _QUERYTEST +_TEST.fields_by_name['listen'].message_type = _LISTENTEST _TEST.oneofs_by_name['test'].fields.append( _TEST.fields_by_name['get']) _TEST.fields_by_name['get'].containing_oneof = _TEST.oneofs_by_name['test'] @@ -523,6 +1078,12 @@ _TEST.oneofs_by_name['test'].fields.append( _TEST.fields_by_name['delete']) _TEST.fields_by_name['delete'].containing_oneof = _TEST.oneofs_by_name['test'] +_TEST.oneofs_by_name['test'].fields.append( + _TEST.fields_by_name['query']) +_TEST.fields_by_name['query'].containing_oneof = _TEST.oneofs_by_name['test'] +_TEST.oneofs_by_name['test'].fields.append( + _TEST.fields_by_name['listen']) +_TEST.fields_by_name['listen'].containing_oneof = _TEST.oneofs_by_name['test'] _GETTEST.fields_by_name['request'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._GETDOCUMENTREQUEST _CREATETEST.fields_by_name['request'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST _SETTEST.fields_by_name['option'].message_type = _SETOPTION @@ -535,6 +1096,55 @@ _DELETETEST.fields_by_name['precondition'].message_type = 
google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_common__pb2._PRECONDITION _DELETETEST.fields_by_name['request'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._COMMITREQUEST _SETOPTION.fields_by_name['fields'].message_type = _FIELDPATH +_QUERYTEST.fields_by_name['clauses'].message_type = _CLAUSE +_QUERYTEST.fields_by_name['query'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_query__pb2._STRUCTUREDQUERY +_CLAUSE.fields_by_name['select'].message_type = _SELECT +_CLAUSE.fields_by_name['where'].message_type = _WHERE +_CLAUSE.fields_by_name['order_by'].message_type = _ORDERBY +_CLAUSE.fields_by_name['start_at'].message_type = _CURSOR +_CLAUSE.fields_by_name['start_after'].message_type = _CURSOR +_CLAUSE.fields_by_name['end_at'].message_type = _CURSOR +_CLAUSE.fields_by_name['end_before'].message_type = _CURSOR +_CLAUSE.oneofs_by_name['clause'].fields.append( + _CLAUSE.fields_by_name['select']) +_CLAUSE.fields_by_name['select'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] +_CLAUSE.oneofs_by_name['clause'].fields.append( + _CLAUSE.fields_by_name['where']) +_CLAUSE.fields_by_name['where'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] +_CLAUSE.oneofs_by_name['clause'].fields.append( + _CLAUSE.fields_by_name['order_by']) +_CLAUSE.fields_by_name['order_by'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] +_CLAUSE.oneofs_by_name['clause'].fields.append( + _CLAUSE.fields_by_name['offset']) +_CLAUSE.fields_by_name['offset'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] +_CLAUSE.oneofs_by_name['clause'].fields.append( + _CLAUSE.fields_by_name['limit']) +_CLAUSE.fields_by_name['limit'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] +_CLAUSE.oneofs_by_name['clause'].fields.append( + _CLAUSE.fields_by_name['start_at']) +_CLAUSE.fields_by_name['start_at'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] +_CLAUSE.oneofs_by_name['clause'].fields.append( + _CLAUSE.fields_by_name['start_after']) +_CLAUSE.fields_by_name['start_after'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] +_CLAUSE.oneofs_by_name['clause'].fields.append( + _CLAUSE.fields_by_name['end_at']) +_CLAUSE.fields_by_name['end_at'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] +_CLAUSE.oneofs_by_name['clause'].fields.append( + _CLAUSE.fields_by_name['end_before']) +_CLAUSE.fields_by_name['end_before'].containing_oneof = _CLAUSE.oneofs_by_name['clause'] +_SELECT.fields_by_name['fields'].message_type = _FIELDPATH +_WHERE.fields_by_name['path'].message_type = _FIELDPATH +_ORDERBY.fields_by_name['path'].message_type = _FIELDPATH +_CURSOR.fields_by_name['doc_snapshot'].message_type = _DOCSNAPSHOT +_LISTENTEST.fields_by_name['responses'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_firestore__pb2._LISTENRESPONSE +_LISTENTEST.fields_by_name['snapshots'].message_type = _SNAPSHOT +_SNAPSHOT.fields_by_name['docs'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT +_SNAPSHOT.fields_by_name['changes'].message_type = _DOCCHANGE +_SNAPSHOT.fields_by_name['read_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DOCCHANGE.fields_by_name['kind'].enum_type = _DOCCHANGE_KIND +_DOCCHANGE.fields_by_name['doc'].message_type = google_dot_cloud_dot_firestore__v1beta1_dot_proto_dot_document__pb2._DOCUMENT +_DOCCHANGE_KIND.containing_type = _DOCCHANGE +DESCRIPTOR.message_types_by_name['TestSuite'] = _TESTSUITE DESCRIPTOR.message_types_by_name['Test'] = _TEST 
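+# Rough usage sketch (not part of the generated module): a conformance
+# runner can parse the textproto fixtures added below into these messages
+# with google.protobuf.text_format; the file path here is illustrative only.
+#
+#   from google.protobuf import text_format
+#   import test_pb2
+#
+#   with open('testdata/create-basic.textproto') as fp:
+#       test = text_format.Parse(fp.read(), test_pb2.Test())
+#   assert test.WhichOneof('test') == 'create'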
DESCRIPTOR.message_types_by_name['GetTest'] = _GETTEST DESCRIPTOR.message_types_by_name['CreateTest'] = _CREATETEST @@ -543,7 +1153,25 @@ DESCRIPTOR.message_types_by_name['UpdatePathsTest'] = _UPDATEPATHSTEST DESCRIPTOR.message_types_by_name['DeleteTest'] = _DELETETEST DESCRIPTOR.message_types_by_name['SetOption'] = _SETOPTION +DESCRIPTOR.message_types_by_name['QueryTest'] = _QUERYTEST +DESCRIPTOR.message_types_by_name['Clause'] = _CLAUSE +DESCRIPTOR.message_types_by_name['Select'] = _SELECT +DESCRIPTOR.message_types_by_name['Where'] = _WHERE +DESCRIPTOR.message_types_by_name['OrderBy'] = _ORDERBY +DESCRIPTOR.message_types_by_name['Cursor'] = _CURSOR +DESCRIPTOR.message_types_by_name['DocSnapshot'] = _DOCSNAPSHOT DESCRIPTOR.message_types_by_name['FieldPath'] = _FIELDPATH +DESCRIPTOR.message_types_by_name['ListenTest'] = _LISTENTEST +DESCRIPTOR.message_types_by_name['Snapshot'] = _SNAPSHOT +DESCRIPTOR.message_types_by_name['DocChange'] = _DOCCHANGE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +TestSuite = _reflection.GeneratedProtocolMessageType('TestSuite', (_message.Message,), dict( + DESCRIPTOR = _TESTSUITE, + __module__ = 'test_pb2' + # @@protoc_insertion_point(class_scope:tests.TestSuite) + )) +_sym_db.RegisterMessage(TestSuite) Test = _reflection.GeneratedProtocolMessageType('Test', (_message.Message,), dict( DESCRIPTOR = _TEST, @@ -601,6 +1229,55 @@ )) _sym_db.RegisterMessage(SetOption) +QueryTest = _reflection.GeneratedProtocolMessageType('QueryTest', (_message.Message,), dict( + DESCRIPTOR = _QUERYTEST, + __module__ = 'test_pb2' + # @@protoc_insertion_point(class_scope:tests.QueryTest) + )) +_sym_db.RegisterMessage(QueryTest) + +Clause = _reflection.GeneratedProtocolMessageType('Clause', (_message.Message,), dict( + DESCRIPTOR = _CLAUSE, + __module__ = 'test_pb2' + # @@protoc_insertion_point(class_scope:tests.Clause) + )) +_sym_db.RegisterMessage(Clause) + +Select = _reflection.GeneratedProtocolMessageType('Select', (_message.Message,), dict( + DESCRIPTOR = _SELECT, + __module__ = 'test_pb2' + # @@protoc_insertion_point(class_scope:tests.Select) + )) +_sym_db.RegisterMessage(Select) + +Where = _reflection.GeneratedProtocolMessageType('Where', (_message.Message,), dict( + DESCRIPTOR = _WHERE, + __module__ = 'test_pb2' + # @@protoc_insertion_point(class_scope:tests.Where) + )) +_sym_db.RegisterMessage(Where) + +OrderBy = _reflection.GeneratedProtocolMessageType('OrderBy', (_message.Message,), dict( + DESCRIPTOR = _ORDERBY, + __module__ = 'test_pb2' + # @@protoc_insertion_point(class_scope:tests.OrderBy) + )) +_sym_db.RegisterMessage(OrderBy) + +Cursor = _reflection.GeneratedProtocolMessageType('Cursor', (_message.Message,), dict( + DESCRIPTOR = _CURSOR, + __module__ = 'test_pb2' + # @@protoc_insertion_point(class_scope:tests.Cursor) + )) +_sym_db.RegisterMessage(Cursor) + +DocSnapshot = _reflection.GeneratedProtocolMessageType('DocSnapshot', (_message.Message,), dict( + DESCRIPTOR = _DOCSNAPSHOT, + __module__ = 'test_pb2' + # @@protoc_insertion_point(class_scope:tests.DocSnapshot) + )) +_sym_db.RegisterMessage(DocSnapshot) + FieldPath = _reflection.GeneratedProtocolMessageType('FieldPath', (_message.Message,), dict( DESCRIPTOR = _FIELDPATH, __module__ = 'test_pb2' @@ -608,5 +1285,28 @@ )) _sym_db.RegisterMessage(FieldPath) +ListenTest = _reflection.GeneratedProtocolMessageType('ListenTest', (_message.Message,), dict( + DESCRIPTOR = _LISTENTEST, + __module__ = 'test_pb2' + # @@protoc_insertion_point(class_scope:tests.ListenTest) + )) +_sym_db.RegisterMessage(ListenTest) + +Snapshot = 
_reflection.GeneratedProtocolMessageType('Snapshot', (_message.Message,), dict( + DESCRIPTOR = _SNAPSHOT, + __module__ = 'test_pb2' + # @@protoc_insertion_point(class_scope:tests.Snapshot) + )) +_sym_db.RegisterMessage(Snapshot) + +DocChange = _reflection.GeneratedProtocolMessageType('DocChange', (_message.Message,), dict( + DESCRIPTOR = _DOCCHANGE, + __module__ = 'test_pb2' + # @@protoc_insertion_point(class_scope:tests.DocChange) + )) +_sym_db.RegisterMessage(DocChange) + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n&com.google.cloud.firestore.conformance\252\002\"Google.Cloud.Firestore.Tests.Proto\312\002(Google\\Cloud\\Firestore\\Tests\\Conformance')) # @@protoc_insertion_point(module_scope) diff --git a/firestore/tests/unit/testdata/create-basic.textproto b/firestore/tests/unit/testdata/create-basic.textproto new file mode 100644 index 000000000000..433ffda72704 --- /dev/null +++ b/firestore/tests/unit/testdata/create-basic.textproto @@ -0,0 +1,27 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A simple call, resulting in a single update operation. + +description: "create: basic" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + current_document: < + exists: false + > + > + > +> diff --git a/firestore/tests/unit/testdata/create-complex.textproto b/firestore/tests/unit/testdata/create-complex.textproto new file mode 100644 index 000000000000..00a994e204a2 --- /dev/null +++ b/firestore/tests/unit/testdata/create-complex.textproto @@ -0,0 +1,61 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A call to a write method with complicated input data. + +description: "create: complex" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2.5], \"b\": {\"c\": [\"three\", {\"d\": true}]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + array_value: < + values: < + integer_value: 1 + > + values: < + double_value: 2.5 + > + > + > + > + fields: < + key: "b" + value: < + map_value: < + fields: < + key: "c" + value: < + array_value: < + values: < + string_value: "three" + > + values: < + map_value: < + fields: < + key: "d" + value: < + boolean_value: true + > + > + > + > + > + > + > + > + > + > + > + current_document: < + exists: false + > + > + > +> diff --git a/firestore/tests/unit/testdata/create-del-noarray-nested.textproto b/firestore/tests/unit/testdata/create-del-noarray-nested.textproto new file mode 100644 index 000000000000..60694e137163 --- /dev/null +++ b/firestore/tests/unit/testdata/create-del-noarray-nested.textproto @@ -0,0 +1,13 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. 
Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. + +description: "create: Delete cannot be anywhere inside an array value" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": \"Delete\"}]}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/create-del-noarray.textproto b/firestore/tests/unit/testdata/create-del-noarray.textproto new file mode 100644 index 000000000000..5731be1c7357 --- /dev/null +++ b/firestore/tests/unit/testdata/create-del-noarray.textproto @@ -0,0 +1,13 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. + +description: "create: Delete cannot be in an array value" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, \"Delete\"]}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/create-empty.textproto b/firestore/tests/unit/testdata/create-empty.textproto new file mode 100644 index 000000000000..2b6fec7efafd --- /dev/null +++ b/firestore/tests/unit/testdata/create-empty.textproto @@ -0,0 +1,20 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + + +description: "create: creating or setting an empty map" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + current_document: < + exists: false + > + > + > +> diff --git a/firestore/tests/unit/testdata/create-nodel.textproto b/firestore/tests/unit/testdata/create-nodel.textproto new file mode 100644 index 000000000000..c878814b1128 --- /dev/null +++ b/firestore/tests/unit/testdata/create-nodel.textproto @@ -0,0 +1,11 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel cannot be used in Create, or in Set without a Merge option. + +description: "create: Delete cannot appear in data" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"Delete\"}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/create-nosplit.textproto b/firestore/tests/unit/testdata/create-nosplit.textproto new file mode 100644 index 000000000000..e9e1ee2755f5 --- /dev/null +++ b/firestore/tests/unit/testdata/create-nosplit.textproto @@ -0,0 +1,40 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Create and Set treat their map keys literally. They do not split on dots. 
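+# As an illustrative sketch (client construction elided; method shape taken
+# from the v1beta1 Python client), the call under test here is roughly:
+#
+#   doc = client.collection('C').document('d')
+#   doc.create({'a.b': {'c.d': 1}, 'e': 2})
+#
+# "a.b" names one literal top-level map field; only update()-style calls
+# treat dots in string keys as field-path separators.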
+ +description: "create: don\342\200\231t split on dots" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{ \"a.b\": { \"c.d\": 1 }, \"e\": 2 }" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a.b" + value: < + map_value: < + fields: < + key: "c.d" + value: < + integer_value: 1 + > + > + > + > + > + fields: < + key: "e" + value: < + integer_value: 2 + > + > + > + current_document: < + exists: false + > + > + > +> diff --git a/firestore/tests/unit/testdata/create-special-chars.textproto b/firestore/tests/unit/testdata/create-special-chars.textproto new file mode 100644 index 000000000000..3a7acd3075de --- /dev/null +++ b/firestore/tests/unit/testdata/create-special-chars.textproto @@ -0,0 +1,41 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Create and Set treat their map keys literally. They do not escape special +# characters. + +description: "create: non-alpha characters in map keys" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{ \"*\": { \".\": 1 }, \"~\": 2 }" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "*" + value: < + map_value: < + fields: < + key: "." + value: < + integer_value: 1 + > + > + > + > + > + fields: < + key: "~" + value: < + integer_value: 2 + > + > + > + current_document: < + exists: false + > + > + > +> diff --git a/firestore/tests/unit/testdata/create-st-alone.textproto b/firestore/tests/unit/testdata/create-st-alone.textproto new file mode 100644 index 000000000000..9803a676bbe0 --- /dev/null +++ b/firestore/tests/unit/testdata/create-st-alone.textproto @@ -0,0 +1,26 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the only values in the input are ServerTimestamps, then no update operation +# should be produced. + +description: "create: ServerTimestamp alone" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a" + set_to_server_value: REQUEST_TIME + > + > + current_document: < + exists: false + > + > + > +> diff --git a/firestore/tests/unit/testdata/create-st-multi.textproto b/firestore/tests/unit/testdata/create-st-multi.textproto new file mode 100644 index 000000000000..cb3db480999a --- /dev/null +++ b/firestore/tests/unit/testdata/create-st-multi.textproto @@ -0,0 +1,41 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can have more than one ServerTimestamp field. Since all the +# ServerTimestamp fields are removed, the only field in the update is "a". 
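+# Sketch of the corresponding client call, with the Python sentinel standing
+# in for the "ServerTimestamp" placeholder string used in this fixture:
+#
+#   from google.cloud import firestore
+#   doc.create({'a': 1,
+#               'b': firestore.SERVER_TIMESTAMP,
+#               'c': {'d': firestore.SERVER_TIMESTAMP}})
+#
+# Both sentinel paths ("b" and "c.d") land in a single transform write;
+# only "a" survives into the update write.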
+ +description: "create: multiple ServerTimestamp fields" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": {\"d\": \"ServerTimestamp\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + current_document: < + exists: false + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + field_transforms: < + field_path: "c.d" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/create-st-nested.textproto b/firestore/tests/unit/testdata/create-st-nested.textproto new file mode 100644 index 000000000000..6bc03e8e7ca0 --- /dev/null +++ b/firestore/tests/unit/testdata/create-st-nested.textproto @@ -0,0 +1,38 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A ServerTimestamp value can occur at any depth. In this case, the transform +# applies to the field path "b.c". Since "c" is removed from the update, "b" +# becomes empty, so it is also removed from the update. + +description: "create: nested ServerTimestamp field" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": {\"c\": \"ServerTimestamp\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + current_document: < + exists: false + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/create-st-noarray-nested.textproto b/firestore/tests/unit/testdata/create-st-noarray-nested.textproto new file mode 100644 index 000000000000..0cec0aebd4bf --- /dev/null +++ b/firestore/tests/unit/testdata/create-st-noarray-nested.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ServerTimestamp sentinel. Firestore transforms don't support array indexing. + +description: "create: ServerTimestamp cannot be anywhere inside an array value" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": \"ServerTimestamp\"}]}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/create-st-noarray.textproto b/firestore/tests/unit/testdata/create-st-noarray.textproto new file mode 100644 index 000000000000..56d91c2cfb5a --- /dev/null +++ b/firestore/tests/unit/testdata/create-st-noarray.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The ServerTimestamp sentinel must be the value of a field. Firestore transforms +# don't support array indexing. 
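+# Sketched failure mode (the exact exception type is up to the client; the
+# fixture only asserts that the call errors):
+#
+#   doc.create({'a': [1, 2, firestore.SERVER_TIMESTAMP]})  # must fail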
+ +description: "create: ServerTimestamp cannot be in an array value" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, \"ServerTimestamp\"]}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/create-st.textproto b/firestore/tests/unit/testdata/create-st.textproto new file mode 100644 index 000000000000..ddfc6a177e16 --- /dev/null +++ b/firestore/tests/unit/testdata/create-st.textproto @@ -0,0 +1,39 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A key with the special ServerTimestamp sentinel is removed from the data in the +# update operation. Instead it appears in a separate Transform operation. Note +# that in these tests, the string "ServerTimestamp" should be replaced with the +# special ServerTimestamp value. + +description: "create: ServerTimestamp with data" +create: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + current_document: < + exists: false + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/delete-exists-precond.textproto b/firestore/tests/unit/testdata/delete-exists-precond.textproto new file mode 100644 index 000000000000..c9cf2ddea4e6 --- /dev/null +++ b/firestore/tests/unit/testdata/delete-exists-precond.textproto @@ -0,0 +1,21 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Delete supports an exists precondition. + +description: "delete: delete with exists precondition" +delete: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + precondition: < + exists: true + > + request: < + database: "projects/projectID/databases/(default)" + writes: < + delete: "projects/projectID/databases/(default)/documents/C/d" + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/delete-no-precond.textproto b/firestore/tests/unit/testdata/delete-no-precond.textproto new file mode 100644 index 000000000000..a396cdb8c4a1 --- /dev/null +++ b/firestore/tests/unit/testdata/delete-no-precond.textproto @@ -0,0 +1,15 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# An ordinary Delete call. + +description: "delete: delete without precondition" +delete: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + request: < + database: "projects/projectID/databases/(default)" + writes: < + delete: "projects/projectID/databases/(default)/documents/C/d" + > + > +> diff --git a/firestore/tests/unit/testdata/delete-time-precond.textproto b/firestore/tests/unit/testdata/delete-time-precond.textproto new file mode 100644 index 000000000000..5798f5f3b2fc --- /dev/null +++ b/firestore/tests/unit/testdata/delete-time-precond.textproto @@ -0,0 +1,25 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Delete supports a last-update-time precondition. + +description: "delete: delete with last-update-time precondition" +delete: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + precondition: < + update_time: < + seconds: 42 + > + > + request: < + database: "projects/projectID/databases/(default)" + writes: < + delete: "projects/projectID/databases/(default)/documents/C/d" + current_document: < + update_time: < + seconds: 42 + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/get-basic.textproto b/firestore/tests/unit/testdata/get-basic.textproto new file mode 100644 index 000000000000..2a448168255b --- /dev/null +++ b/firestore/tests/unit/testdata/get-basic.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A call to DocumentRef.Get. + +description: "get: get a document" +get: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + request: < + name: "projects/projectID/databases/(default)/documents/C/d" + > +> diff --git a/firestore/tests/unit/testdata/listen-add-mod-del-add.textproto b/firestore/tests/unit/testdata/listen-add-mod-del-add.textproto new file mode 100644 index 000000000000..1aa8dcbc3645 --- /dev/null +++ b/firestore/tests/unit/testdata/listen-add-mod-del-add.textproto @@ -0,0 +1,246 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Various changes to a single document. 
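+# The expected snapshots below correspond to a watch callback of roughly the
+# shape callback(docs, changes, read_time) -- a sketch assuming an
+# on_snapshot-style API, which this patch does not itself add:
+#
+#   def callback(docs, changes, read_time):
+#       for change in changes:
+#           print(change.type, change.old_index, change.new_index)
+#
+#   collection.on_snapshot(callback)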
+ +description: "listen: add a doc, modify it, delete it, then add it again" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + responses: < + document_delete: < + document: "projects/projectID/databases/(default)/documents/C/d1" + > + > + responses: < + target_change: < + read_time: < + seconds: 3 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + read_time: < + seconds: 4 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + read_time: < + seconds: 1 + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + changes: < + kind: MODIFIED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + > + read_time: < + seconds: 2 + > + > + snapshots: < + changes: < + kind: REMOVED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + new_index: -1 + > + read_time: < + seconds: 3 + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + old_index: -1 + > + read_time: < + seconds: 4 + > + > +> diff --git a/firestore/tests/unit/testdata/listen-add-one.textproto b/firestore/tests/unit/testdata/listen-add-one.textproto new file mode 100644 index 000000000000..2ad1d8e976da --- /dev/null +++ b/firestore/tests/unit/testdata/listen-add-one.textproto @@ -0,0 +1,79 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Snapshot with a single document. + +description: "listen: add a doc" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + read_time: < + seconds: 2 + > + > +> diff --git a/firestore/tests/unit/testdata/listen-add-three.textproto b/firestore/tests/unit/testdata/listen-add-three.textproto new file mode 100644 index 000000000000..ac846f76260d --- /dev/null +++ b/firestore/tests/unit/testdata/listen-add-three.textproto @@ -0,0 +1,190 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A snapshot with three documents. The documents are sorted first by the "a" +# field, then by their path. The changes are ordered the same way. + +description: "listen: add three documents" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + 
value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 2 + > + read_time: < + seconds: 2 + > + > +> diff --git a/firestore/tests/unit/testdata/listen-doc-remove.textproto b/firestore/tests/unit/testdata/listen-doc-remove.textproto new file mode 100644 index 000000000000..975200f97363 --- /dev/null +++ b/firestore/tests/unit/testdata/listen-doc-remove.textproto @@ -0,0 +1,115 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The DocumentRemove response behaves exactly like DocumentDelete. + +description: "listen: DocumentRemove behaves like DocumentDelete" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + responses: < + document_remove: < + document: "projects/projectID/databases/(default)/documents/C/d1" + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + read_time: < + seconds: 1 + > + > + snapshots: < + changes: < + kind: REMOVED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + new_index: -1 + > + read_time: < + seconds: 2 + > + > +> diff --git a/firestore/tests/unit/testdata/listen-empty.textproto b/firestore/tests/unit/testdata/listen-empty.textproto new file mode 100644 index 000000000000..4d04b79096c7 --- /dev/null +++ b/firestore/tests/unit/testdata/listen-empty.textproto @@ -0,0 +1,25 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There are no changes, so the snapshot should be empty. 
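
These listen fixtures are conformance inputs: a client replays the stream of responses through its watch logic and must produce exactly the listed snapshots (docs in query order, changes with old/new indices, and a read time). In the Python client that state surfaces through the on_snapshot callback. A minimal sketch, assuming the google-cloud-firestore watch API these fixtures are meant to exercise (on_snapshot and its (docs, changes, read_time) callback signature are this sketch's assumptions):

    from google.cloud import firestore

    client = firestore.Client()
    collection = client.collection('C')

    def callback(docs, changes, read_time):
        # One invocation corresponds to one "snapshots" entry in a fixture:
        # docs is the full result set in query order; changes carries the
        # deltas, each with a kind (ADDED/MODIFIED/REMOVED) and the
        # old_index/new_index positions asserted by these tests.
        for change in changes:
            print(change.type, change.document.id,
                  change.old_index, change.new_index)

    watch = collection.on_snapshot(callback)
    # ... later, stop the listener:
    watch.unsubscribe()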
+ +description: "listen: no changes; empty snapshot" +listen: < + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + snapshots: < + read_time: < + seconds: 1 + > + > +> diff --git a/firestore/tests/unit/testdata/listen-filter-nop.textproto b/firestore/tests/unit/testdata/listen-filter-nop.textproto new file mode 100644 index 000000000000..48fd72d3ae12 --- /dev/null +++ b/firestore/tests/unit/testdata/listen-filter-nop.textproto @@ -0,0 +1,247 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A Filter response whose count matches the size of the current state (docs in +# last snapshot + docs added - docs deleted) is a no-op. + +description: "listen: Filter response with same size is a no-op" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_delete: < + document: "projects/projectID/databases/(default)/documents/C/d1" + > + > + responses: < + filter: < + count: 2 + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 1 + > + read_time: < + seconds: 1 + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + 
changes: < + kind: REMOVED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: 1 + new_index: -1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 1 + > + read_time: < + seconds: 2 + > + > +> diff --git a/firestore/tests/unit/testdata/listen-multi-docs.textproto b/firestore/tests/unit/testdata/listen-multi-docs.textproto new file mode 100644 index 000000000000..8778acc3d1e9 --- /dev/null +++ b/firestore/tests/unit/testdata/listen-multi-docs.textproto @@ -0,0 +1,524 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Changes should be ordered with deletes first, then additions, then mods, each in +# query order. Old indices refer to the immediately previous state, not the +# previous snapshot + +description: "listen: multiple documents, added, deleted and updated" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d4" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d5" + fields: < + key: "a" + value: < + integer_value: 4 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_delete: < + document: "projects/projectID/databases/(default)/documents/C/d3" + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: -1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d6" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_delete: < + document: 
"projects/projectID/databases/(default)/documents/C/d2" + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d4" + fields: < + key: "a" + value: < + integer_value: -2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + read_time: < + seconds: 4 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d4" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d4" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 2 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 3 + > + read_time: < + seconds: 2 + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d4" + fields: < + key: "a" + value: < + integer_value: -2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: -1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d6" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d5" + fields: < + key: "a" + value: < + integer_value: 4 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: REMOVED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + new_index: -1 + > + changes: < + kind: REMOVED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d3" + 
fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + new_index: -1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d6" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 2 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d5" + fields: < + key: "a" + value: < + integer_value: 4 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 3 + > + changes: < + kind: MODIFIED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d4" + fields: < + key: "a" + value: < + integer_value: -2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + > + changes: < + kind: MODIFIED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: -1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + old_index: 1 + new_index: 1 + > + read_time: < + seconds: 4 + > + > +> diff --git a/firestore/tests/unit/testdata/listen-nocurrent.textproto b/firestore/tests/unit/testdata/listen-nocurrent.textproto new file mode 100644 index 000000000000..24239b6456f9 --- /dev/null +++ b/firestore/tests/unit/testdata/listen-nocurrent.textproto @@ -0,0 +1,141 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the watch state is not marked CURRENT, no snapshot is issued. + +description: "listen: no snapshot if we don't see CURRENT" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + old_index: -1 + new_index: 1 + > + 
read_time: < + seconds: 2 + > + > +> diff --git a/firestore/tests/unit/testdata/listen-nomod.textproto b/firestore/tests/unit/testdata/listen-nomod.textproto new file mode 100644 index 000000000000..2a99edc350c8 --- /dev/null +++ b/firestore/tests/unit/testdata/listen-nomod.textproto @@ -0,0 +1,143 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Document updates are recognized by a change in the update time, not the data. +# This shouldn't actually happen. It is just a test of the update logic. + +description: "listen: add a doc, then change it but without changing its update time" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + responses: < + document_delete: < + document: "projects/projectID/databases/(default)/documents/C/d1" + > + > + responses: < + target_change: < + read_time: < + seconds: 3 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + read_time: < + seconds: 1 + > + > + snapshots: < + changes: < + kind: REMOVED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + new_index: -1 + > + read_time: < + seconds: 3 + > + > +> diff --git a/firestore/tests/unit/testdata/listen-removed-target-ids.textproto b/firestore/tests/unit/testdata/listen-removed-target-ids.textproto new file mode 100644 index 000000000000..1e8ead2d8048 --- /dev/null +++ b/firestore/tests/unit/testdata/listen-removed-target-ids.textproto @@ -0,0 +1,131 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A DocumentChange with the watch target ID in the removed_target_ids field is the +# same as deleting a document. + +description: "listen: DocumentChange with removed_target_id is like a delete." 
+listen: <
+  responses: <
+    document_change: <
+      document: <
+        name: "projects/projectID/databases/(default)/documents/C/d1"
+        fields: <
+          key: "a"
+          value: <
+            integer_value: 3
+          >
+        >
+        create_time: <
+          seconds: 1
+        >
+        update_time: <
+          seconds: 1
+        >
+      >
+      target_ids: 1
+    >
+  >
+  responses: <
+    target_change: <
+      target_change_type: CURRENT
+    >
+  >
+  responses: <
+    target_change: <
+      read_time: <
+        seconds: 1
+      >
+    >
+  >
+  responses: <
+    document_change: <
+      document: <
+        name: "projects/projectID/databases/(default)/documents/C/d1"
+        fields: <
+          key: "a"
+          value: <
+            integer_value: 3
+          >
+        >
+        create_time: <
+          seconds: 1
+        >
+        update_time: <
+          seconds: 1
+        >
+      >
+      removed_target_ids: 1
+    >
+  >
+  responses: <
+    target_change: <
+      read_time: <
+        seconds: 2
+      >
+    >
+  >
+  snapshots: <
+    docs: <
+      name: "projects/projectID/databases/(default)/documents/C/d1"
+      fields: <
+        key: "a"
+        value: <
+          integer_value: 3
+        >
+      >
+      create_time: <
+        seconds: 1
+      >
+      update_time: <
+        seconds: 1
+      >
+    >
+    changes: <
+      kind: ADDED
+      doc: <
+        name: "projects/projectID/databases/(default)/documents/C/d1"
+        fields: <
+          key: "a"
+          value: <
+            integer_value: 3
+          >
+        >
+        create_time: <
+          seconds: 1
+        >
+        update_time: <
+          seconds: 1
+        >
+      >
+      old_index: -1
+    >
+    read_time: <
+      seconds: 1
+    >
+  >
+  snapshots: <
+    changes: <
+      kind: REMOVED
+      doc: <
+        name: "projects/projectID/databases/(default)/documents/C/d1"
+        fields: <
+          key: "a"
+          value: <
+            integer_value: 3
+          >
+        >
+        create_time: <
+          seconds: 1
+        >
+        update_time: <
+          seconds: 1
+        >
+      >
+      new_index: -1
+    >
+    read_time: <
+      seconds: 2
+    >
+  >
+>
diff --git a/firestore/tests/unit/testdata/listen-reset.textproto b/firestore/tests/unit/testdata/listen-reset.textproto
new file mode 100644
index 000000000000..89a75df2783a
--- /dev/null
+++ b/firestore/tests/unit/testdata/listen-reset.textproto
@@ -0,0 +1,382 @@
+# DO NOT MODIFY. This file was generated by
+# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
+
+# A RESET message turns off the CURRENT state, and marks all documents as deleted.
+
+# If a document appeared on the stream but was never part of a snapshot ("d3" in
+# this test), a reset will make it disappear completely.
+
+# For a snapshot to happen at a NO_CHANGE response, we need to have both seen a
+# CURRENT response, and have a change from the previous snapshot. Here, after the
+# reset, we see the same version of d2 again. That doesn't result in a snapshot.
+ +description: "listen: RESET turns off CURRENT" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: RESET + > + > + responses: < + target_change: < + read_time: < + seconds: 2 + > + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + read_time: < + seconds: 3 + > + > + > + responses: < + target_change: < + target_change_type: RESET + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + read_time: < + seconds: 4 + > + > + > + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + read_time: < + seconds: 5 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + old_index: -1 + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + new_index: 1 + > + read_time: < + seconds: 1 + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + 
create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + changes: < + kind: REMOVED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 2 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: 1 + new_index: -1 + > + changes: < + kind: MODIFIED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + > + read_time: < + seconds: 3 + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d2" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 3 + > + > + docs: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d3" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 2 + > + > + old_index: -1 + new_index: 1 + > + read_time: < + seconds: 5 + > + > +> diff --git a/firestore/tests/unit/testdata/listen-target-add-nop.textproto b/firestore/tests/unit/testdata/listen-target-add-nop.textproto new file mode 100644 index 000000000000..3fa7cce56e27 --- /dev/null +++ b/firestore/tests/unit/testdata/listen-target-add-nop.textproto @@ -0,0 +1,88 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A TargetChange_ADD response must have the same watch target ID. + +description: "listen: TargetChange_ADD is a no-op if it has the same target ID" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + target_change_type: ADD + target_ids: 1 + read_time: < + seconds: 2 + > + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + snapshots: < + docs: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + changes: < + kind: ADDED + doc: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + old_index: -1 + > + read_time: < + seconds: 1 + > + > +> diff --git a/firestore/tests/unit/testdata/listen-target-add-wrong-id.textproto b/firestore/tests/unit/testdata/listen-target-add-wrong-id.textproto new file mode 100644 index 000000000000..87544637b50b --- /dev/null +++ b/firestore/tests/unit/testdata/listen-target-add-wrong-id.textproto @@ -0,0 +1,50 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
+ +# A TargetChange_ADD response must have the same watch target ID. + +description: "listen: TargetChange_ADD is an error if it has a different target ID" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + target_change_type: ADD + target_ids: 2 + read_time: < + seconds: 2 + > + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + is_error: true +> diff --git a/firestore/tests/unit/testdata/listen-target-remove.textproto b/firestore/tests/unit/testdata/listen-target-remove.textproto new file mode 100644 index 000000000000..f34b0890c3f0 --- /dev/null +++ b/firestore/tests/unit/testdata/listen-target-remove.textproto @@ -0,0 +1,46 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A TargetChange_REMOVE response should never be sent. + +description: "listen: TargetChange_REMOVE should not appear" +listen: < + responses: < + document_change: < + document: < + name: "projects/projectID/databases/(default)/documents/C/d1" + fields: < + key: "a" + value: < + integer_value: 3 + > + > + create_time: < + seconds: 1 + > + update_time: < + seconds: 1 + > + > + target_ids: 1 + > + > + responses: < + target_change: < + target_change_type: CURRENT + > + > + responses: < + target_change: < + target_change_type: REMOVE + > + > + responses: < + target_change: < + read_time: < + seconds: 1 + > + > + > + is_error: true +> diff --git a/firestore/tests/unit/testdata/query-bad-NaN.textproto b/firestore/tests/unit/testdata/query-bad-NaN.textproto new file mode 100644 index 000000000000..6806dd04ab27 --- /dev/null +++ b/firestore/tests/unit/testdata/query-bad-NaN.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# You can only compare NaN for equality. + +description: "query: where clause with non-== comparison with NaN" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "<" + json_value: "\"NaN\"" + > + > + is_error: true +> diff --git a/firestore/tests/unit/testdata/query-bad-null.textproto b/firestore/tests/unit/testdata/query-bad-null.textproto new file mode 100644 index 000000000000..7fdfb3f2b5dd --- /dev/null +++ b/firestore/tests/unit/testdata/query-bad-null.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# You can only compare Null for equality. 
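
Client-side, the Python API enforces the same rule: None and NaN are usable only with the == operator, where they compile to unary filters (see query-where-null and query-where-NaN further below) instead of field filters. A hedged sketch; the exact exception raised for the invalid case is an assumption:

    from google.cloud import firestore

    client = firestore.Client()
    coll = client.collection('C')

    coll.where('a', '==', None)           # becomes a unary IS_NULL filter
    coll.where('a', '==', float('nan'))   # becomes a unary IS_NAN filter

    try:
        coll.where('a', '>', None)        # non-== comparison: rejected
    except ValueError as exc:
        print(exc)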
+ +description: "query: where clause with non-== comparison with Null" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: ">" + json_value: "null" + > + > + is_error: true +> diff --git a/firestore/tests/unit/testdata/query-cursor-docsnap-order.textproto b/firestore/tests/unit/testdata/query-cursor-docsnap-order.textproto new file mode 100644 index 000000000000..bab8601e8d6c --- /dev/null +++ b/firestore/tests/unit/testdata/query-cursor-docsnap-order.textproto @@ -0,0 +1,68 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# When a document snapshot is used, the client appends a __name__ order-by clause +# with the direction of the last order-by clause. + +description: "query: cursor methods with a document snapshot, existing orderBy" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + order_by: < + path: < + field: "b" + > + direction: "desc" + > + > + clauses: < + start_after: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "a" + > + direction: ASCENDING + > + order_by: < + field: < + field_path: "b" + > + direction: DESCENDING + > + order_by: < + field: < + field_path: "__name__" + > + direction: DESCENDING + > + start_at: < + values: < + integer_value: 7 + > + values: < + integer_value: 8 + > + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D" + > + > + > +> diff --git a/firestore/tests/unit/testdata/query-cursor-docsnap-orderby-name.textproto b/firestore/tests/unit/testdata/query-cursor-docsnap-orderby-name.textproto new file mode 100644 index 000000000000..d0ce3df45a2f --- /dev/null +++ b/firestore/tests/unit/testdata/query-cursor-docsnap-orderby-name.textproto @@ -0,0 +1,76 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If there is an existing orderBy clause on __name__, no changes are made to the +# list of orderBy clauses. 
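
In the Python client, these document-snapshot cursor cases correspond to passing a DocumentSnapshot to start_at/end_at, with the cursor values read off the snapshot in orderBy order. A sketch, assuming snapshot cursors behave as these fixtures require:

    from google.cloud import firestore

    client = firestore.Client()
    coll = client.collection('C')
    snap = coll.document('D').get()   # DocumentSnapshot used as the cursor

    query = (
        coll.order_by('a', direction=firestore.Query.DESCENDING)
            .order_by('__name__', direction=firestore.Query.ASCENDING)
            .start_at(snap)   # inclusive, hence "before: true" on the wire
            .end_at(snap))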
+ +description: "query: cursor method, doc snapshot, existing orderBy __name__" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "desc" + > + > + clauses: < + order_by: < + path: < + field: "__name__" + > + direction: "asc" + > + > + clauses: < + start_at: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + clauses: < + end_at: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "a" + > + direction: DESCENDING + > + order_by: < + field: < + field_path: "__name__" + > + direction: ASCENDING + > + start_at: < + values: < + integer_value: 7 + > + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D" + > + before: true + > + end_at: < + values: < + integer_value: 7 + > + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D" + > + > + > +> diff --git a/firestore/tests/unit/testdata/query-cursor-docsnap-where-eq.textproto b/firestore/tests/unit/testdata/query-cursor-docsnap-where-eq.textproto new file mode 100644 index 000000000000..8b1e217df5f2 --- /dev/null +++ b/firestore/tests/unit/testdata/query-cursor-docsnap-where-eq.textproto @@ -0,0 +1,53 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A Where clause using equality doesn't change the implicit orderBy clauses. + +description: "query: cursor methods with a document snapshot and an equality where clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "==" + json_value: "3" + > + > + clauses: < + end_at: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + query: < + from: < + collection_id: "C" + > + where: < + field_filter: < + field: < + field_path: "a" + > + op: EQUAL + value: < + integer_value: 3 + > + > + > + order_by: < + field: < + field_path: "__name__" + > + direction: ASCENDING + > + end_at: < + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D" + > + > + > +> diff --git a/firestore/tests/unit/testdata/query-cursor-docsnap-where-neq-orderby.textproto b/firestore/tests/unit/testdata/query-cursor-docsnap-where-neq-orderby.textproto new file mode 100644 index 000000000000..a69edfc50d11 --- /dev/null +++ b/firestore/tests/unit/testdata/query-cursor-docsnap-where-neq-orderby.textproto @@ -0,0 +1,72 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If there is an OrderBy clause, the inequality Where clause does not result in a +# new OrderBy clause. 
We still add a __name__ OrderBy clause + +description: "query: cursor method, doc snapshot, inequality where clause, and existing orderBy clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "desc" + > + > + clauses: < + where: < + path: < + field: "a" + > + op: "<" + json_value: "4" + > + > + clauses: < + start_at: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + query: < + from: < + collection_id: "C" + > + where: < + field_filter: < + field: < + field_path: "a" + > + op: LESS_THAN + value: < + integer_value: 4 + > + > + > + order_by: < + field: < + field_path: "a" + > + direction: DESCENDING + > + order_by: < + field: < + field_path: "__name__" + > + direction: DESCENDING + > + start_at: < + values: < + integer_value: 7 + > + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D" + > + before: true + > + > +> diff --git a/firestore/tests/unit/testdata/query-cursor-docsnap-where-neq.textproto b/firestore/tests/unit/testdata/query-cursor-docsnap-where-neq.textproto new file mode 100644 index 000000000000..871dd0ba3392 --- /dev/null +++ b/firestore/tests/unit/testdata/query-cursor-docsnap-where-neq.textproto @@ -0,0 +1,64 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A Where clause with an inequality results in an OrderBy clause on that clause's +# path, if there are no other OrderBy clauses. + +description: "query: cursor method with a document snapshot and an inequality where clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "<=" + json_value: "3" + > + > + clauses: < + end_before: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + query: < + from: < + collection_id: "C" + > + where: < + field_filter: < + field: < + field_path: "a" + > + op: LESS_THAN_OR_EQUAL + value: < + integer_value: 3 + > + > + > + order_by: < + field: < + field_path: "a" + > + direction: ASCENDING + > + order_by: < + field: < + field_path: "__name__" + > + direction: ASCENDING + > + end_at: < + values: < + integer_value: 7 + > + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D" + > + before: true + > + > +> diff --git a/firestore/tests/unit/testdata/query-cursor-docsnap.textproto b/firestore/tests/unit/testdata/query-cursor-docsnap.textproto new file mode 100644 index 000000000000..184bffc2d326 --- /dev/null +++ b/firestore/tests/unit/testdata/query-cursor-docsnap.textproto @@ -0,0 +1,34 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# When a document snapshot is used, the client appends a __name__ order-by clause. 
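
This is the bare case of the same mechanism: no explicit orderBy at all, so the client has to synthesize the __name__ ordering before it can encode the cursor. Continuing under the same assumptions as the sketch above:

    from google.cloud import firestore

    client = firestore.Client()
    coll = client.collection('C')
    snap = coll.document('D').get()

    # With no order_by of its own, the client is expected to append
    # "order_by __name__ ASCENDING" and cursor on the document reference.
    query = coll.start_at(snap)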
+ +description: "query: cursor methods with a document snapshot" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + start_at: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "__name__" + > + direction: ASCENDING + > + start_at: < + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D" + > + before: true + > + > +> diff --git a/firestore/tests/unit/testdata/query-cursor-no-order.textproto b/firestore/tests/unit/testdata/query-cursor-no-order.textproto new file mode 100644 index 000000000000..fb999ddabb0f --- /dev/null +++ b/firestore/tests/unit/testdata/query-cursor-no-order.textproto @@ -0,0 +1,16 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a cursor method with a list of values is provided, there must be at least as +# many explicit orderBy clauses as values. + +description: "query: cursor method without orderBy" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + start_at: < + json_values: "2" + > + > + is_error: true +> diff --git a/firestore/tests/unit/testdata/query-cursor-vals-1a.textproto b/firestore/tests/unit/testdata/query-cursor-vals-1a.textproto new file mode 100644 index 000000000000..bb08ab7d4d5b --- /dev/null +++ b/firestore/tests/unit/testdata/query-cursor-vals-1a.textproto @@ -0,0 +1,50 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Cursor methods take the same number of values as there are OrderBy clauses. + +description: "query: StartAt/EndBefore with values" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + start_at: < + json_values: "7" + > + > + clauses: < + end_before: < + json_values: "9" + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "a" + > + direction: ASCENDING + > + start_at: < + values: < + integer_value: 7 + > + before: true + > + end_at: < + values: < + integer_value: 9 + > + before: true + > + > +> diff --git a/firestore/tests/unit/testdata/query-cursor-vals-1b.textproto b/firestore/tests/unit/testdata/query-cursor-vals-1b.textproto new file mode 100644 index 000000000000..41e69e9e6f14 --- /dev/null +++ b/firestore/tests/unit/testdata/query-cursor-vals-1b.textproto @@ -0,0 +1,48 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Cursor methods take the same number of values as there are OrderBy clauses. 
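
With explicit values instead of a snapshot, the Python API expresses the fixture's json_values as a dict keyed by the ordered fields (that mapping is this sketch's assumption); the Start*/End* variants choose inclusive or exclusive bounds:

    from google.cloud import firestore

    client = firestore.Client()
    query = (
        client.collection('C')
            .order_by('a')            # ASCENDING by default
            .start_after({'a': 7})    # exclusive; start_at() is inclusive
            .end_at({'a': 9}))        # inclusive; end_before() is exclusive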
+ +description: "query: StartAfter/EndAt with values" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + start_after: < + json_values: "7" + > + > + clauses: < + end_at: < + json_values: "9" + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "a" + > + direction: ASCENDING + > + start_at: < + values: < + integer_value: 7 + > + > + end_at: < + values: < + integer_value: 9 + > + > + > +> diff --git a/firestore/tests/unit/testdata/query-cursor-vals-2.textproto b/firestore/tests/unit/testdata/query-cursor-vals-2.textproto new file mode 100644 index 000000000000..8e37ad0035fa --- /dev/null +++ b/firestore/tests/unit/testdata/query-cursor-vals-2.textproto @@ -0,0 +1,71 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Cursor methods take the same number of values as there are OrderBy clauses. + +description: "query: Start/End with two values" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + order_by: < + path: < + field: "b" + > + direction: "desc" + > + > + clauses: < + start_at: < + json_values: "7" + json_values: "8" + > + > + clauses: < + end_at: < + json_values: "9" + json_values: "10" + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "a" + > + direction: ASCENDING + > + order_by: < + field: < + field_path: "b" + > + direction: DESCENDING + > + start_at: < + values: < + integer_value: 7 + > + values: < + integer_value: 8 + > + before: true + > + end_at: < + values: < + integer_value: 9 + > + values: < + integer_value: 10 + > + > + > +> diff --git a/firestore/tests/unit/testdata/query-cursor-vals-docid.textproto b/firestore/tests/unit/testdata/query-cursor-vals-docid.textproto new file mode 100644 index 000000000000..91af3486c998 --- /dev/null +++ b/firestore/tests/unit/testdata/query-cursor-vals-docid.textproto @@ -0,0 +1,50 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Cursor values corresponding to a __name__ field take the document path relative +# to the query's collection. + +description: "query: cursor methods with __name__" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "__name__" + > + direction: "asc" + > + > + clauses: < + start_after: < + json_values: "\"D1\"" + > + > + clauses: < + end_before: < + json_values: "\"D2\"" + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "__name__" + > + direction: ASCENDING + > + start_at: < + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D1" + > + > + end_at: < + values: < + reference_value: "projects/projectID/databases/(default)/documents/C/D2" + > + before: true + > + > +> diff --git a/firestore/tests/unit/testdata/query-cursor-vals-last-wins.textproto b/firestore/tests/unit/testdata/query-cursor-vals-last-wins.textproto new file mode 100644 index 000000000000..9e8fbb19f336 --- /dev/null +++ b/firestore/tests/unit/testdata/query-cursor-vals-last-wins.textproto @@ -0,0 +1,60 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# When multiple Start* or End* calls occur, the values of the last one are used. + +description: "query: cursor methods, last one wins" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + start_after: < + json_values: "1" + > + > + clauses: < + start_at: < + json_values: "2" + > + > + clauses: < + end_at: < + json_values: "3" + > + > + clauses: < + end_before: < + json_values: "4" + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "a" + > + direction: ASCENDING + > + start_at: < + values: < + integer_value: 2 + > + before: true + > + end_at: < + values: < + integer_value: 4 + > + before: true + > + > +> diff --git a/firestore/tests/unit/testdata/query-del-cursor.textproto b/firestore/tests/unit/testdata/query-del-cursor.textproto new file mode 100644 index 000000000000..c9d4adb7c5dc --- /dev/null +++ b/firestore/tests/unit/testdata/query-del-cursor.textproto @@ -0,0 +1,23 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Sentinel values are not permitted in queries. + +description: "query: Delete in cursor method" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + end_before: < + json_values: "\"Delete\"" + > + > + is_error: true +> diff --git a/firestore/tests/unit/testdata/query-del-where.textproto b/firestore/tests/unit/testdata/query-del-where.textproto new file mode 100644 index 000000000000..8e92529492ea --- /dev/null +++ b/firestore/tests/unit/testdata/query-del-where.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Sentinel values are not permitted in queries. + +description: "query: Delete in Where" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "==" + json_value: "\"Delete\"" + > + > + is_error: true +> diff --git a/firestore/tests/unit/testdata/query-invalid-operator.textproto b/firestore/tests/unit/testdata/query-invalid-operator.textproto new file mode 100644 index 000000000000..e580c64a759f --- /dev/null +++ b/firestore/tests/unit/testdata/query-invalid-operator.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The != operator is not supported. + +description: "query: invalid operator in Where clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "!=" + json_value: "4" + > + > + is_error: true +> diff --git a/firestore/tests/unit/testdata/query-invalid-path-order.textproto b/firestore/tests/unit/testdata/query-invalid-path-order.textproto new file mode 100644 index 000000000000..e0a72057620c --- /dev/null +++ b/firestore/tests/unit/testdata/query-invalid-path-order.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The path has an empty component. + +description: "query: invalid path in OrderBy clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "*" + field: "" + > + direction: "asc" + > + > + is_error: true +> diff --git a/firestore/tests/unit/testdata/query-invalid-path-select.textproto b/firestore/tests/unit/testdata/query-invalid-path-select.textproto new file mode 100644 index 000000000000..944f984f7fa9 --- /dev/null +++ b/firestore/tests/unit/testdata/query-invalid-path-select.textproto @@ -0,0 +1,18 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The path has an empty component. + +description: "query: invalid path in Where clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + select: < + fields: < + field: "*" + field: "" + > + > + > + is_error: true +> diff --git a/firestore/tests/unit/testdata/query-invalid-path-where.textproto b/firestore/tests/unit/testdata/query-invalid-path-where.textproto new file mode 100644 index 000000000000..527923b09799 --- /dev/null +++ b/firestore/tests/unit/testdata/query-invalid-path-where.textproto @@ -0,0 +1,20 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The path has an empty component. + +description: "query: invalid path in Where clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "*" + field: "" + > + op: "==" + json_value: "4" + > + > + is_error: true +> diff --git a/firestore/tests/unit/testdata/query-offset-limit-last-wins.textproto b/firestore/tests/unit/testdata/query-offset-limit-last-wins.textproto new file mode 100644 index 000000000000..dc301f439e8d --- /dev/null +++ b/firestore/tests/unit/testdata/query-offset-limit-last-wins.textproto @@ -0,0 +1,30 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# With multiple Offset or Limit clauses, the last one wins. + +description: "query: multiple Offset and Limit clauses" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + offset: 2 + > + clauses: < + limit: 3 + > + clauses: < + limit: 4 + > + clauses: < + offset: 5 + > + query: < + from: < + collection_id: "C" + > + offset: 5 + limit: < + value: 4 + > + > +> diff --git a/firestore/tests/unit/testdata/query-offset-limit.textproto b/firestore/tests/unit/testdata/query-offset-limit.textproto new file mode 100644 index 000000000000..136d9d46a615 --- /dev/null +++ b/firestore/tests/unit/testdata/query-offset-limit.textproto @@ -0,0 +1,24 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Offset and Limit clauses. 
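
These map one-to-one onto the client's offset() and limit() methods. Queries are immutable, so each call derives a new query, and a repeated call simply replaces the earlier value, which is the last-one-wins behavior asserted by query-offset-limit-last-wins above. A sketch:

    from google.cloud import firestore

    client = firestore.Client()
    query = client.collection('C').offset(2).limit(3)

    # Repeated calls replace the previous value (last one wins):
    query = query.limit(4).offset(5)   # effective query: offset 5, limit 4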
+ +description: "query: Offset and Limit clauses" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + offset: 2 + > + clauses: < + limit: 3 + > + query: < + from: < + collection_id: "C" + > + offset: 2 + limit: < + value: 3 + > + > +> diff --git a/firestore/tests/unit/testdata/query-order.textproto b/firestore/tests/unit/testdata/query-order.textproto new file mode 100644 index 000000000000..7ed4c4ead840 --- /dev/null +++ b/firestore/tests/unit/testdata/query-order.textproto @@ -0,0 +1,42 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Multiple OrderBy clauses combine. + +description: "query: basic OrderBy clauses" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "b" + > + direction: "asc" + > + > + clauses: < + order_by: < + path: < + field: "a" + > + direction: "desc" + > + > + query: < + from: < + collection_id: "C" + > + order_by: < + field: < + field_path: "b" + > + direction: ASCENDING + > + order_by: < + field: < + field_path: "a" + > + direction: DESCENDING + > + > +> diff --git a/firestore/tests/unit/testdata/query-select-empty.textproto b/firestore/tests/unit/testdata/query-select-empty.textproto new file mode 100644 index 000000000000..def8b55ac515 --- /dev/null +++ b/firestore/tests/unit/testdata/query-select-empty.textproto @@ -0,0 +1,23 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# An empty Select clause selects just the document ID. + +description: "query: empty Select clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + select: < + > + > + query: < + select: < + fields: < + field_path: "__name__" + > + > + from: < + collection_id: "C" + > + > +> diff --git a/firestore/tests/unit/testdata/query-select-last-wins.textproto b/firestore/tests/unit/testdata/query-select-last-wins.textproto new file mode 100644 index 000000000000..bd78d09eb9b8 --- /dev/null +++ b/firestore/tests/unit/testdata/query-select-last-wins.textproto @@ -0,0 +1,36 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The last Select clause is the only one used. + +description: "query: two Select clauses" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + select: < + fields: < + field: "a" + > + fields: < + field: "b" + > + > + > + clauses: < + select: < + fields: < + field: "c" + > + > + > + query: < + select: < + fields: < + field_path: "c" + > + > + from: < + collection_id: "C" + > + > +> diff --git a/firestore/tests/unit/testdata/query-select.textproto b/firestore/tests/unit/testdata/query-select.textproto new file mode 100644 index 000000000000..15e11249730c --- /dev/null +++ b/firestore/tests/unit/testdata/query-select.textproto @@ -0,0 +1,32 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# An ordinary Select clause. 
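
Projections go through select(), which takes a list of field paths. Per the query-select-empty and query-select-last-wins fixtures above, an empty list is expected to collapse to just the document ID (__name__), and a later select() replaces an earlier one:

    from google.cloud import firestore

    client = firestore.Client()
    coll = client.collection('C')

    coll.select(['a', 'b'])                # project two fields
    coll.select([])                        # expected: only __name__ is returned
    coll.select(['a', 'b']).select(['c'])  # last select wins: only "c"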
+ +description: "query: Select clause with some fields" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + select: < + fields: < + field: "a" + > + fields: < + field: "b" + > + > + > + query: < + select: < + fields: < + field_path: "a" + > + fields: < + field_path: "b" + > + > + from: < + collection_id: "C" + > + > +> diff --git a/firestore/tests/unit/testdata/query-st-cursor.textproto b/firestore/tests/unit/testdata/query-st-cursor.textproto new file mode 100644 index 000000000000..66885d0dd5dc --- /dev/null +++ b/firestore/tests/unit/testdata/query-st-cursor.textproto @@ -0,0 +1,23 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Sentinel values are not permitted in queries. + +description: "query: ServerTimestamp in cursor method" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + order_by: < + path: < + field: "a" + > + direction: "asc" + > + > + clauses: < + end_before: < + json_values: "\"ServerTimestamp\"" + > + > + is_error: true +> diff --git a/firestore/tests/unit/testdata/query-st-where.textproto b/firestore/tests/unit/testdata/query-st-where.textproto new file mode 100644 index 000000000000..05da28d54291 --- /dev/null +++ b/firestore/tests/unit/testdata/query-st-where.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Sentinel values are not permitted in queries. + +description: "query: ServerTimestamp in Where" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "==" + json_value: "\"ServerTimestamp\"" + > + > + is_error: true +> diff --git a/firestore/tests/unit/testdata/query-where-2.textproto b/firestore/tests/unit/testdata/query-where-2.textproto new file mode 100644 index 000000000000..1034463079e1 --- /dev/null +++ b/firestore/tests/unit/testdata/query-where-2.textproto @@ -0,0 +1,59 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Multiple Where clauses are combined into a composite filter. + +description: "query: two Where clauses" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: ">=" + json_value: "5" + > + > + clauses: < + where: < + path: < + field: "b" + > + op: "<" + json_value: "\"foo\"" + > + > + query: < + from: < + collection_id: "C" + > + where: < + composite_filter: < + op: AND + filters: < + field_filter: < + field: < + field_path: "a" + > + op: GREATER_THAN_OR_EQUAL + value: < + integer_value: 5 + > + > + > + filters: < + field_filter: < + field: < + field_path: "b" + > + op: LESS_THAN + value: < + string_value: "foo" + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/query-where-NaN.textproto b/firestore/tests/unit/testdata/query-where-NaN.textproto new file mode 100644 index 000000000000..4a97ca7dde1f --- /dev/null +++ b/firestore/tests/unit/testdata/query-where-NaN.textproto @@ -0,0 +1,31 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
+ +# A Where clause that tests for equality with NaN results in a unary filter. + +description: "query: a Where clause comparing to NaN" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "==" + json_value: "\"NaN\"" + > + > + query: < + from: < + collection_id: "C" + > + where: < + unary_filter: < + op: IS_NAN + field: < + field_path: "a" + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/query-where-null.textproto b/firestore/tests/unit/testdata/query-where-null.textproto new file mode 100644 index 000000000000..1869c60c72aa --- /dev/null +++ b/firestore/tests/unit/testdata/query-where-null.textproto @@ -0,0 +1,31 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A Where clause that tests for equality with null results in a unary filter. + +description: "query: a Where clause comparing to null" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: "==" + json_value: "null" + > + > + query: < + from: < + collection_id: "C" + > + where: < + unary_filter: < + op: IS_NULL + field: < + field_path: "a" + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/query-where.textproto b/firestore/tests/unit/testdata/query-where.textproto new file mode 100644 index 000000000000..045c2befab88 --- /dev/null +++ b/firestore/tests/unit/testdata/query-where.textproto @@ -0,0 +1,34 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A simple Where clause. + +description: "query: Where clause" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + where: < + path: < + field: "a" + > + op: ">" + json_value: "5" + > + > + query: < + from: < + collection_id: "C" + > + where: < + field_filter: < + field: < + field_path: "a" + > + op: GREATER_THAN + value: < + integer_value: 5 + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/query-wrong-collection.textproto b/firestore/tests/unit/testdata/query-wrong-collection.textproto new file mode 100644 index 000000000000..ad6f353d5fc9 --- /dev/null +++ b/firestore/tests/unit/testdata/query-wrong-collection.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a document snapshot is passed to a Start*/End* method, it must be in the same +# collection as the query. + +description: "query: doc snapshot with wrong collection in cursor method" +query: < + coll_path: "projects/projectID/databases/(default)/documents/C" + clauses: < + end_before: < + doc_snapshot: < + path: "projects/projectID/databases/(default)/documents/C2/D" + json_data: "{\"a\": 7, \"b\": 8}" + > + > + > + is_error: true +> diff --git a/firestore/tests/unit/testdata/set-basic.textproto b/firestore/tests/unit/testdata/set-basic.textproto new file mode 100644 index 000000000000..e9b292e3cdc3 --- /dev/null +++ b/firestore/tests/unit/testdata/set-basic.textproto @@ -0,0 +1,24 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
+ +# A simple call, resulting in a single update operation. + +description: "set: basic" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-complex.textproto b/firestore/tests/unit/testdata/set-complex.textproto new file mode 100644 index 000000000000..6ec19500a2d0 --- /dev/null +++ b/firestore/tests/unit/testdata/set-complex.textproto @@ -0,0 +1,58 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A call to a write method with complicated input data. + +description: "set: complex" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2.5], \"b\": {\"c\": [\"three\", {\"d\": true}]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + array_value: < + values: < + integer_value: 1 + > + values: < + double_value: 2.5 + > + > + > + > + fields: < + key: "b" + value: < + map_value: < + fields: < + key: "c" + value: < + array_value: < + values: < + string_value: "three" + > + values: < + map_value: < + fields: < + key: "d" + value: < + boolean_value: true + > + > + > + > + > + > + > + > + > + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-del-merge-alone.textproto b/firestore/tests/unit/testdata/set-del-merge-alone.textproto new file mode 100644 index 000000000000..811ab8dfe7bb --- /dev/null +++ b/firestore/tests/unit/testdata/set-del-merge-alone.textproto @@ -0,0 +1,28 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A Delete sentinel can appear with a merge option. If the delete paths are the +# only ones to be merged, then no document is sent, just an update mask. + +description: "set-merge: Delete with merge" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "b" + field: "c" + > + > + json_data: "{\"a\": 1, \"b\": {\"c\": \"Delete\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + update_mask: < + field_paths: "b.c" + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-del-merge.textproto b/firestore/tests/unit/testdata/set-del-merge.textproto new file mode 100644 index 000000000000..b8d8631051e7 --- /dev/null +++ b/firestore/tests/unit/testdata/set-del-merge.textproto @@ -0,0 +1,37 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A Delete sentinel can appear with a merge option. 
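+# A rough Python sketch, for illustration (list-valued merge assumed; here
+# doc = client.document("C/d")):
+#
+#   doc.set({"a": 1, "b": {"c": firestore.DELETE_FIELD}}, merge=["a", "b.c"])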
+ +description: "set-merge: Delete with merge" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "a" + > + fields: < + field: "b" + field: "c" + > + > + json_data: "{\"a\": 1, \"b\": {\"c\": \"Delete\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b.c" + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-del-mergeall.textproto b/firestore/tests/unit/testdata/set-del-mergeall.textproto new file mode 100644 index 000000000000..af1e84524bca --- /dev/null +++ b/firestore/tests/unit/testdata/set-del-mergeall.textproto @@ -0,0 +1,31 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A Delete sentinel can appear with a mergeAll option. + +description: "set: Delete with MergeAll" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + all: true + > + json_data: "{\"a\": 1, \"b\": {\"c\": \"Delete\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b.c" + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-del-noarray-nested.textproto b/firestore/tests/unit/testdata/set-del-noarray-nested.textproto new file mode 100644 index 000000000000..bbf6a3d00af3 --- /dev/null +++ b/firestore/tests/unit/testdata/set-del-noarray-nested.textproto @@ -0,0 +1,13 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. + +description: "set: Delete cannot be anywhere inside an array value" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": \"Delete\"}]}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/set-del-noarray.textproto b/firestore/tests/unit/testdata/set-del-noarray.textproto new file mode 100644 index 000000000000..07fc6497dc35 --- /dev/null +++ b/firestore/tests/unit/testdata/set-del-noarray.textproto @@ -0,0 +1,13 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. 
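+# For illustration, the rejected call would look roughly like
+#
+#   doc.set({"a": [1, 2, firestore.DELETE_FIELD]})  # client raises an error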
+ +description: "set: Delete cannot be in an array value" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, \"Delete\"]}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/set-del-nomerge.textproto b/firestore/tests/unit/testdata/set-del-nomerge.textproto new file mode 100644 index 000000000000..cb6ef4f85870 --- /dev/null +++ b/firestore/tests/unit/testdata/set-del-nomerge.textproto @@ -0,0 +1,17 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The client signals an error if the Delete sentinel is in the input data, but not +# selected by a merge option, because this is most likely a programming bug. + +description: "set-merge: Delete cannot appear in an unmerged field" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "a" + > + > + json_data: "{\"a\": 1, \"b\": \"Delete\"}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/set-del-nonleaf.textproto b/firestore/tests/unit/testdata/set-del-nonleaf.textproto new file mode 100644 index 000000000000..54f22d95c521 --- /dev/null +++ b/firestore/tests/unit/testdata/set-del-nonleaf.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a Delete is part of the value at a merge path, then the user is confused: +# their merge path says "replace this entire value" but their Delete says "delete +# this part of the value". This should be an error, just as if they specified +# Delete in a Set with no merge. + +description: "set-merge: Delete cannot appear as part of a merge path" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "h" + > + > + json_data: "{\"h\": {\"g\": \"Delete\"}}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/set-del-wo-merge.textproto b/firestore/tests/unit/testdata/set-del-wo-merge.textproto new file mode 100644 index 000000000000..29196628bfd8 --- /dev/null +++ b/firestore/tests/unit/testdata/set-del-wo-merge.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Without a merge option, Set replaces the document with the input data. A Delete +# sentinel in the data makes no sense in this case. + +description: "set: Delete cannot appear unless a merge option is specified" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"Delete\"}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/set-empty.textproto b/firestore/tests/unit/testdata/set-empty.textproto new file mode 100644 index 000000000000..c2b73d3ff933 --- /dev/null +++ b/firestore/tests/unit/testdata/set-empty.textproto @@ -0,0 +1,17 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
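+# Setting an empty map is valid: the document is replaced with {}. A rough
+# Python equivalent, for illustration:
+#
+#   client.document("C/d").set({})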
+ + +description: "set: creating or setting an empty map" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-merge-fp.textproto b/firestore/tests/unit/testdata/set-merge-fp.textproto new file mode 100644 index 000000000000..68690f6f1633 --- /dev/null +++ b/firestore/tests/unit/testdata/set-merge-fp.textproto @@ -0,0 +1,40 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A merge with fields that use special characters. + +description: "set-merge: Merge with FieldPaths" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "*" + field: "~" + > + > + json_data: "{\"*\": {\"~\": true}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "*" + value: < + map_value: < + fields: < + key: "~" + value: < + boolean_value: true + > + > + > + > + > + > + update_mask: < + field_paths: "`*`.`~`" + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-merge-nested.textproto b/firestore/tests/unit/testdata/set-merge-nested.textproto new file mode 100644 index 000000000000..0d1282818d76 --- /dev/null +++ b/firestore/tests/unit/testdata/set-merge-nested.textproto @@ -0,0 +1,41 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A merge option where the field is not at top level. Only fields mentioned in the +# option are present in the update operation. + +description: "set-merge: Merge with a nested field" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "h" + field: "g" + > + > + json_data: "{\"h\": {\"g\": 4, \"f\": 5}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "h" + value: < + map_value: < + fields: < + key: "g" + value: < + integer_value: 4 + > + > + > + > + > + > + update_mask: < + field_paths: "h.g" + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-merge-nonleaf.textproto b/firestore/tests/unit/testdata/set-merge-nonleaf.textproto new file mode 100644 index 000000000000..ca41cb03402d --- /dev/null +++ b/firestore/tests/unit/testdata/set-merge-nonleaf.textproto @@ -0,0 +1,46 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a field path is in a merge option, the value at that path replaces the stored +# value. That is true even if the value is complex. 
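+# A rough Python sketch, for illustration (list-valued merge assumed):
+#
+#   doc.set({"h": {"f": 5, "g": 6}, "e": 7}, merge=["h"])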
+ +description: "set-merge: Merge field is not a leaf" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "h" + > + > + json_data: "{\"h\": {\"f\": 5, \"g\": 6}, \"e\": 7}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "h" + value: < + map_value: < + fields: < + key: "f" + value: < + integer_value: 5 + > + > + fields: < + key: "g" + value: < + integer_value: 6 + > + > + > + > + > + > + update_mask: < + field_paths: "h" + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-merge-prefix.textproto b/firestore/tests/unit/testdata/set-merge-prefix.textproto new file mode 100644 index 000000000000..1e2c2c50226e --- /dev/null +++ b/firestore/tests/unit/testdata/set-merge-prefix.textproto @@ -0,0 +1,21 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The prefix would make the other path meaningless, so this is probably a +# programming error. + +description: "set-merge: One merge path cannot be the prefix of another" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "a" + > + fields: < + field: "a" + field: "b" + > + > + json_data: "{\"a\": {\"b\": 1}}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/set-merge-present.textproto b/firestore/tests/unit/testdata/set-merge-present.textproto new file mode 100644 index 000000000000..f6665de5cdc3 --- /dev/null +++ b/firestore/tests/unit/testdata/set-merge-present.textproto @@ -0,0 +1,20 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The client signals an error if a merge option mentions a path that is not in the +# input data. + +description: "set-merge: Merge fields must all be present in data" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "b" + > + fields: < + field: "a" + > + > + json_data: "{\"a\": 1}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/set-merge.textproto b/firestore/tests/unit/testdata/set-merge.textproto new file mode 100644 index 000000000000..279125253cb1 --- /dev/null +++ b/firestore/tests/unit/testdata/set-merge.textproto @@ -0,0 +1,32 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Fields in the input data but not in a merge option are pruned. 
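+# A rough Python sketch, for illustration (list-valued merge assumed):
+#
+#   doc.set({"a": 1, "b": 2}, merge=["a"])  # "b" is pruned from the write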
+ +description: "set-merge: Merge with a field" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "a" + > + > + json_data: "{\"a\": 1, \"b\": 2}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-mergeall-empty.textproto b/firestore/tests/unit/testdata/set-mergeall-empty.textproto new file mode 100644 index 000000000000..16df8a22bed3 --- /dev/null +++ b/firestore/tests/unit/testdata/set-mergeall-empty.textproto @@ -0,0 +1,23 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# This is a valid call that can be used to ensure a document exists. + +description: "set: MergeAll can be specified with empty data." +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + all: true + > + json_data: "{}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + update_mask: < + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-mergeall-nested.textproto b/firestore/tests/unit/testdata/set-mergeall-nested.textproto new file mode 100644 index 000000000000..1fbc6973cd28 --- /dev/null +++ b/firestore/tests/unit/testdata/set-mergeall-nested.textproto @@ -0,0 +1,45 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# MergeAll with nested fields results in an update mask that includes entries for +# all the leaf fields. + +description: "set: MergeAll with nested fields" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + all: true + > + json_data: "{\"h\": { \"g\": 3, \"f\": 4 }}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "h" + value: < + map_value: < + fields: < + key: "f" + value: < + integer_value: 4 + > + > + fields: < + key: "g" + value: < + integer_value: 3 + > + > + > + > + > + > + update_mask: < + field_paths: "h.f" + field_paths: "h.g" + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-mergeall.textproto b/firestore/tests/unit/testdata/set-mergeall.textproto new file mode 100644 index 000000000000..cb2ebc52bc06 --- /dev/null +++ b/firestore/tests/unit/testdata/set-mergeall.textproto @@ -0,0 +1,37 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The MergeAll option with a simple piece of data. 
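+# In the Python client, MergeAll corresponds roughly to merge=True; for
+# illustration:
+#
+#   doc.set({"a": 1, "b": 2}, merge=True)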
+ +description: "set: MergeAll" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + all: true + > + json_data: "{\"a\": 1, \"b\": 2}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + fields: < + key: "b" + value: < + integer_value: 2 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-nodel.textproto b/firestore/tests/unit/testdata/set-nodel.textproto new file mode 100644 index 000000000000..0fb887d461be --- /dev/null +++ b/firestore/tests/unit/testdata/set-nodel.textproto @@ -0,0 +1,11 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel cannot be used in Create, or in Set without a Merge option. + +description: "set: Delete cannot appear in data" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"Delete\"}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/set-nosplit.textproto b/firestore/tests/unit/testdata/set-nosplit.textproto new file mode 100644 index 000000000000..0ff3fadcf4ba --- /dev/null +++ b/firestore/tests/unit/testdata/set-nosplit.textproto @@ -0,0 +1,37 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Create and Set treat their map keys literally. They do not split on dots. + +description: "set: don\342\200\231t split on dots" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{ \"a.b\": { \"c.d\": 1 }, \"e\": 2 }" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a.b" + value: < + map_value: < + fields: < + key: "c.d" + value: < + integer_value: 1 + > + > + > + > + > + fields: < + key: "e" + value: < + integer_value: 2 + > + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-special-chars.textproto b/firestore/tests/unit/testdata/set-special-chars.textproto new file mode 100644 index 000000000000..f4122c9f004c --- /dev/null +++ b/firestore/tests/unit/testdata/set-special-chars.textproto @@ -0,0 +1,38 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Create and Set treat their map keys literally. They do not escape special +# characters. + +description: "set: non-alpha characters in map keys" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{ \"*\": { \".\": 1 }, \"~\": 2 }" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "*" + value: < + map_value: < + fields: < + key: "." 
+ value: < + integer_value: 1 + > + > + > + > + > + fields: < + key: "~" + value: < + integer_value: 2 + > + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-st-alone-mergeall.textproto b/firestore/tests/unit/testdata/set-st-alone-mergeall.textproto new file mode 100644 index 000000000000..16ce4cfbd913 --- /dev/null +++ b/firestore/tests/unit/testdata/set-st-alone-mergeall.textproto @@ -0,0 +1,26 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the only values in the input are ServerTimestamps, then no update operation +# should be produced. + +description: "set: ServerTimestamp alone with MergeAll" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + all: true + > + json_data: "{\"a\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-st-alone.textproto b/firestore/tests/unit/testdata/set-st-alone.textproto new file mode 100644 index 000000000000..6ce46d7f1ab5 --- /dev/null +++ b/firestore/tests/unit/testdata/set-st-alone.textproto @@ -0,0 +1,28 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the only values in the input are ServerTimestamps, then an update operation +# with an empty map should be produced. + +description: "set: ServerTimestamp alone" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-st-merge-both.textproto b/firestore/tests/unit/testdata/set-st-merge-both.textproto new file mode 100644 index 000000000000..5cc7bbc9efbf --- /dev/null +++ b/firestore/tests/unit/testdata/set-st-merge-both.textproto @@ -0,0 +1,45 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Just as when no merge option is specified, ServerTimestamp sentinel values are +# removed from the data in the update operation and become transforms. 
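+# A rough Python sketch, for illustration (list-valued merge assumed):
+#
+#   doc.set({"a": 1, "b": firestore.SERVER_TIMESTAMP}, merge=["a", "b"])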
+ +description: "set-merge: ServerTimestamp with Merge of both fields" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "a" + > + fields: < + field: "b" + > + > + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-st-merge-nonleaf-alone.textproto b/firestore/tests/unit/testdata/set-st-merge-nonleaf-alone.textproto new file mode 100644 index 000000000000..f513b6c804c5 --- /dev/null +++ b/firestore/tests/unit/testdata/set-st-merge-nonleaf-alone.textproto @@ -0,0 +1,37 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a field path is in a merge option, the value at that path replaces the stored +# value. If the value has only ServerTimestamps, they become transforms and we +# clear the value by including the field path in the update mask. + +description: "set-merge: non-leaf merge field with ServerTimestamp alone" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "h" + > + > + json_data: "{\"h\": {\"g\": \"ServerTimestamp\"}, \"e\": 7}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + update_mask: < + field_paths: "h" + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "h.g" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-st-merge-nonleaf.textproto b/firestore/tests/unit/testdata/set-st-merge-nonleaf.textproto new file mode 100644 index 000000000000..e53e7e2682eb --- /dev/null +++ b/firestore/tests/unit/testdata/set-st-merge-nonleaf.textproto @@ -0,0 +1,49 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a field path is in a merge option, the value at that path replaces the stored +# value, and ServerTimestamps inside that value become transforms as usual. 
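+# A rough Python sketch, for illustration (list-valued merge assumed):
+#
+#   doc.set({"h": {"f": 5, "g": firestore.SERVER_TIMESTAMP}, "e": 7},
+#           merge=["h"])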
+ +description: "set-merge: non-leaf merge field with ServerTimestamp" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "h" + > + > + json_data: "{\"h\": {\"f\": 5, \"g\": \"ServerTimestamp\"}, \"e\": 7}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "h" + value: < + map_value: < + fields: < + key: "f" + value: < + integer_value: 5 + > + > + > + > + > + > + update_mask: < + field_paths: "h" + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "h.g" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-st-merge-nowrite.textproto b/firestore/tests/unit/testdata/set-st-merge-nowrite.textproto new file mode 100644 index 000000000000..3222230dc510 --- /dev/null +++ b/firestore/tests/unit/testdata/set-st-merge-nowrite.textproto @@ -0,0 +1,28 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If all the fields in the merge option have ServerTimestamp values, then no +# update operation is produced, only a transform. + +description: "set-merge: If no ordinary values in Merge, no write" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + fields: < + field: "b" + > + > + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-st-mergeall.textproto b/firestore/tests/unit/testdata/set-st-mergeall.textproto new file mode 100644 index 000000000000..b8c53a566fdd --- /dev/null +++ b/firestore/tests/unit/testdata/set-st-mergeall.textproto @@ -0,0 +1,40 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Just as when no merge option is specified, ServerTimestamp sentinel values are +# removed from the data in the update operation and become transforms. + +description: "set: ServerTimestamp with MergeAll" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + option: < + all: true + > + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-st-multi.textproto b/firestore/tests/unit/testdata/set-st-multi.textproto new file mode 100644 index 000000000000..375ec18d68fd --- /dev/null +++ b/firestore/tests/unit/testdata/set-st-multi.textproto @@ -0,0 +1,38 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can have more than one ServerTimestamp field. Since all the +# ServerTimestamp fields are removed, the only field in the update is "a". + +description: "set: multiple ServerTimestamp fields" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": {\"d\": \"ServerTimestamp\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + field_transforms: < + field_path: "c.d" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-st-nested.textproto b/firestore/tests/unit/testdata/set-st-nested.textproto new file mode 100644 index 000000000000..abfd2e8fd874 --- /dev/null +++ b/firestore/tests/unit/testdata/set-st-nested.textproto @@ -0,0 +1,35 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A ServerTimestamp value can occur at any depth. In this case, the transform +# applies to the field path "b.c". Since "c" is removed from the update, "b" +# becomes empty, so it is also removed from the update. + +description: "set: nested ServerTimestamp field" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": {\"c\": \"ServerTimestamp\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/set-st-noarray-nested.textproto b/firestore/tests/unit/testdata/set-st-noarray-nested.textproto new file mode 100644 index 000000000000..241d79151a42 --- /dev/null +++ b/firestore/tests/unit/testdata/set-st-noarray-nested.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ServerTimestamp sentinel. Firestore transforms don't support array indexing. + +description: "set: ServerTimestamp cannot be anywhere inside an array value" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": \"ServerTimestamp\"}]}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/set-st-noarray.textproto b/firestore/tests/unit/testdata/set-st-noarray.textproto new file mode 100644 index 000000000000..591fb0343854 --- /dev/null +++ b/firestore/tests/unit/testdata/set-st-noarray.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. 
This file was generated by
+# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
+
+# The ServerTimestamp sentinel must be the value of a field. Firestore transforms
+# don't support array indexing.
+
+description: "set: ServerTimestamp cannot be in an array value"
+set: <
+  doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
+  json_data: "{\"a\": [1, 2, \"ServerTimestamp\"]}"
+  is_error: true
+>
diff --git a/firestore/tests/unit/testdata/set-st-nomerge.textproto b/firestore/tests/unit/testdata/set-st-nomerge.textproto
new file mode 100644
index 000000000000..20c0ae1fbb0e
--- /dev/null
+++ b/firestore/tests/unit/testdata/set-st-nomerge.textproto
@@ -0,0 +1,33 @@
+# DO NOT MODIFY. This file was generated by
+# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
+
+# If the ServerTimestamp value is not mentioned in a merge option, then it is
+# pruned from the data but does not result in a transform.
+
+description: "set-merge: If ServerTimestamp is not in Merge, no transform"
+set: <
+  doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
+  option: <
+    fields: <
+      field: "a"
+    >
+  >
+  json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}"
+  request: <
+    database: "projects/projectID/databases/(default)"
+    writes: <
+      update: <
+        name: "projects/projectID/databases/(default)/documents/C/d"
+        fields: <
+          key: "a"
+          value: <
+            integer_value: 1
+          >
+        >
+      >
+      update_mask: <
+        field_paths: "a"
+      >
+    >
+  >
+>
diff --git a/firestore/tests/unit/testdata/set-st.textproto b/firestore/tests/unit/testdata/set-st.textproto
new file mode 100644
index 000000000000..8bceddceeacc
--- /dev/null
+++ b/firestore/tests/unit/testdata/set-st.textproto
@@ -0,0 +1,36 @@
+# DO NOT MODIFY. This file was generated by
+# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
+
+# A key with the special ServerTimestamp sentinel is removed from the data in the
+# update operation. Instead it appears in a separate Transform operation. Note
+# that in these tests, the string "ServerTimestamp" should be replaced with the
+# special ServerTimestamp value.
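+# In Python the sentinel is firestore.SERVER_TIMESTAMP; a rough equivalent of
+# this case, for illustration:
+#
+#   doc.set({"a": 1, "b": firestore.SERVER_TIMESTAMP})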
+ +description: "set: ServerTimestamp with data" +set: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/test-suite.binproto b/firestore/tests/unit/testdata/test-suite.binproto new file mode 100644 index 0000000000000000000000000000000000000000..38b7e8804682a8775ce94483a6c93d786a27c468 GIT binary patch literal 38337 zcmdsA3y>Ved1hwsq*F_R(?|$u0BIO7l9$t+c1{miBtXkafkQ$d@vk z?CeP>^2(2Z9h2CGU{j8Za$F?kRGc`$PGXmxIEkH?lQ^O5DiSJA6&0r(r-Jj2ld4n_ z@^$z0?99&I?p)7Em{7&t331*3|NZa3|L!GEDrDol)5;t8V%SEhUR+vqYAvls_B5LH zN1bBJo$!9Rb84bwx9mCFb=--aC8umJRaU?Q zP$}x4Q{vcX+P3#>p!o==DE#y zJ|jupX3F!1>B|V^d(GBM*gr$JX*-BAr@>0?DDU#ZO-^`_xEE%u{bGn~an>#VV8H}n@|l|en%*2}RWIlv&1)#3N5K#cq?p#9QChxQKZNFv3AU!^V<2tT8cNr7;e)(F=!=TNtTuxU4& z_F3b!U0rhYw@dEtnK?^70BqNm<|EY8ASmB9m?pb+_RNCQq{O+ElG9D#J0%Icj%x!Y z@El8wA~Yf&0+H_~SwUmi6ZeEGp+6RIdG44I@U%kqFH6AyaAbq@l@V}6fM6v3wbDsc zx+H7Z)q2g*e_J*(Z{a&fg`qFTPEc;AQ8|fAw_z~9sStux(yJ!S&q^|+`_+VbDmDX$ zl2;NhsAL@@-zy``XC&RGE>lLBKO5gwo+Yv+MBGwATc*|^UsuRmI7kEP2lJD%;z#{p z&czn|jYPI~_%DU%jFp~+gZXV)`Ue#b=1Q_L%)i_Y{wSx zNg^8|`VJ0D}5wyryJ1pvNi)K;gT{4n|9t8Kn3cE)i+UxiLP+EnBY$x0c$S#~->o3wMRJNbT< z&^CMC5bW{BpcmO#r^_`_B+4BDQI5BsGZ%P&b{^j*(LpHV`yQj+Ha{5O+0N*MWERHD z^vk?-=Cze53)&`2qel;A@IOR2}=nx|G~NvZzQ;NxqL0+n&g=Y=QetA3y01A`KtQ=MRSbfDj*~Sna(-O0kkJT>8T8AXNVn16UZ%Tkz8O^rUCf&UnBG~Y{W^`f!BlAs z-WjgLuDRuUbJ6^!tW)yB@nFNe_~H3|Dwzyp0+#E|Ql(}$y<;49Y~I1+OtaE*X2uPRJD4tIF+VG7Y+l5k=oPV}#}_a|I%>bP9>zB77c~l!nLC}I3SMm+U8PhZM;h#q2&ZUvQJ)9{EtGe3aC+# zZ)UP4N@%%EdOZ($t4h*!-+=f%EAnz%0@60Fm=xEcp`E#-eP_?J$Zl&0&o!2jsW(|{b!SKr%^fEbP zammHV7Ptkcji%#bGFFUknRiR-H%s#7cz3`*Tt+YM8HF4O`jGIz*ixFitUuB-=F_b`xWl zWgoBxYFhKbBvdWvn?te7cfques*sxkL>;d=o}B2mf%%-)0@I^%<#9$E5?xHjn6hAX zJSFW9>X-3I!KYzz?KbH8I{KJP4Va^ts?L5v(kKIS5Uto|IZXqzjcE=gdBFKRMymxY z-b_UHlak4Z+)Tua@iUSq716b2%*zf)EHn2If5sYP1D6WWxV~ng5>jz_*@I;BOl*BP z7D7dS>d>3E|5Jr*3UkMk)a7V=a8^IGexE(amhKL2L%OPS;GAFsGABn1H*qPbJ0h9TKXDJwgqOB2;#3P89pOjG}9n(T!hGQyn$L1QG`Md7*tb4IA!_b*Gq0B|nQsL^W* zxm=JGx)hm}ZI9~fSmHEJFa6}&=2P)4ZdPAShLLmOS7~i3DF6+FB{mx@!6EaU7&s`R zQIA15mE0=0(v%5cLL&A@r0=y!s|@oD5!)HE_^pwCW9<|D1NJ?4l^{&sNW7(Sww$TsNX`GfoFF z&Y6#npThTjm`7QWy6}ONgkWGOWwW)3Xmt3p2uLh6upoS#qqjo z*?*Kp;98y}v%_R&FCtNwicuUtxGMnUVLw~19csK)Qp|3D#c z6=u{Ofq1eRgy#=s;kl~gNTNql=3}v8`4NhPbx2N4wKbkkg8Q(^R>8JcqjauNXQ$-W z*9o{hPp^=(680N-_<`@Si2#VhM4KLS z<_MeR#TyLf)qS5|$i5iggnn3Bgs=KdGyxoUK-M3~T2lu%C1;KZPm_Ja+Jwk2^suIT zE|!fgS-5a57gm_UclcYu^}yt$jv@xzcRTZyr17WwC<8z9%tn2!wBn9vPCNUiZY#d# z*P{Z<0nJ70ONrs6Mp*2vlO|xMuRQgD`Ts*}kq%7scW-?fjF#lU1owGc#l^fXM`Jz$ zxBF>@9EnR;|2~@ajs!u_yqXf}#iDLH+mz5B!`06)p@Tb(*6}1HjSAfOMhvcx;=bnq zDh8k=*<0eG+!R2`ql$s^&wksW+m?6XF)sSPp*K-XD_`qtvLUpS5oK3$s|C#@dfrPr zj|nD&ZP9R~SNiZ4k4pB1CE;J?F+84mzmX2thx)J9_hk9UR%QUrkZoaQ=`IjukJ5FoB0MdczCr+{B{af|nAQur zC)vRWKk4PZl9g?yGKtvnJE$!u_UkWM-yjFFG?bT1HcDC+Epqyv;(} zaDV33F-XU}oLjhGYTtDPK8+oH(23g(1jF1K&MNQ)6Ww(G(`2i+f9p&gH`@ZNx`nqT z-xFse$RFs%h;;Hp(l&xL8uWa?Zqi4GTXmzhRIN&i$Di~F z#?i_mo+#!D;}-6e+FmtOP2HK%*D}tt>PTj{O4?+nx0_!|FT3r%Nw&q-@VCZrXZxYu z!vrV1S9$xh7sZX^O`NS4&ibrADz)c2`X~j}$&Bq5we`AZDnlj5E$UY={!KjF$<&bl 
zU?c59EWOSn<|G{gIMkXrTq_kE9Ckb0IkERT-9jS?QSd~Rs9_7+@7IQ>@$DvkTYx6q zYtFC2lijOFl>+^~28kfDuWLi3l`*P7sXNpPQC3ZZvOK4(z$nwZC>x@b73kMB1kEJO zMzFBK0HXwPa;RhVz(N~ACVF@x^w9D|IHxfYY9rPv3Z8PKwx+Ne_-Sfxx94?)I29pa zHBTtX_;&<=Vz>c>tD}UT=L<-SOHCI84U0}|p9MzC}K7-1VVpYCF7p!p3 zccI=Yyeucnqt>)!8(>QF-WA$Ur#Mi0-`Ljg?Je&-*_V|`w5I^YODef-Wl+#zCd^y& z_xzd5EuivF-B6F_(?qTk_C>yhn_=5g^S23$m&KHPO(l1&3{e)R=?5Y_wuFIod?o^a zQQ1rK{#8!%I>{DN2AIatLYjEKli}G(8i$dgWxBvr3V7p7PBG~{_M^Q(T-HDZhLu`x z6!GZ|GAi0Z@c>70I!zQ8yHHdaiY&cAD38E-3)4+WNMY;WSII-YKpmqW4CA}HF9nQ=NdSjJ$UAR!B05Fmh9$R_jZS*wxgfg!$(=LQ$5~JcS!9ts$J^s#L8*L zTHSBybh8#N=wz{^ofI~w!-^+x==4rfS!AJ2QoPMW>&U`EDHd6AW?`GNelgy4!sjbV z;Q^@~wrN*&dI8aK2W&l0HhX?&inaJVJIb}=F4s18cJ%xD*a&?f(SGT{WGJ;RJS@d* zxR$$odZX5p;}+Q#;W)hgi4?Y9H;p<=3=e_*-&085+cp?Ld5&_XSx57!Sg+C^A$!fC zW`s$?(-bl($+79X$el!@zOyEJ)D&RjJ!mp}Yy=Ze&IY~SaPXjLr@45Qf=%q)O*?Ax zPJlL9Y5S2i)Hn%~D<%QmG`V(`-_v7#N7_sS33*~xQPw!MJbjZ4Cx*3o$GzEWYKt;( zdK00o{|rG1Zqc4OI+Hk=!hMhp;+O4GiGR}G^aoXPTt9~LPeN7`rJO$}Pq&~A`mCaa zc2_lg99(LvYY&2*)K=T_y7&i4)`&{JyPD)92#pzw^-`sL)~K|Qf_knBKjPU8e9R4S zMB~z)w=1>jPp`hPP%5b`kMx~MN~MzG8lsY$%>9xI;4P6!g*J5c8Eu&kZ9{Jw{ZX{% zlWV#!9qlOk_DMC}UyZ7__6V#;O>a#@7|-j}dNOIq$(MgBcJ8khw(}wK(hxb)!_JvS zbDv6$dJbt84I?z&V*iyklPb8v&rjqH`Xt8m6GN#eK1Hyn`%Ks7nRial=F$|n(VQrmfrSman@V2R$UdPt{?WbvIp65=mP+)>I#0+^%;02} z$K}%WC$uE8_PO>Ao60(YZ;YB?!UcW!e7PiCaF6ibLcOC^ToM5Gise}yDvCtul4FuQ zq(6WM6C?{(T=mA>@@Ob|rKuOIITDC@pWrg2H_}{MNx0mDNgT%I{|YvRs)Q=5=Y|tI z8nLc)6+EW1+F1tf8!QF9G=yCiNj3SD5~WU)PpWpHUV3Dr?>gG*E6QMFfA z$=Y`x&6iy=CyYth603Ka%aB#DsIQP+De7CHs?tS%R8<3oPLhSAsv2B4L4F%WGN?_7 z6g3KMaOsS8KYAXLQK*lW!nCB9?W3t%6qZ*Fk&mn;+eKb=;_%7C_Zh9FX3fQ0z-2!R ze~lyw_r8ILvWWzdHKLWQbr~!!)5*_7;P#gv^82HFvov@Rn*;)JhBeF+RX#pMq2+@R zC0gD$U$K0kc|5GiS3W-3B8^C1L-|UVYRFatrk_gi^T`Y>%q&#V{!==a^5GO>%79ks zU=X!qIFU{aOwyo^5{j^&(f%z(&Se8?RMnHnJGxr8yobqmFh3(a@>IRJMDK>4a2D&Q z9X$EJV4rqeqgpxT7~=2varyKwrhDWPDfw_a+DdpRl*-wk&}AiwLHoMwuNII4bT6$2 zvA=x4P*^+TQzIlNgnXn@r4RApvmEduwCj!DySPAPxRoDu=%c_kPIq_J8`FQE&WWN= zRl+`jZ??i+_fVPyG151d(DKq1bdC&V>1|W*yd%vpCGDr8dYDA3qw)x7brQD~B{Q(F z7pDG9I#x%?PVB6GjY%co`SubHcnQjssA_ius*)#%$gRRf2w#6YoABV%EKL>@N;9~+ zGw-x!DwsU*A_J}hu(^cs>91b!ppd}6qgrKkMiA0COZ4TI#HgZF21Io+LdCan*+J;R zeN5I|3uX>YO$CAjJww4M^#VR<1_##$iyvGyj60{MpG#+~$48V#4qi^I)z%eJi;UR@R_N;=;DS$vq`69eRyGBm2%l>(lBDuOTjEz17^vPHY; literal 0 HcmV?d00001 diff --git a/firestore/tests/unit/testdata/update-badchar.textproto b/firestore/tests/unit/testdata/update-badchar.textproto new file mode 100644 index 000000000000..656ff53b686a --- /dev/null +++ b/firestore/tests/unit/testdata/update-badchar.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The keys of the data given to Update are interpreted, unlike those of Create and +# Set. They cannot contain special characters. + +description: "update: invalid character" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a~b\": 1}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-basic.textproto b/firestore/tests/unit/testdata/update-basic.textproto new file mode 100644 index 000000000000..9da316f58ebe --- /dev/null +++ b/firestore/tests/unit/testdata/update-basic.textproto @@ -0,0 +1,30 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A simple call, resulting in a single update operation. 
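+# A rough Python equivalent, for illustration:
+#
+#   client.document("C/d").update({"a": 1})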
+ +description: "update: basic" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-complex.textproto b/firestore/tests/unit/testdata/update-complex.textproto new file mode 100644 index 000000000000..1a6d9eff64b9 --- /dev/null +++ b/firestore/tests/unit/testdata/update-complex.textproto @@ -0,0 +1,65 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A call to a write method with complicated input data. + +description: "update: complex" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2.5], \"b\": {\"c\": [\"three\", {\"d\": true}]}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + array_value: < + values: < + integer_value: 1 + > + values: < + double_value: 2.5 + > + > + > + > + fields: < + key: "b" + value: < + map_value: < + fields: < + key: "c" + value: < + array_value: < + values: < + string_value: "three" + > + values: < + map_value: < + fields: < + key: "d" + value: < + boolean_value: true + > + > + > + > + > + > + > + > + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-del-alone.textproto b/firestore/tests/unit/testdata/update-del-alone.textproto new file mode 100644 index 000000000000..8f558233f037 --- /dev/null +++ b/firestore/tests/unit/testdata/update-del-alone.textproto @@ -0,0 +1,25 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the input data consists solely of Deletes, then the update operation has no +# map, just an update mask. + +description: "update: Delete alone" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": \"Delete\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-del-dot.textproto b/firestore/tests/unit/testdata/update-del-dot.textproto new file mode 100644 index 000000000000..c0ebdf61f787 --- /dev/null +++ b/firestore/tests/unit/testdata/update-del-dot.textproto @@ -0,0 +1,46 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# After expanding top-level dotted fields, fields with Delete values are pruned +# from the output data, but appear in the update mask. 
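+# A rough Python equivalent, for illustration (Update interprets dotted keys
+# as field paths):
+#
+#   doc.update({"a": 1, "b.c": firestore.DELETE_FIELD, "b.d": 2})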
+ +description: "update: Delete with a dotted field" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b.c\": \"Delete\", \"b.d\": 2}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + fields: < + key: "b" + value: < + map_value: < + fields: < + key: "d" + value: < + integer_value: 2 + > + > + > + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b.c" + field_paths: "b.d" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-del-nested.textproto b/firestore/tests/unit/testdata/update-del-nested.textproto new file mode 100644 index 000000000000..ed102697e682 --- /dev/null +++ b/firestore/tests/unit/testdata/update-del-nested.textproto @@ -0,0 +1,11 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a top-level key. + +description: "update: Delete cannot be nested" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": {\"b\": \"Delete\"}}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-del-noarray-nested.textproto b/firestore/tests/unit/testdata/update-del-noarray-nested.textproto new file mode 100644 index 000000000000..a2eec49661c0 --- /dev/null +++ b/firestore/tests/unit/testdata/update-del-noarray-nested.textproto @@ -0,0 +1,13 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. + +description: "update: Delete cannot be anywhere inside an array value" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": \"Delete\"}]}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-del-noarray.textproto b/firestore/tests/unit/testdata/update-del-noarray.textproto new file mode 100644 index 000000000000..a7eea87ef49f --- /dev/null +++ b/firestore/tests/unit/testdata/update-del-noarray.textproto @@ -0,0 +1,13 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. + +description: "update: Delete cannot be in an array value" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, \"Delete\"]}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-del.textproto b/firestore/tests/unit/testdata/update-del.textproto new file mode 100644 index 000000000000..ec443e6c7035 --- /dev/null +++ b/firestore/tests/unit/testdata/update-del.textproto @@ -0,0 +1,32 @@ +# DO NOT MODIFY. 
This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a field's value is the Delete sentinel, then it doesn't appear in the update +# data, but does in the mask. + +description: "update: Delete" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"Delete\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-exists-precond.textproto b/firestore/tests/unit/testdata/update-exists-precond.textproto new file mode 100644 index 000000000000..3c6fef4e2263 --- /dev/null +++ b/firestore/tests/unit/testdata/update-exists-precond.textproto @@ -0,0 +1,14 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Update method does not support an explicit exists precondition. + +description: "update: Exists precondition is invalid" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + precondition: < + exists: true + > + json_data: "{\"a\": 1}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-fp-empty-component.textproto b/firestore/tests/unit/testdata/update-fp-empty-component.textproto new file mode 100644 index 000000000000..c3bceff3e4b8 --- /dev/null +++ b/firestore/tests/unit/testdata/update-fp-empty-component.textproto @@ -0,0 +1,11 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Empty fields are not allowed. + +description: "update: empty field path component" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a..b\": 1}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-no-paths.textproto b/firestore/tests/unit/testdata/update-no-paths.textproto new file mode 100644 index 000000000000..b524b7483f79 --- /dev/null +++ b/firestore/tests/unit/testdata/update-no-paths.textproto @@ -0,0 +1,11 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# It is a client-side error to call Update with empty data. + +description: "update: no paths" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-paths-basic.textproto b/firestore/tests/unit/testdata/update-paths-basic.textproto new file mode 100644 index 000000000000..515f29d6af02 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-basic.textproto @@ -0,0 +1,33 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A simple call, resulting in a single update operation. 
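+
+# A rough sketch of the same case with the Python client, assuming the
+# conformance runner maps both the "update" and "update-paths" suites onto the
+# client's single update() method:
+#
+#     from google.cloud import firestore
+#
+#     client = firestore.Client(project='projectID')
+#     client.collection('C').document('d').update({'a': 1})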
+ +description: "update-paths: basic" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "1" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-paths-complex.textproto b/firestore/tests/unit/testdata/update-paths-complex.textproto new file mode 100644 index 000000000000..38a832239f5c --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-complex.textproto @@ -0,0 +1,72 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A call to a write method with complicated input data. + +description: "update-paths: complex" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + json_values: "[1, 2.5]" + json_values: "{\"c\": [\"three\", {\"d\": true}]}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + array_value: < + values: < + integer_value: 1 + > + values: < + double_value: 2.5 + > + > + > + > + fields: < + key: "b" + value: < + map_value: < + fields: < + key: "c" + value: < + array_value: < + values: < + string_value: "three" + > + values: < + map_value: < + fields: < + key: "d" + value: < + boolean_value: true + > + > + > + > + > + > + > + > + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-paths-del-alone.textproto b/firestore/tests/unit/testdata/update-paths-del-alone.textproto new file mode 100644 index 000000000000..5dbb787de94b --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-del-alone.textproto @@ -0,0 +1,28 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the input data consists solely of Deletes, then the update operation has no +# map, just an update mask. + +description: "update-paths: Delete alone" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "\"Delete\"" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-paths-del-nested.textproto b/firestore/tests/unit/testdata/update-paths-del-nested.textproto new file mode 100644 index 000000000000..bdf65fb0ad91 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-del-nested.textproto @@ -0,0 +1,14 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a top-level key. 
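+
+# Sketch of the assumed client behavior: a sentinel below a top-level key
+# should fail on the client, before any RPC is sent, e.g.:
+#
+#     from google.cloud import firestore
+#
+#     client = firestore.Client(project='projectID')
+#     # expected to raise a client-side error such as ValueError:
+#     client.collection('C').document('d').update(
+#         {'a': {'b': firestore.DELETE_FIELD}})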
+ +description: "update-paths: Delete cannot be nested" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "{\"b\": \"Delete\"}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-paths-del-noarray-nested.textproto b/firestore/tests/unit/testdata/update-paths-del-noarray-nested.textproto new file mode 100644 index 000000000000..d3da15dda80e --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-del-noarray-nested.textproto @@ -0,0 +1,16 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. + +description: "update-paths: Delete cannot be anywhere inside an array value" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[1, {\"b\": \"Delete\"}]" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-paths-del-noarray.textproto b/firestore/tests/unit/testdata/update-paths-del-noarray.textproto new file mode 100644 index 000000000000..9ebdd0945198 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-del-noarray.textproto @@ -0,0 +1,16 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Delete sentinel must be the value of a field. Deletes are implemented by +# turning the path to the Delete sentinel into a FieldPath, and FieldPaths do not +# support array indexing. + +description: "update-paths: Delete cannot be in an array value" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[1, 2, \"Delete\"]" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-paths-del.textproto b/firestore/tests/unit/testdata/update-paths-del.textproto new file mode 100644 index 000000000000..5197a78488f0 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-del.textproto @@ -0,0 +1,39 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If a field's value is the Delete sentinel, then it doesn't appear in the update +# data, but does in the mask. 
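+
+# Sketch with the Python client (an assumed mapping of this case, for
+# illustration only):
+#
+#     from google.cloud import firestore
+#
+#     client = firestore.Client(project='projectID')
+#     client.collection('C').document('d').update(
+#         {'a': 1, 'b': firestore.DELETE_FIELD})
+#
+# "b" should appear only in the update mask, not among the written fields.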
+ +description: "update-paths: Delete" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + json_values: "1" + json_values: "\"Delete\"" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-paths-exists-precond.textproto b/firestore/tests/unit/testdata/update-paths-exists-precond.textproto new file mode 100644 index 000000000000..084e07726ee0 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-exists-precond.textproto @@ -0,0 +1,17 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Update method does not support an explicit exists precondition. + +description: "update-paths: Exists precondition is invalid" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + precondition: < + exists: true + > + field_paths: < + field: "a" + > + json_values: "1" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-paths-fp-del.textproto b/firestore/tests/unit/testdata/update-paths-fp-del.textproto new file mode 100644 index 000000000000..5c92aeb8ca8b --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-fp-del.textproto @@ -0,0 +1,47 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If one nested field is deleted, and another isn't, preserve the second. + +description: "update-paths: field paths with delete" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "foo" + field: "bar" + > + field_paths: < + field: "foo" + field: "delete" + > + json_values: "1" + json_values: "\"Delete\"" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "foo" + value: < + map_value: < + fields: < + key: "bar" + value: < + integer_value: 1 + > + > + > + > + > + > + update_mask: < + field_paths: "foo.bar" + field_paths: "foo.delete" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-paths-fp-dup.textproto b/firestore/tests/unit/testdata/update-paths-fp-dup.textproto new file mode 100644 index 000000000000..fedbd3aab99d --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-fp-dup.textproto @@ -0,0 +1,22 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The same field cannot occur more than once. 
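+
+# A minimal sketch of the duplicate check a client could apply before building
+# the request (illustrative only; a real client would presumably canonicalize
+# the FieldPaths first):
+#
+#     paths = ['a', 'b', 'a']
+#     if len(paths) != len(set(paths)):
+#         raise ValueError('duplicate field paths: %r' % sorted(paths))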
+ +description: "update-paths: duplicate field path" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + field_paths: < + field: "a" + > + json_values: "1" + json_values: "2" + json_values: "3" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-paths-fp-empty-component.textproto b/firestore/tests/unit/testdata/update-paths-fp-empty-component.textproto new file mode 100644 index 000000000000..7a5df25b7ed2 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-fp-empty-component.textproto @@ -0,0 +1,15 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# Empty fields are not allowed. + +description: "update-paths: empty field path component" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "*" + field: "" + > + json_values: "1" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-paths-fp-empty.textproto b/firestore/tests/unit/testdata/update-paths-fp-empty.textproto new file mode 100644 index 000000000000..311e309326d1 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-fp-empty.textproto @@ -0,0 +1,13 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A FieldPath of length zero is invalid. + +description: "update-paths: empty field path" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + > + json_values: "1" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-paths-fp-multi.textproto b/firestore/tests/unit/testdata/update-paths-fp-multi.textproto new file mode 100644 index 000000000000..9ba41e39812c --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-fp-multi.textproto @@ -0,0 +1,42 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The UpdatePaths or equivalent method takes a list of FieldPaths. Each FieldPath +# is a sequence of uninterpreted path components. + +description: "update-paths: multiple-element field path" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + field: "b" + > + json_values: "1" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + map_value: < + fields: < + key: "b" + value: < + integer_value: 1 + > + > + > + > + > + > + update_mask: < + field_paths: "a.b" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-paths-fp-nosplit.textproto b/firestore/tests/unit/testdata/update-paths-fp-nosplit.textproto new file mode 100644 index 000000000000..516495266707 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-fp-nosplit.textproto @@ -0,0 +1,48 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# FieldPath components are not split on dots. 
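+
+# Each FieldPath component is taken literally, so components containing dots
+# must be rendered back into the update mask with backticks. A self-contained
+# sketch of that quoting rule (illustrative, not the client's actual code):
+#
+#     import re
+#
+#     def quote(component):
+#         # simple identifiers pass through; everything else is backtick-quoted
+#         if re.match(r'^[A-Za-z_][A-Za-z_0-9]*$', component):
+#             return component
+#         escaped = component.replace('\\', '\\\\').replace('`', '\\`')
+#         return '`' + escaped + '`'
+#
+#     '.'.join(quote(c) for c in ['a.b', 'f.g'])   # -> `a.b`.`f.g`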
+ +description: "update-paths: FieldPath elements are not split on dots" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a.b" + field: "f.g" + > + json_values: "{\"n.o\": 7}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a.b" + value: < + map_value: < + fields: < + key: "f.g" + value: < + map_value: < + fields: < + key: "n.o" + value: < + integer_value: 7 + > + > + > + > + > + > + > + > + > + update_mask: < + field_paths: "`a.b`.`f.g`" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-paths-no-paths.textproto b/firestore/tests/unit/testdata/update-paths-no-paths.textproto new file mode 100644 index 000000000000..d9939dc94701 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-no-paths.textproto @@ -0,0 +1,10 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# It is a client-side error to call Update with empty data. + +description: "update-paths: no paths" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-paths-prefix-1.textproto b/firestore/tests/unit/testdata/update-paths-prefix-1.textproto new file mode 100644 index 000000000000..1710b91097e3 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-prefix-1.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# In the input data, one field cannot be a prefix of another. + +description: "update-paths: prefix #1" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + field: "b" + > + field_paths: < + field: "a" + > + json_values: "1" + json_values: "2" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-paths-prefix-2.textproto b/firestore/tests/unit/testdata/update-paths-prefix-2.textproto new file mode 100644 index 000000000000..be78ab58a63b --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-prefix-2.textproto @@ -0,0 +1,19 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# In the input data, one field cannot be a prefix of another. + +description: "update-paths: prefix #2" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "a" + field: "b" + > + json_values: "1" + json_values: "2" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-paths-prefix-3.textproto b/firestore/tests/unit/testdata/update-paths-prefix-3.textproto new file mode 100644 index 000000000000..b8a84c9d1f80 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-prefix-3.textproto @@ -0,0 +1,20 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
+ +# In the input data, one field cannot be a prefix of another, even if the values +# could in principle be combined. + +description: "update-paths: prefix #3" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "a" + field: "d" + > + json_values: "{\"b\": 1}" + json_values: "2" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-paths-special-chars.textproto b/firestore/tests/unit/testdata/update-paths-special-chars.textproto new file mode 100644 index 000000000000..51cb33b31268 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-special-chars.textproto @@ -0,0 +1,53 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# FieldPaths can contain special characters. + +description: "update-paths: special characters" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "*" + field: "~" + > + field_paths: < + field: "*" + field: "`" + > + json_values: "1" + json_values: "2" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "*" + value: < + map_value: < + fields: < + key: "`" + value: < + integer_value: 2 + > + > + fields: < + key: "~" + value: < + integer_value: 1 + > + > + > + > + > + > + update_mask: < + field_paths: "`*`.`\\``" + field_paths: "`*`.`~`" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-paths-st-alone.textproto b/firestore/tests/unit/testdata/update-paths-st-alone.textproto new file mode 100644 index 000000000000..abc44f55b463 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-st-alone.textproto @@ -0,0 +1,29 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the only values in the input are ServerTimestamps, then no update operation +# should be produced. + +description: "update-paths: ServerTimestamp alone" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "\"ServerTimestamp\"" + request: < + database: "projects/projectID/databases/(default)" + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "a" + set_to_server_value: REQUEST_TIME + > + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-paths-st-multi.textproto b/firestore/tests/unit/testdata/update-paths-st-multi.textproto new file mode 100644 index 000000000000..b0b7df17d836 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-st-multi.textproto @@ -0,0 +1,56 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A document can have more than one ServerTimestamp field. Since all the +# ServerTimestamp fields are removed, the only field in the update is "a". 
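+
+# In this suite the parallel field_paths/json_values lists describe one call;
+# a sketch of how a test runner could pair them into Python update() data
+# (illustrative, with "ServerTimestamp" later mapped to the client sentinel):
+#
+#     import json
+#
+#     field_paths = ['a', 'b', 'c']
+#     json_values = ['1', '"ServerTimestamp"', '{"d": "ServerTimestamp"}']
+#     data = {path: json.loads(value)
+#             for path, value in zip(field_paths, json_values)}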
+ +description: "update-paths: multiple ServerTimestamp fields" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + field_paths: < + field: "c" + > + json_values: "1" + json_values: "\"ServerTimestamp\"" + json_values: "{\"d\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "c" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + field_transforms: < + field_path: "c.d" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-paths-st-nested.textproto b/firestore/tests/unit/testdata/update-paths-st-nested.textproto new file mode 100644 index 000000000000..3077368318e8 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-st-nested.textproto @@ -0,0 +1,49 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A ServerTimestamp value can occur at any depth. In this case, the transform +# applies to the field path "b.c". Since "c" is removed from the update, "b" +# becomes empty, so it is also removed from the update. + +description: "update-paths: nested ServerTimestamp field" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + json_values: "1" + json_values: "{\"c\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-paths-st-noarray-nested.textproto b/firestore/tests/unit/testdata/update-paths-st-noarray-nested.textproto new file mode 100644 index 000000000000..2c2cb89b62f4 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-st-noarray-nested.textproto @@ -0,0 +1,15 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ServerTimestamp sentinel. Firestore transforms don't support array indexing. 
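+
+# Sketch of the assumed client behavior: the sentinel inside an array value
+# should be rejected on the client, e.g.:
+#
+#     from google.cloud import firestore
+#
+#     client = firestore.Client(project='projectID')
+#     # expected to raise a client-side error such as ValueError:
+#     client.collection('C').document('d').update(
+#         {'a': [1, {'b': firestore.SERVER_TIMESTAMP}]})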
+ +description: "update-paths: ServerTimestamp cannot be anywhere inside an array value" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[1, {\"b\": \"ServerTimestamp\"}]" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-paths-st-noarray.textproto b/firestore/tests/unit/testdata/update-paths-st-noarray.textproto new file mode 100644 index 000000000000..a2baa66f5762 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-st-noarray.textproto @@ -0,0 +1,15 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The ServerTimestamp sentinel must be the value of a field. Firestore transforms +# don't support array indexing. + +description: "update-paths: ServerTimestamp cannot be in an array value" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + json_values: "[1, 2, \"ServerTimestamp\"]" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-paths-st.textproto b/firestore/tests/unit/testdata/update-paths-st.textproto new file mode 100644 index 000000000000..40634c165864 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-st.textproto @@ -0,0 +1,49 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A key with the special ServerTimestamp sentinel is removed from the data in the +# update operation. Instead it appears in a separate Transform operation. Note +# that in these tests, the string "ServerTimestamp" should be replaced with the +# special ServerTimestamp value. + +description: "update-paths: ServerTimestamp with data" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + field_paths: < + field: "a" + > + field_paths: < + field: "b" + > + json_values: "1" + json_values: "\"ServerTimestamp\"" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-paths-uptime.textproto b/firestore/tests/unit/testdata/update-paths-uptime.textproto new file mode 100644 index 000000000000..7a15874bea64 --- /dev/null +++ b/firestore/tests/unit/testdata/update-paths-uptime.textproto @@ -0,0 +1,40 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Update call supports a last-update-time precondition. 
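+
+# Hypothetical Python-client spelling of this precondition (the write_option
+# helper and its last_update_time keyword are assumptions, not confirmed by
+# this test data):
+#
+#     import datetime
+#
+#     from google.cloud import firestore
+#
+#     client = firestore.Client(project='projectID')
+#     when = datetime.datetime.utcfromtimestamp(42)
+#     client.collection('C').document('d').update(
+#         {'a': 1}, option=client.write_option(last_update_time=when))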
+ +description: "update-paths: last-update-time precondition" +update_paths: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + precondition: < + update_time: < + seconds: 42 + > + > + field_paths: < + field: "a" + > + json_values: "1" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + update_time: < + seconds: 42 + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-prefix-1.textproto b/firestore/tests/unit/testdata/update-prefix-1.textproto new file mode 100644 index 000000000000..e5c895e73b49 --- /dev/null +++ b/firestore/tests/unit/testdata/update-prefix-1.textproto @@ -0,0 +1,11 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# In the input data, one field cannot be a prefix of another. + +description: "update: prefix #1" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a.b\": 1, \"a\": 2}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-prefix-2.textproto b/firestore/tests/unit/testdata/update-prefix-2.textproto new file mode 100644 index 000000000000..4870176186a7 --- /dev/null +++ b/firestore/tests/unit/testdata/update-prefix-2.textproto @@ -0,0 +1,11 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# In the input data, one field cannot be a prefix of another. + +description: "update: prefix #2" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"a.b\": 2}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-prefix-3.textproto b/firestore/tests/unit/testdata/update-prefix-3.textproto new file mode 100644 index 000000000000..0c03b0d6b845 --- /dev/null +++ b/firestore/tests/unit/testdata/update-prefix-3.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# In the input data, one field cannot be a prefix of another, even if the values +# could in principle be combined. + +description: "update: prefix #3" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": {\"b\": 1}, \"a.d\": 2}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-quoting.textproto b/firestore/tests/unit/testdata/update-quoting.textproto new file mode 100644 index 000000000000..20e530a7609a --- /dev/null +++ b/firestore/tests/unit/testdata/update-quoting.textproto @@ -0,0 +1,45 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# In a field path, any component beginning with a non-letter or underscore is +# quoted. 
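+
+# More precisely: a component is left bare only when it begins with a letter
+# or underscore; any other starting character forces backtick quoting. A
+# sketch of the resulting call (the client call is illustrative):
+#
+#     from google.cloud import firestore
+#
+#     client = firestore.Client(project='projectID')
+#     client.collection('C').document('d').update({'_0.1.+2': 1})
+#
+# which should yield the update mask _0.`1`.`+2`.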
+ +description: "update: non-letter starting chars are quoted, except underscore" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"_0.1.+2\": 1}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "_0" + value: < + map_value: < + fields: < + key: "1" + value: < + map_value: < + fields: < + key: "+2" + value: < + integer_value: 1 + > + > + > + > + > + > + > + > + > + update_mask: < + field_paths: "_0.`1`.`+2`" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-split-top-level.textproto b/firestore/tests/unit/testdata/update-split-top-level.textproto new file mode 100644 index 000000000000..d1b0ca0da163 --- /dev/null +++ b/firestore/tests/unit/testdata/update-split-top-level.textproto @@ -0,0 +1,45 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Update method splits only top-level keys at dots. Keys at other levels are +# taken literally. + +description: "update: Split on dots for top-level keys only" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"h.g\": {\"j.k\": 6}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "h" + value: < + map_value: < + fields: < + key: "g" + value: < + map_value: < + fields: < + key: "j.k" + value: < + integer_value: 6 + > + > + > + > + > + > + > + > + > + update_mask: < + field_paths: "h.g" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-split.textproto b/firestore/tests/unit/testdata/update-split.textproto new file mode 100644 index 000000000000..b96fd6a4f70a --- /dev/null +++ b/firestore/tests/unit/testdata/update-split.textproto @@ -0,0 +1,44 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Update method splits top-level keys at dots. + +description: "update: split on dots" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a.b.c\": 1}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + map_value: < + fields: < + key: "b" + value: < + map_value: < + fields: < + key: "c" + value: < + integer_value: 1 + > + > + > + > + > + > + > + > + > + update_mask: < + field_paths: "a.b.c" + > + current_document: < + exists: true + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-st-alone.textproto b/firestore/tests/unit/testdata/update-st-alone.textproto new file mode 100644 index 000000000000..0d5ab6e9fbaf --- /dev/null +++ b/firestore/tests/unit/testdata/update-st-alone.textproto @@ -0,0 +1,26 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# If the only values in the input are ServerTimestamps, then no update operation +# should be produced. 
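+
+# Sketch with the Python client (an assumed mapping of this case):
+#
+#     from google.cloud import firestore
+#
+#     client = firestore.Client(project='projectID')
+#     client.collection('C').document('d').update(
+#         {'a': firestore.SERVER_TIMESTAMP})
+#
+# Only a transform write should be sent; the exists precondition rides on it.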
+
+description: "update: ServerTimestamp alone"
+update: <
+  doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
+  json_data: "{\"a\": \"ServerTimestamp\"}"
+  request: <
+    database: "projects/projectID/databases/(default)"
+    writes: <
+      transform: <
+        document: "projects/projectID/databases/(default)/documents/C/d"
+        field_transforms: <
+          field_path: "a"
+          set_to_server_value: REQUEST_TIME
+        >
+      >
+      current_document: <
+        exists: true
+      >
+    >
+  >
+>
diff --git a/firestore/tests/unit/testdata/update-st-dot.textproto b/firestore/tests/unit/testdata/update-st-dot.textproto
new file mode 100644
index 000000000000..19d4d18432e7
--- /dev/null
+++ b/firestore/tests/unit/testdata/update-st-dot.textproto
@@ -0,0 +1,27 @@
+# DO NOT MODIFY. This file was generated by
+# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
+
+# Like other uses of ServerTimestamp, the data is pruned and the field does not
+# appear in the update mask, because it is in the transform. In this case an
+# update operation is produced just to hold the precondition.
+
+description: "update: ServerTimestamp with dotted field"
+update: <
+  doc_ref_path: "projects/projectID/databases/(default)/documents/C/d"
+  json_data: "{\"a.b.c\": \"ServerTimestamp\"}"
+  request: <
+    database: "projects/projectID/databases/(default)"
+    writes: <
+      transform: <
+        document: "projects/projectID/databases/(default)/documents/C/d"
+        field_transforms: <
+          field_path: "a.b.c"
+          set_to_server_value: REQUEST_TIME
+        >
+      >
+      current_document: <
+        exists: true
+      >
+    >
+  >
+>
diff --git a/firestore/tests/unit/testdata/update-st-multi.textproto b/firestore/tests/unit/testdata/update-st-multi.textproto
new file mode 100644
index 000000000000..0434cb59ab5a
--- /dev/null
+++ b/firestore/tests/unit/testdata/update-st-multi.textproto
@@ -0,0 +1,49 @@
+# DO NOT MODIFY. This file was generated by
+# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go.
+
+# A document can have more than one ServerTimestamp field. Since all the
+# ServerTimestamp fields are removed, the only field in the update is "a".
+
+# b is not in the mask because it will be set in the transform. c must be in the
+# mask: it should be replaced entirely. The transform will set c.d to the
+# timestamp, but the update will delete the rest of c.
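+
+# A sketch of input data that produces this shape (illustrative client call):
+#
+#     from google.cloud import firestore
+#
+#     client = firestore.Client(project='projectID')
+#     client.collection('C').document('d').update(
+#         {'a': 1,
+#          'b': firestore.SERVER_TIMESTAMP,
+#          'c': {'d': firestore.SERVER_TIMESTAMP}})
+#
+# Expected: an update write with field "a" and mask {a, c}, plus a transform
+# write setting "b" and "c.d" to the request time.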
+ +description: "update: multiple ServerTimestamp fields" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\", \"c\": {\"d\": \"ServerTimestamp\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "c" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + field_transforms: < + field_path: "c.d" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-st-nested.textproto b/firestore/tests/unit/testdata/update-st-nested.textproto new file mode 100644 index 000000000000..f79d9c6a072a --- /dev/null +++ b/firestore/tests/unit/testdata/update-st-nested.textproto @@ -0,0 +1,42 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A ServerTimestamp value can occur at any depth. In this case, the transform +# applies to the field path "b.c". Since "c" is removed from the update, "b" +# becomes empty, so it is also removed from the update. + +description: "update: nested ServerTimestamp field" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": {\"c\": \"ServerTimestamp\"}}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + field_paths: "b" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b.c" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-st-noarray-nested.textproto b/firestore/tests/unit/testdata/update-st-noarray-nested.textproto new file mode 100644 index 000000000000..2939dd646436 --- /dev/null +++ b/firestore/tests/unit/testdata/update-st-noarray-nested.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# There cannot be an array value anywhere on the path from the document root to +# the ServerTimestamp sentinel. Firestore transforms don't support array indexing. + +description: "update: ServerTimestamp cannot be anywhere inside an array value" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, {\"b\": \"ServerTimestamp\"}]}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-st-noarray.textproto b/firestore/tests/unit/testdata/update-st-noarray.textproto new file mode 100644 index 000000000000..f3879cdf2260 --- /dev/null +++ b/firestore/tests/unit/testdata/update-st-noarray.textproto @@ -0,0 +1,12 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. 
+ +# The ServerTimestamp sentinel must be the value of a field. Firestore transforms +# don't support array indexing. + +description: "update: ServerTimestamp cannot be in an array value" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": [1, 2, \"ServerTimestamp\"]}" + is_error: true +> diff --git a/firestore/tests/unit/testdata/update-st.textproto b/firestore/tests/unit/testdata/update-st.textproto new file mode 100644 index 000000000000..12045a9220dc --- /dev/null +++ b/firestore/tests/unit/testdata/update-st.textproto @@ -0,0 +1,42 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# A key with the special ServerTimestamp sentinel is removed from the data in the +# update operation. Instead it appears in a separate Transform operation. Note +# that in these tests, the string "ServerTimestamp" should be replaced with the +# special ServerTimestamp value. + +description: "update: ServerTimestamp with data" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + json_data: "{\"a\": 1, \"b\": \"ServerTimestamp\"}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + exists: true + > + > + writes: < + transform: < + document: "projects/projectID/databases/(default)/documents/C/d" + field_transforms: < + field_path: "b" + set_to_server_value: REQUEST_TIME + > + > + > + > +> diff --git a/firestore/tests/unit/testdata/update-uptime.textproto b/firestore/tests/unit/testdata/update-uptime.textproto new file mode 100644 index 000000000000..66119ac61c13 --- /dev/null +++ b/firestore/tests/unit/testdata/update-uptime.textproto @@ -0,0 +1,37 @@ +# DO NOT MODIFY. This file was generated by +# github.com/GoogleCloudPlatform/google-cloud-common/testing/firestore/cmd/generate-firestore-tests/generate-firestore-tests.go. + +# The Update call supports a last-update-time precondition. 
+ +description: "update: last-update-time precondition" +update: < + doc_ref_path: "projects/projectID/databases/(default)/documents/C/d" + precondition: < + update_time: < + seconds: 42 + > + > + json_data: "{\"a\": 1}" + request: < + database: "projects/projectID/databases/(default)" + writes: < + update: < + name: "projects/projectID/databases/(default)/documents/C/d" + fields: < + key: "a" + value: < + integer_value: 1 + > + > + > + update_mask: < + field_paths: "a" + > + current_document: < + update_time: < + seconds: 42 + > + > + > + > +> From 71e5d4bf94745580834b86c3e92ac4186c3115c0 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Mon, 21 May 2018 16:25:37 -0400 Subject: [PATCH 18/75] Begin deprecation process for google-cloud umbrella package (#5321) * Begin deprecation process for google-cloud umbrella package --- README.rst | 12 ------- legacy/google-cloud/setup-README.rst | 40 ++++++++++++++++++++++ setup.cfg => legacy/google-cloud/setup.cfg | 0 setup.py => legacy/google-cloud/setup.py | 16 ++++++--- nox.py | 2 +- 5 files changed, 53 insertions(+), 17 deletions(-) create mode 100644 legacy/google-cloud/setup-README.rst rename setup.cfg => legacy/google-cloud/setup.cfg (100%) rename setup.py => legacy/google-cloud/setup.py (84%) diff --git a/README.rst b/README.rst index 2813122cf205..905c75d6accf 100644 --- a/README.rst +++ b/README.rst @@ -121,18 +121,6 @@ If you need support for other Google APIs, check out the .. _Google APIs Python Client library: https://github.com/google/google-api-python-client -Quick Start ------------ - -.. code-block:: console - - $ pip install --upgrade google-cloud - -For more information on setting up your Python development environment, -such as installing ``pip`` and ``virtualenv`` on your system, please refer -to `Python Development Environment Setup Guide`_ for Google Cloud Platform. - -.. _Python Development Environment Setup Guide: https://cloud.google.com/python/setup Example Applications -------------------- diff --git a/legacy/google-cloud/setup-README.rst b/legacy/google-cloud/setup-README.rst new file mode 100644 index 000000000000..13da52c2335d --- /dev/null +++ b/legacy/google-cloud/setup-README.rst @@ -0,0 +1,40 @@ +Google Cloud Python Client +========================== + + Python idiomatic client for `Google Cloud Platform`_ services. + + WARNING: The google-cloud Python package is deprecated. On June 18, 2018, + this package will no longer install any other packages. Please install the + `product-specific google-cloud-* packages`__ needed for your application. + +.. __: https://github.com/GoogleCloudPlatform/google-cloud-python#google-cloud-python-client + +.. _Google Cloud Platform: https://cloud.google.com/ + +|pypi| |circleci| |appveyor| |coverage| |versions| + +- `Homepage`_ +- `API Documentation`_ +- `Read The Docs Documentation`_ + +.. _Homepage: https://googlecloudplatform.github.io/google-cloud-python/ +.. _API Documentation: https://googlecloudplatform.github.io/google-cloud-python/latest/ +.. _Read The Docs Documentation: https://google-cloud-python.readthedocs.io/en/latest/ + +License +------- + +Apache 2.0 - See `the LICENSE`_ for more information. + +.. _the LICENSE: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/LICENSE + +.. |circleci| image:: https://circleci.com/gh/GoogleCloudPlatform/google-cloud-python.svg?style=shield + :target: https://circleci.com/gh/GoogleCloudPlatform/google-cloud-python +.. 
|appveyor| image:: https://ci.appveyor.com/api/projects/status/github/googlecloudplatform/google-cloud-python?branch=master&svg=true + :target: https://ci.appveyor.com/project/GoogleCloudPlatform/google-cloud-python +.. |coverage| image:: https://coveralls.io/repos/GoogleCloudPlatform/google-cloud-python/badge.svg?branch=master + :target: https://coveralls.io/r/GoogleCloudPlatform/google-cloud-python?branch=master +.. |pypi| image:: https://img.shields.io/pypi/v/google-cloud.svg + :target: https://pypi.org/project/google-cloud/ +.. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud.svg + :target: https://pypi.org/project/google-cloud/ diff --git a/setup.cfg b/legacy/google-cloud/setup.cfg similarity index 100% rename from setup.cfg rename to legacy/google-cloud/setup.cfg diff --git a/setup.py b/legacy/google-cloud/setup.py similarity index 84% rename from setup.py rename to legacy/google-cloud/setup.py index 94ce3ced83c8..7528d557594c 100644 --- a/setup.py +++ b/legacy/google-cloud/setup.py @@ -13,13 +13,13 @@ # limitations under the License. import os +import logging from setuptools import setup - PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) -with open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: +with open(os.path.join(PACKAGE_ROOT, 'setup-README.rst')) as file_obj: README = file_obj.read() # NOTE: This is duplicated throughout and we should try to @@ -34,7 +34,7 @@ 'include_package_data': True, 'zip_safe': False, 'classifiers': [ - 'Development Status :: 4 - Beta', + 'Development Status :: 7 - Inactive', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', @@ -76,9 +76,17 @@ setup( name='google-cloud', - version='0.32.1.dev1', + version='0.33.0', description='API Client library for Google Cloud', long_description=README, install_requires=REQUIREMENTS, **SETUP_BASE ) + +warning = "WARNING: The google-cloud Python package is deprecated. On " \ + "June 18, 2018, this package will no longer install any other " \ + "packages. Please install the product-specific google-cloud-* " \ + "packages needed for your application. See " \ + "https://github.com/GoogleCloudPlatform/google-cloud-python." 
+ +logging.warn(warning) diff --git a/nox.py b/nox.py index 663b1cb7c083..d86b0edb6995 100644 --- a/nox.py +++ b/nox.py @@ -46,4 +46,4 @@ def lint_setup_py(session): session.install('docutils', 'Pygments') session.run( - 'python', 'setup.py', 'check', '--restructuredtext', '--strict') + 'python', 'legacy/google-cloud/setup.py', 'check', '--restructuredtext', '--strict') From b3efe3beb3a9bf7d406aa227c29868d3a61c7012 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Mon, 21 May 2018 18:12:01 -0400 Subject: [PATCH 19/75] Tasks v2beta2 (#5352) --- tasks/LICENSE | 201 ++ tasks/MANIFEST.in | 5 + tasks/README.rst | 76 + tasks/docs/conf.py | 311 +++ tasks/docs/gapic/v2beta2/api.rst | 6 + tasks/docs/gapic/v2beta2/types.rst | 5 + tasks/docs/index.rst | 84 + tasks/google/__init__.py | 20 + tasks/google/cloud/__init__.py | 20 + tasks/google/cloud/tasks.py | 25 + tasks/google/cloud/tasks_v2beta2/__init__.py | 31 + .../cloud/tasks_v2beta2/gapic/__init__.py | 0 .../tasks_v2beta2/gapic/cloud_tasks_client.py | 1827 +++++++++++++++++ .../gapic/cloud_tasks_client_config.py | 123 ++ .../google/cloud/tasks_v2beta2/gapic/enums.py | 110 + .../cloud/tasks_v2beta2/proto/__init__.py | 0 .../tasks_v2beta2/proto/cloudtasks_pb2.py | 1676 +++++++++++++++ .../proto/cloudtasks_pb2_grpc.py | 550 +++++ .../cloud/tasks_v2beta2/proto/queue_pb2.py | 548 +++++ .../tasks_v2beta2/proto/queue_pb2_grpc.py | 3 + .../cloud/tasks_v2beta2/proto/target_pb2.py | 679 ++++++ .../tasks_v2beta2/proto/target_pb2_grpc.py | 3 + .../cloud/tasks_v2beta2/proto/task_pb2.py | 393 ++++ .../tasks_v2beta2/proto/task_pb2_grpc.py | 3 + tasks/google/cloud/tasks_v2beta2/types.py | 57 + tasks/nox.py | 52 + tasks/setup.cfg | 2 + tasks/setup.py | 74 + .../test_cloud_tasks_client_v2beta2.py | 735 +++++++ 29 files changed, 7619 insertions(+) create mode 100644 tasks/LICENSE create mode 100644 tasks/MANIFEST.in create mode 100644 tasks/README.rst create mode 100644 tasks/docs/conf.py create mode 100644 tasks/docs/gapic/v2beta2/api.rst create mode 100644 tasks/docs/gapic/v2beta2/types.rst create mode 100644 tasks/docs/index.rst create mode 100644 tasks/google/__init__.py create mode 100644 tasks/google/cloud/__init__.py create mode 100644 tasks/google/cloud/tasks.py create mode 100644 tasks/google/cloud/tasks_v2beta2/__init__.py create mode 100644 tasks/google/cloud/tasks_v2beta2/gapic/__init__.py create mode 100644 tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py create mode 100644 tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client_config.py create mode 100644 tasks/google/cloud/tasks_v2beta2/gapic/enums.py create mode 100644 tasks/google/cloud/tasks_v2beta2/proto/__init__.py create mode 100644 tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py create mode 100644 tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2_grpc.py create mode 100644 tasks/google/cloud/tasks_v2beta2/proto/queue_pb2.py create mode 100644 tasks/google/cloud/tasks_v2beta2/proto/queue_pb2_grpc.py create mode 100644 tasks/google/cloud/tasks_v2beta2/proto/target_pb2.py create mode 100644 tasks/google/cloud/tasks_v2beta2/proto/target_pb2_grpc.py create mode 100644 tasks/google/cloud/tasks_v2beta2/proto/task_pb2.py create mode 100644 tasks/google/cloud/tasks_v2beta2/proto/task_pb2_grpc.py create mode 100644 tasks/google/cloud/tasks_v2beta2/types.py create mode 100644 tasks/nox.py create mode 100644 tasks/setup.cfg create mode 100644 tasks/setup.py create mode 100644 tasks/tests/unit/gapic/v2beta2/test_cloud_tasks_client_v2beta2.py diff --git a/tasks/LICENSE 
b/tasks/LICENSE new file mode 100644 index 000000000000..a8ee855de2aa --- /dev/null +++ b/tasks/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+   You may obtain a copy of the License at
+
+       https://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/tasks/MANIFEST.in b/tasks/MANIFEST.in
new file mode 100644
index 000000000000..9cbf175afe6b
--- /dev/null
+++ b/tasks/MANIFEST.in
@@ -0,0 +1,5 @@
+include README.rst LICENSE
+recursive-include google *.json *.proto
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
diff --git a/tasks/README.rst b/tasks/README.rst
new file mode 100644
index 000000000000..503605b9d7b3
--- /dev/null
+++ b/tasks/README.rst
@@ -0,0 +1,76 @@
+Python Client for Cloud Tasks API (`Alpha`_)
+============================================
+
+`Cloud Tasks API`_: Manages the execution of large numbers of distributed requests. Cloud Tasks
+is in Alpha.
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. _Alpha: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst
+.. _Cloud Tasks API: https://cloud.google.com/cloudtasks
+.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/cloudtasks/usage.html
+.. _Product Documentation: https://cloud.google.com/cloudtasks
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Cloud Tasks API.`_
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Cloud Tasks API.: https://cloud.google.com/cloudtasks
+.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/stable/core/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-cloud-cloudtasks
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-cloud-cloudtasks
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for the Cloud Tasks API
+  to see other available methods on the client.
+- Read the `Cloud Tasks API Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `repository’s main README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Cloud Tasks API Product documentation: https://cloud.google.com/cloudtasks
+..
_repository’s main README: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst \ No newline at end of file diff --git a/tasks/docs/conf.py b/tasks/docs/conf.py new file mode 100644 index 000000000000..b2ccdea214f1 --- /dev/null +++ b/tasks/docs/conf.py @@ -0,0 +1,311 @@ +# -*- coding: utf-8 -*- +# +# google-cloud-cloudtasks documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath('..')) + +__version__ = '0.1.0' + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.intersphinx', + 'sphinx.ext.coverage', + 'sphinx.ext.napoleon', + 'sphinx.ext.viewcode', +] + +# autodoc/autosummary flags +autoclass_content = 'both' +autodoc_default_flags = ['members'] +autosummary_generate = True + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'google-cloud-cloudtasks' +copyright = u'2017, Google' +author = u'Google APIs' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = '.'.join(release.split('.')[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). 
+#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = [] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. 
+# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'google-cloud-cloudtasks-doc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + #'preamble': '', + + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'google-cloud-cloudtasks.tex', + u'google-cloud-cloudtasks Documentation', author, 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(master_doc, 'google-cloud-cloudtasks', + u'google-cloud-cloudtasks Documentation', [author], 1)] + +# If true, show URL addresses after external links. +#man_show_urls = False + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'google-cloud-cloudtasks', + u'google-cloud-cloudtasks Documentation', author, + 'google-cloud-cloudtasks', + 'GAPIC library for the {metadata.shortName} v2beta2 service', 'APIs'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = {
+    'python': ('http://python.readthedocs.org/en/latest/', None),
+    'gax': ('https://gax-python.readthedocs.org/en/latest/', None),
+}
+
+# Napoleon settings
+napoleon_google_docstring = True
+napoleon_numpy_docstring = True
+napoleon_include_private_with_doc = False
+napoleon_include_special_with_doc = True
+napoleon_use_admonition_for_examples = False
+napoleon_use_admonition_for_notes = False
+napoleon_use_admonition_for_references = False
+napoleon_use_ivar = False
+napoleon_use_param = True
+napoleon_use_rtype = True
diff --git a/tasks/docs/gapic/v2beta2/api.rst b/tasks/docs/gapic/v2beta2/api.rst
new file mode 100644
index 000000000000..80696d865a81
--- /dev/null
+++ b/tasks/docs/gapic/v2beta2/api.rst
@@ -0,0 +1,6 @@
+Client for Cloud Tasks API
+==========================
+
+.. automodule:: google.cloud.tasks_v2beta2
+    :members:
+    :inherited-members:
\ No newline at end of file
diff --git a/tasks/docs/gapic/v2beta2/types.rst b/tasks/docs/gapic/v2beta2/types.rst
new file mode 100644
index 000000000000..375c1887069d
--- /dev/null
+++ b/tasks/docs/gapic/v2beta2/types.rst
@@ -0,0 +1,5 @@
+Types for Cloud Tasks API Client
+================================
+
+.. automodule:: google.cloud.tasks_v2beta2.types
+    :members:
\ No newline at end of file
diff --git a/tasks/docs/index.rst b/tasks/docs/index.rst
new file mode 100644
index 000000000000..7eb985531f8c
--- /dev/null
+++ b/tasks/docs/index.rst
@@ -0,0 +1,84 @@
+Python Client for Cloud Tasks API (`Alpha`_)
+============================================
+
+`Cloud Tasks API`_: Manages the execution of large numbers of distributed requests. Cloud Tasks
+is in Alpha.
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. _Alpha: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst
+.. _Cloud Tasks API: https://cloud.google.com/cloudtasks
+.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/cloudtasks/usage.html
+.. _Product Documentation: https://cloud.google.com/cloudtasks
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Cloud Tasks API.`_
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Cloud Tasks API.: https://cloud.google.com/cloudtasks
+.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/stable/core/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-cloud-cloudtasks
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-cloud-cloudtasks
+
+Next Steps
+~~~~~~~~~~
+
+- Read the `Client Library Documentation`_ for the Cloud Tasks API
+  to see other available methods on the client.
+- Read the `Cloud Tasks API Product documentation`_ to learn
+  more about the product and see How-to Guides.
+- View this `repository’s main README`_ to see the full list of Cloud
+  APIs that we cover.
+
+.. _Cloud Tasks API Product documentation: https://cloud.google.com/cloudtasks
+.. _repository’s main README: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst
+
+API Reference
+-------------
+.. toctree::
+    :maxdepth: 2
+
+    gapic/v2beta2/api
+    gapic/v2beta2/types
\ No newline at end of file
diff --git a/tasks/google/__init__.py b/tasks/google/__init__.py
new file mode 100644
index 000000000000..855a707300e1
--- /dev/null
+++ b/tasks/google/__init__.py
@@ -0,0 +1,20 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+try:
+    import pkg_resources
+    pkg_resources.declare_namespace(__name__)
+except ImportError:
+    import pkgutil
+    __path__ = pkgutil.extend_path(__path__, __name__)
\ No newline at end of file
diff --git a/tasks/google/cloud/__init__.py b/tasks/google/cloud/__init__.py
new file mode 100644
index 000000000000..855a707300e1
--- /dev/null
+++ b/tasks/google/cloud/__init__.py
@@ -0,0 +1,20 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+try:
+    import pkg_resources
+    pkg_resources.declare_namespace(__name__)
+except ImportError:
+    import pkgutil
+    __path__ = pkgutil.extend_path(__path__, __name__)
\ No newline at end of file
diff --git a/tasks/google/cloud/tasks.py b/tasks/google/cloud/tasks.py
new file mode 100644
index 000000000000..51985b958788
--- /dev/null
+++ b/tasks/google/cloud/tasks.py
@@ -0,0 +1,25 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
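+
+# A usage sketch of this alias module (an illustration; it assumes
+# application default credentials are configured in the environment):
+#
+#     from google.cloud import tasks
+#     client = tasks.CloudTasksClient()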
+ +from __future__ import absolute_import + +from google.cloud.tasks_v2beta2 import CloudTasksClient +from google.cloud.tasks_v2beta2 import enums +from google.cloud.tasks_v2beta2 import types + +__all__ = ( + 'enums', + 'types', + 'CloudTasksClient', +) diff --git a/tasks/google/cloud/tasks_v2beta2/__init__.py b/tasks/google/cloud/tasks_v2beta2/__init__.py new file mode 100644 index 000000000000..00a035a4386c --- /dev/null +++ b/tasks/google/cloud/tasks_v2beta2/__init__.py @@ -0,0 +1,31 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.tasks_v2beta2 import types +from google.cloud.tasks_v2beta2.gapic import cloud_tasks_client +from google.cloud.tasks_v2beta2.gapic import enums + + +class CloudTasksClient(cloud_tasks_client.CloudTasksClient): + __doc__ = cloud_tasks_client.CloudTasksClient.__doc__ + enums = enums + + +__all__ = ( + 'enums', + 'types', + 'CloudTasksClient', +) diff --git a/tasks/google/cloud/tasks_v2beta2/gapic/__init__.py b/tasks/google/cloud/tasks_v2beta2/gapic/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py b/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py new file mode 100644 index 000000000000..6a7fb430d0d8 --- /dev/null +++ b/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py @@ -0,0 +1,1827 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Accesses the google.cloud.tasks.v2beta2 CloudTasks API.""" + +import functools +import pkg_resources + +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import google.api_core.path_template +import grpc + +from google.cloud.tasks_v2beta2.gapic import cloud_tasks_client_config +from google.cloud.tasks_v2beta2.gapic import enums +from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2 +from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2_grpc +from google.cloud.tasks_v2beta2.proto import queue_pb2 +from google.cloud.tasks_v2beta2.proto import task_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 +from google.protobuf import timestamp_pb2 + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + 'google-cloud-cloudtasks', ).version + + +class CloudTasksClient(object): + """ + Cloud Tasks allows developers to manage the execution of background + work in their applications. + """ + + SERVICE_ADDRESS = 'cloudtasks.googleapis.com:443' + """The default address of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ('https://www.googleapis.com/auth/cloud-platform', ) + + # The name of the interface for this client. This is the key used to find + # method configuration in the client_config dictionary. + _INTERFACE_NAME = 'google.cloud.tasks.v2beta2.CloudTasks' + + @classmethod + def location_path(cls, project, location): + """Return a fully-qualified location string.""" + return google.api_core.path_template.expand( + 'projects/{project}/locations/{location}', + project=project, + location=location, + ) + + @classmethod + def queue_path(cls, project, location, queue): + """Return a fully-qualified queue string.""" + return google.api_core.path_template.expand( + 'projects/{project}/locations/{location}/queues/{queue}', + project=project, + location=location, + queue=queue, + ) + + @classmethod + def task_path(cls, project, location, queue, task): + """Return a fully-qualified task string.""" + return google.api_core.path_template.expand( + 'projects/{project}/locations/{location}/queues/{queue}/tasks/{task}', + project=project, + location=location, + queue=queue, + task=task, + ) + + def __init__(self, + channel=None, + credentials=None, + client_config=cloud_tasks_client_config.config, + client_info=None): + """Constructor. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_config (dict): A dictionary of call options for each + method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
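+
+        Example:
+            A minimal sketch of supplying a pre-built channel (an
+            illustration, not the only supported pattern; ``create_channel``
+            is the same ``google.api_core`` helper this constructor uses
+            internally when no channel is given):
+
+            >>> from google.cloud import tasks_v2beta2
+            >>> import google.api_core.grpc_helpers
+            >>>
+            >>> channel = google.api_core.grpc_helpers.create_channel(
+            ...     tasks_v2beta2.CloudTasksClient.SERVICE_ADDRESS)
+            >>> client = tasks_v2beta2.CloudTasksClient(channel=channel)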
+        """
+        # If both `channel` and `credentials` are specified, raise an
+        # exception (channels come with credentials baked in already).
+        if channel is not None and credentials is not None:
+            raise ValueError(
+                'The `channel` and `credentials` arguments to {} are mutually '
+                'exclusive.'.format(self.__class__.__name__), )
+
+        # Create the channel.
+        self.channel = channel
+        if self.channel is None:
+            self.channel = google.api_core.grpc_helpers.create_channel(
+                self.SERVICE_ADDRESS,
+                credentials=credentials,
+                scopes=self._DEFAULT_SCOPES,
+            )
+
+        # Create the gRPC stubs.
+        self._cloud_tasks_stub = (cloudtasks_pb2_grpc.CloudTasksStub(
+            self.channel))
+
+        if client_info is None:
+            client_info = (
+                google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO)
+        client_info.gapic_version = _GAPIC_LIBRARY_VERSION
+        self._client_info = client_info
+
+        # Parse out the default settings for retry and timeout for each RPC
+        # from the client configuration.
+        # (Ordinarily, these are the defaults specified in the `*_config.py`
+        # file next to this one.)
+        self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
+            client_config['interfaces'][self._INTERFACE_NAME], )
+
+        self._inner_api_calls = {}
+
+    def _intercept_channel(self, *interceptors):
+        """ Experimental. Bind gRPC interceptors to the gRPC channel.
+
+        Args:
+            interceptors (*Union[grpc.UnaryUnaryClientInterceptor, grpc.UnaryStreamClientInterceptor, grpc.StreamUnaryClientInterceptor, grpc.StreamStreamClientInterceptor]):
+              Zero or more gRPC interceptors. Interceptors are given control in the order
+              they are listed.
+        Raises:
+            TypeError: If an interceptor does not derive from any of
+              UnaryUnaryClientInterceptor,
+              UnaryStreamClientInterceptor,
+              StreamUnaryClientInterceptor, or
+              StreamStreamClientInterceptor.
+        """
+        self.channel = grpc.intercept_channel(self.channel, *interceptors)
+        self._cloud_tasks_stub = (cloudtasks_pb2_grpc.CloudTasksStub(
+            self.channel))
+        self._inner_api_calls.clear()
+
+    # Service calls
+    def list_queues(self,
+                    parent,
+                    filter_=None,
+                    page_size=None,
+                    retry=google.api_core.gapic_v1.method.DEFAULT,
+                    timeout=google.api_core.gapic_v1.method.DEFAULT,
+                    metadata=None):
+        """
+        Lists queues.
+
+        Queues are returned in lexicographical order.
+
+        Example:
+            >>> from google.cloud import tasks_v2beta2
+            >>>
+            >>> client = tasks_v2beta2.CloudTasksClient()
+            >>>
+            >>> parent = client.location_path('[PROJECT]', '[LOCATION]')
+            >>>
+            >>>
+            >>> # Iterate over all results
+            >>> for element in client.list_queues(parent):
+            ...     # process element
+            ...     pass
+            >>>
+            >>> # Or iterate over results one page at a time
+            >>> for page in client.list_queues(parent, options=CallOptions(page_token=INITIAL_PAGE)):
+            ...     for element in page:
+            ...         # process element
+            ...         pass
+
+        Args:
+            parent (str): Required.
+
+                The location name.
+                For example: ``projects/PROJECT_ID/locations/LOCATION_ID``
+            filter_ (str): ``filter`` can be used to specify a subset of queues. Any ``Queue``
+                field can be used as a filter and several operators are supported.
+                For example: ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as
+                described in
+                `Stackdriver's Advanced Logs Filters `_.
+
+                Sample filter \"app_engine_http_target: *\".
+
+                Note that using filters might cause fewer queues than the
+                requested page size to be returned.
+            page_size (int): The maximum number of resources contained in the
+                underlying API response. If page streaming is performed per-
+                resource, this parameter does not affect the return value.
If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.tasks_v2beta2.types.Queue` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if 'list_queues' not in self._inner_api_calls: + self._inner_api_calls[ + 'list_queues'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.ListQueues, + default_retry=self._method_configs['ListQueues'].retry, + default_timeout=self._method_configs['ListQueues'].timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.ListQueuesRequest( + parent=parent, + filter=filter_, + page_size=page_size, + ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls['list_queues'], + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='queues', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator + + def get_queue(self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Gets a queue. + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + >>> + >>> response = client.get_queue(name) + + Args: + name (str): Required. + + The resource name of the queue. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
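+
+        A sketch of tolerating a missing queue (assumes the standard
+        ``google.api_core.exceptions`` classes that this client raises):
+
+            >>> from google.api_core import exceptions
+            >>>
+            >>> try:
+            ...     queue = client.get_queue(name)
+            ... except exceptions.NotFound:
+            ...     queue = None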
+ """ + if 'get_queue' not in self._inner_api_calls: + self._inner_api_calls[ + 'get_queue'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.GetQueue, + default_retry=self._method_configs['GetQueue'].retry, + default_timeout=self._method_configs['GetQueue'].timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.GetQueueRequest(name=name, ) + return self._inner_api_calls['get_queue']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def create_queue(self, + parent, + queue, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Creates a queue. + + Queues created with this method allow tasks to live for a maximum of 31 + days. After a task is 31 days old, the task will be deleted regardless of whether + it was dispatched or not. + + WARNING: Using this method may have unintended side effects if you are + using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your queues. + Read + `Overview of Queue Management and queue.yaml `_ + before using this method. + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # TODO: Initialize ``queue``: + >>> queue = {} + >>> + >>> response = client.create_queue(parent, queue) + + Args: + parent (str): Required. + + The location name in which the queue will be created. + For example: ``projects/PROJECT_ID/locations/LOCATION_ID`` + + The list of allowed locations can be obtained by calling Cloud + Tasks' implementation of + ``ListLocations``. + queue (Union[dict, ~google.cloud.tasks_v2beta2.types.Queue]): Required. + + The queue to create. + + ``Queue's name`` cannot be the same as an existing queue. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.tasks_v2beta2.types.Queue` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if 'create_queue' not in self._inner_api_calls: + self._inner_api_calls[ + 'create_queue'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.CreateQueue, + default_retry=self._method_configs['CreateQueue'].retry, + default_timeout=self._method_configs['CreateQueue'] + .timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.CreateQueueRequest( + parent=parent, + queue=queue, + ) + return self._inner_api_calls['create_queue']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def update_queue(self, + queue, + update_mask=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Updates a queue. 
+ + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a maximum of 31 + days. After a task is 31 days old, the task will be deleted regardless of whether + it was dispatched or not. + + WARNING: Using this method may have unintended side effects if you are + using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your queues. + Read + `Overview of Queue Management and queue.yaml `_ + before using this method. + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> # TODO: Initialize ``queue``: + >>> queue = {} + >>> + >>> response = client.update_queue(queue) + + Args: + queue (Union[dict, ~google.cloud.tasks_v2beta2.types.Queue]): Required. + + The queue to create or update. + + The queue's ``name`` must be specified. + + Output only fields cannot be modified using UpdateQueue. + Any value specified for an output only field will be ignored. + The queue's ``name`` cannot be changed. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.tasks_v2beta2.types.Queue` + update_mask (Union[dict, ~google.cloud.tasks_v2beta2.types.FieldMask]): A mask used to specify which fields of the queue are being updated. + + If empty, then all fields will be updated. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.tasks_v2beta2.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if 'update_queue' not in self._inner_api_calls: + self._inner_api_calls[ + 'update_queue'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.UpdateQueue, + default_retry=self._method_configs['UpdateQueue'].retry, + default_timeout=self._method_configs['UpdateQueue'] + .timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.UpdateQueueRequest( + queue=queue, + update_mask=update_mask, + ) + return self._inner_api_calls['update_queue']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def delete_queue(self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be created + for 7 days. + + WARNING: Using this method may have unintended side effects if you are + using an App Engine ``queue.yaml`` or ``queue.xml`` file to manage your queues. + Read + `Overview of Queue Management and queue.yaml `_ + before using this method. 
+ + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + >>> + >>> client.delete_queue(name) + + Args: + name (str): Required. + + The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if 'delete_queue' not in self._inner_api_calls: + self._inner_api_calls[ + 'delete_queue'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.DeleteQueue, + default_retry=self._method_configs['DeleteQueue'].retry, + default_timeout=self._method_configs['DeleteQueue'] + .timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.DeleteQueueRequest(name=name, ) + self._inner_api_calls['delete_queue']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def purge_queue(self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Purges a queue by deleting all of its tasks. + + All tasks created before this method is called are permanently deleted. + + Purge operations can take up to one minute to take effect. Tasks + might be dispatched before the purge takes effect. A purge is irreversible. + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + >>> + >>> response = client.purge_queue(name) + + Args: + name (str): Required. + + The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
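+
+        Because a purge is irreversible, one cautious pattern (an
+        illustration, not an API requirement) is to pause the queue,
+        purge it, and only then resume it:
+
+            >>> client.pause_queue(name)
+            >>> response = client.purge_queue(name)
+            >>> response = client.resume_queue(name)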
+ """ + if 'purge_queue' not in self._inner_api_calls: + self._inner_api_calls[ + 'purge_queue'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.PurgeQueue, + default_retry=self._method_configs['PurgeQueue'].retry, + default_timeout=self._method_configs['PurgeQueue'].timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.PurgeQueueRequest(name=name, ) + return self._inner_api_calls['purge_queue']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def pause_queue(self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Pauses the queue. + + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + ``ResumeQueue``. Tasks can still be added + when the queue is paused. A queue is paused if its + ``state`` is ``PAUSED``. + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + >>> + >>> response = client.pause_queue(name) + + Args: + name (str): Required. + + The queue name. For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if 'pause_queue' not in self._inner_api_calls: + self._inner_api_calls[ + 'pause_queue'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.PauseQueue, + default_retry=self._method_configs['PauseQueue'].retry, + default_timeout=self._method_configs['PauseQueue'].timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.PauseQueueRequest(name=name, ) + return self._inner_api_calls['pause_queue']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def resume_queue(self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Resume a queue. + + This method resumes a queue after it has been + ``PAUSED`` or + ``DISABLED``. The state of a queue is stored + in the queue's ``state``; after calling this method it + will be set to ``RUNNING``. + + WARNING: Resuming many high-QPS queues at the same time can + lead to target overloading. If you are resuming high-QPS + queues, follow the 500/50/5 pattern described in + `Managing Cloud Tasks Scaling Risks `_. + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + >>> + >>> response = client.resume_queue(name) + + Args: + name (str): Required. + + The queue name. 
For example: + ``projects/PROJECT_ID/location/LOCATION_ID/queues/QUEUE_ID`` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.tasks_v2beta2.types.Queue` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if 'resume_queue' not in self._inner_api_calls: + self._inner_api_calls[ + 'resume_queue'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.ResumeQueue, + default_retry=self._method_configs['ResumeQueue'].retry, + default_timeout=self._method_configs['ResumeQueue'] + .timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.ResumeQueueRequest(name=name, ) + return self._inner_api_calls['resume_queue']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def get_iam_policy(self, + resource, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Gets the access control policy for a ``Queue``. + Returns an empty policy if the resource exists and does not have a policy + set. + + Authorization requires the following `Google IAM `_ permission on the + specified resource parent: + + * ``cloudtasks.queues.getIamPolicy`` + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> resource = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + >>> + >>> response = client.get_iam_policy(resource) + + Args: + resource (str): REQUIRED: The resource for which the policy is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.tasks_v2beta2.types.Policy` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
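+
+        A sketch of reading the returned bindings (field names follow the
+        standard IAM ``Policy`` message):
+
+            >>> policy = client.get_iam_policy(resource)
+            >>> for binding in policy.bindings:
+            ...     print(binding.role, list(binding.members))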
+ """ + if 'get_iam_policy' not in self._inner_api_calls: + self._inner_api_calls[ + 'get_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.GetIamPolicy, + default_retry=self._method_configs['GetIamPolicy'].retry, + default_timeout=self._method_configs['GetIamPolicy'] + .timeout, + client_info=self._client_info, + ) + + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) + return self._inner_api_calls['get_iam_policy']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def set_iam_policy(self, + resource, + policy, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Sets the access control policy for a ``Queue``. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM permissions yet. + Project-level permissions are required to use the Cloud Console. + + Authorization requires the following `Google IAM `_ permission on the + specified resource parent: + + * ``cloudtasks.queues.setIamPolicy`` + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> resource = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + >>> + >>> # TODO: Initialize ``policy``: + >>> policy = {} + >>> + >>> response = client.set_iam_policy(resource, policy) + + Args: + resource (str): REQUIRED: The resource for which the policy is being specified. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + policy (Union[dict, ~google.cloud.tasks_v2beta2.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The size of + the policy is limited to a few 10s of KB. An empty policy is a + valid policy but certain Cloud Platform services (such as Projects) + might reject them. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.tasks_v2beta2.types.Policy` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.tasks_v2beta2.types.Policy` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
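+
+        A sketch of a filled-in ``policy`` dict (the role and member shown
+        are illustrative assumptions, not values required by the API):
+
+            >>> policy = {
+            ...     'bindings': [
+            ...         {
+            ...             'role': 'roles/cloudtasks.queueAdmin',
+            ...             'members': ['user:jane@example.com'],
+            ...         },
+            ...     ],
+            ... }
+            >>> response = client.set_iam_policy(resource, policy)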
+ """ + if 'set_iam_policy' not in self._inner_api_calls: + self._inner_api_calls[ + 'set_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.SetIamPolicy, + default_retry=self._method_configs['SetIamPolicy'].retry, + default_timeout=self._method_configs['SetIamPolicy'] + .timeout, + client_info=self._client_info, + ) + + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, + policy=policy, + ) + return self._inner_api_calls['set_iam_policy']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def test_iam_permissions(self, + resource, + permissions, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Returns permissions that a caller has on a ``Queue``. + If the resource does not exist, this will return an empty set of + permissions, not a ``NOT_FOUND`` error. + + Note: This operation is designed to be used for building permission-aware + UIs and command-line tools, not for authorization checking. This operation + may \"fail open\" without warning. + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> resource = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + >>> + >>> # TODO: Initialize ``permissions``: + >>> permissions = [] + >>> + >>> response = client.test_iam_permissions(resource, permissions) + + Args: + resource (str): REQUIRED: The resource for which the policy detail is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + permissions (list[str]): The set of permissions to check for the ``resource``. Permissions with + wildcards (such as '*' or 'storage.*') are not allowed. For more + information see + `IAM Overview `_. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.tasks_v2beta2.types.TestIamPermissionsResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if 'test_iam_permissions' not in self._inner_api_calls: + self._inner_api_calls[ + 'test_iam_permissions'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.TestIamPermissions, + default_retry=self._method_configs[ + 'TestIamPermissions'].retry, + default_timeout=self._method_configs['TestIamPermissions'] + .timeout, + client_info=self._client_info, + ) + + request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, + permissions=permissions, + ) + return self._inner_api_calls['test_iam_permissions']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def list_tasks(self, + parent, + response_view=None, + order_by=None, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists the tasks in a queue. 
+ + By default, only the ``BASIC`` view is retrieved + due to performance considerations; + ``response_view`` controls the + subset of information which is returned. + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_tasks(parent): + ... # process element + ... pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_tasks(parent, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): Required. + + The queue name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + response_view (~google.cloud.tasks_v2beta2.types.View): The response_view specifies which subset of the ``Task`` will be + returned. + + By default response_view is ``BASIC``; not all + information is retrieved by default because some data, such as + payloads, might be desirable to return only when needed because + of its large size or because of the sensitivity of data that it + contains. + + Authorization for ``FULL`` requires + ``cloudtasks.tasks.fullView`` `Google IAM `_ permission on the + ``Task`` resource. + order_by (str): Sort order used for the query. The only fields supported for sorting + are ``schedule_time`` and ``pull_message.tag``. All results will be + returned in approximately ascending order. The default ordering is by + ``schedule_time``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.tasks_v2beta2.types.Task` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
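+
+ A minimal sketch of draining the iterator with an explicit page
+ size; ``name`` is a standard ``Task`` field:
+
+ >>> task_names = [task.name
+ ...               for task in client.list_tasks(parent, page_size=10)]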
+ """ + if 'list_tasks' not in self._inner_api_calls: + self._inner_api_calls[ + 'list_tasks'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.ListTasks, + default_retry=self._method_configs['ListTasks'].retry, + default_timeout=self._method_configs['ListTasks'].timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.ListTasksRequest( + parent=parent, + response_view=response_view, + order_by=order_by, + page_size=page_size, + ) + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls['list_tasks'], + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='tasks', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator + + def get_task(self, + name, + response_view=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Gets a task. + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + >>> + >>> response = client.get_task(name) + + Args: + name (str): Required. + + The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (~google.cloud.tasks_v2beta2.types.View): The response_view specifies which subset of the ``Task`` will be + returned. + + By default response_view is ``BASIC``; not all + information is retrieved by default because some data, such as + payloads, might be desirable to return only when needed because + of its large size or because of the sensitivity of data that it + contains. + + Authorization for ``FULL`` requires + ``cloudtasks.tasks.fullView`` `Google IAM `_ permission on the + ``Task`` resource. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if 'get_task' not in self._inner_api_calls: + self._inner_api_calls[ + 'get_task'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.GetTask, + default_retry=self._method_configs['GetTask'].retry, + default_timeout=self._method_configs['GetTask'].timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.GetTaskRequest( + name=name, + response_view=response_view, + ) + return self._inner_api_calls['get_task']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def create_task(self, + parent, + task, + response_view=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Creates a task and adds it to a queue. 
+
+ To add multiple tasks at the same time, use
+ `HTTP batching `_
+ or the batching documentation for your client library, for example
+ https://developers.google.com/api-client-library/python/guide/batch.
+
+ Tasks cannot be updated after creation; there is no UpdateTask command.
+
+ * For `App Engine queues `_,
+ the maximum task size is 100KB.
+ * For `pull queues `_,
+ the maximum task size is 1MB.
+
+ Example:
+ >>> from google.cloud import tasks_v2beta2
+ >>>
+ >>> client = tasks_v2beta2.CloudTasksClient()
+ >>>
+ >>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
+ >>>
+ >>> # TODO: Initialize ``task``:
+ >>> task = {}
+ >>>
+ >>> response = client.create_task(parent, task)
+
+ Args:
+ parent (str): Required.
+
+ The queue name. For example:
+ ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+
+ The queue must already exist.
+ task (Union[dict, ~google.cloud.tasks_v2beta2.types.Task]): Required.
+
+ The task to add.
+
+ Task names have the following format:
+ ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``.
+ The user can optionally specify a task ``name``. If a
+ name is not specified then the system will generate a random
+ unique task id, which will be set in the task returned in the
+ ``response``.
+
+ If ``schedule_time`` is not set or is in the
+ past then Cloud Tasks will set it to the current time.
+
+ Task De-duplication:
+
+ Explicitly specifying a task ID enables task de-duplication. If
+ a task's ID is identical to that of an existing task or a task
+ that was deleted or completed recently then the call will fail
+ with ``ALREADY_EXISTS``.
+ If the task's queue was created using Cloud Tasks, then another task with
+ the same name can't be created for ~1 hour after the original task was
+ deleted or completed. If the task's queue was created using queue.yaml or
+ queue.xml, then another task with the same name can't be created
+ for ~9 days after the original task was deleted or completed.
+
+ Because there is an extra lookup cost to identify duplicate task
+ names, these ``CreateTask`` calls have significantly
+ increased latency. Using hashed strings for the task id or for
+ the prefix of the task id is recommended. Choosing task ids that
+ are sequential or have sequential prefixes, for example using a
+ timestamp, causes an increase in latency and error rates in all
+ task commands. The infrastructure relies on an approximately
+ uniform distribution of task ids to store and serve tasks
+ efficiently.
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.tasks_v2beta2.types.Task`
+ response_view (~google.cloud.tasks_v2beta2.types.View): The response_view specifies which subset of the ``Task`` will be
+ returned.
+
+ By default response_view is ``BASIC``; not all
+ information is retrieved by default because some data, such as
+ payloads, might be desirable to return only when needed because
+ of its large size or because of the sensitivity of data that it
+ contains.
+
+ Authorization for ``FULL`` requires
+ ``cloudtasks.tasks.fullView`` `Google IAM `_ permission on the
+ ``Task`` resource.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will not
+ be retried.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if 'create_task' not in self._inner_api_calls: + self._inner_api_calls[ + 'create_task'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.CreateTask, + default_retry=self._method_configs['CreateTask'].retry, + default_timeout=self._method_configs['CreateTask'].timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.CreateTaskRequest( + parent=parent, + task=task, + response_view=response_view, + ) + return self._inner_api_calls['create_task']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def delete_task(self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Deletes a task. + + A task can be deleted if it is scheduled or dispatched. A task + cannot be deleted if it has completed successfully or permanently + failed. + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + >>> + >>> client.delete_task(name) + + Args: + name (str): Required. + + The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if 'delete_task' not in self._inner_api_calls: + self._inner_api_calls[ + 'delete_task'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.DeleteTask, + default_retry=self._method_configs['DeleteTask'].retry, + default_timeout=self._method_configs['DeleteTask'].timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.DeleteTaskRequest(name=name, ) + self._inner_api_calls['delete_task']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def lease_tasks(self, + parent, + lease_duration, + max_tasks=None, + response_view=None, + filter_=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Leases tasks from a pull queue for + ``lease_duration``. + + This method is invoked by the worker to obtain a lease. The + worker must acknowledge the task via + ``AcknowledgeTask`` after they have + performed the work associated with the task. + + The ``payload`` is intended to store data that + the worker needs to perform the work associated with the task. 
To
+ return the payloads in the ``response``, set
+ ``response_view`` to
+ ``FULL``.
+
+ A maximum of 10 qps of ``LeaseTasks``
+ requests are allowed per
+ queue. ``RESOURCE_EXHAUSTED``
+ is returned when this limit is
+ exceeded. ``RESOURCE_EXHAUSTED``
+ is also returned when
+ ``max_tasks_dispatched_per_second``
+ is exceeded.
+
+ Example:
+ >>> from google.cloud import tasks_v2beta2
+ >>>
+ >>> client = tasks_v2beta2.CloudTasksClient()
+ >>>
+ >>> parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]')
+ >>>
+ >>> # TODO: Initialize ``lease_duration``:
+ >>> lease_duration = {}
+ >>>
+ >>> response = client.lease_tasks(parent, lease_duration)
+
+ Args:
+ parent (str): Required.
+
+ The queue name. For example:
+ ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+ lease_duration (Union[dict, ~google.cloud.tasks_v2beta2.types.Duration]): After the worker has successfully finished the work associated
+ with the task, the worker must call via
+ ``AcknowledgeTask`` before the
+ ``schedule_time``. Otherwise the task will be
+ returned to a later ``LeaseTasks`` call so
+ that another worker can retry it.
+
+ The maximum lease duration is 1 week.
+ ``lease_duration`` will be truncated to the nearest second.
+ If a dict is provided, it must be of the same form as the protobuf
+ message :class:`~google.cloud.tasks_v2beta2.types.Duration`
+ max_tasks (int): The maximum number of tasks to lease. The maximum that can be
+ requested is 1000.
+ response_view (~google.cloud.tasks_v2beta2.types.View): The response_view specifies which subset of the ``Task`` will be
+ returned.
+
+ By default response_view is ``BASIC``; not all
+ information is retrieved by default because some data, such as
+ payloads, might be desirable to return only when needed because
+ of its large size or because of the sensitivity of data that it
+ contains.
+
+ Authorization for ``FULL`` requires
+ ``cloudtasks.tasks.fullView`` `Google IAM `_ permission on the
+ ``Task`` resource.
+ filter_ (str): ``filter`` can be used to specify a subset of tasks to lease.
+
+ When ``filter`` is set to ``tag=<my-tag>`` then the
+ ``response`` will contain only tasks whose
+ ``tag`` is equal to ``<my-tag>``. ``<my-tag>`` must be
+ less than 500 characters.
+
+ When ``filter`` is set to ``tag_function=oldest_tag()``, only tasks which have
+ the same tag as the task with the oldest
+ ``schedule_time`` will be returned.
+
+ Grammar Syntax:
+
+ * ``filter = \"tag=\" tag | \"tag_function=\" function``
+
+ * ``tag = string``
+
+ * ``function = \"oldest_tag()\"``
+
+ The ``oldest_tag()`` function returns tasks which have the same tag as the
+ oldest task (ordered by schedule time).
+
+ SDK compatibility: Although the SDK allows tags to be either
+ string or
+ `bytes `_,
+ only UTF-8 encoded tags can be used in Cloud Tasks. Tags that
+ aren't UTF-8 encoded can't be used in the
+ ``filter`` and the task's
+ ``tag`` will be displayed as empty in Cloud Tasks.
+ retry (Optional[google.api_core.retry.Retry]): A retry object used
+ to retry requests. If ``None`` is specified, requests will not
+ be retried.
+ timeout (Optional[float]): The amount of time, in seconds, to wait
+ for the request to complete. Note that if ``retry`` is
+ specified, the timeout applies to each individual attempt.
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+ that is provided to the method.
+
+ Returns:
+ A :class:`~google.cloud.tasks_v2beta2.types.LeaseTasksResponse` instance.
+ + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if 'lease_tasks' not in self._inner_api_calls: + self._inner_api_calls[ + 'lease_tasks'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.LeaseTasks, + default_retry=self._method_configs['LeaseTasks'].retry, + default_timeout=self._method_configs['LeaseTasks'].timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.LeaseTasksRequest( + parent=parent, + lease_duration=lease_duration, + max_tasks=max_tasks, + response_view=response_view, + filter=filter_, + ) + return self._inner_api_calls['lease_tasks']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def acknowledge_task(self, + name, + schedule_time, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Acknowledges a pull task. + + The worker, that is, the entity that + ``leased`` this task must call this method + to indicate that the work associated with the task has finished. + + The worker must acknowledge a task within the + ``lease_duration`` or the lease + will expire and the task will become available to be leased + again. After the task is acknowledged, it will not be returned + by a later ``LeaseTasks``, + ``GetTask``, or + ``ListTasks``. + + To acknowledge multiple tasks at the same time, use + `HTTP batching `_ + or the batching documentation for your client library, for example + https://developers.google.com/api-client-library/python/guide/batch. + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + >>> + >>> # TODO: Initialize ``schedule_time``: + >>> schedule_time = {} + >>> + >>> client.acknowledge_task(name, schedule_time) + + Args: + name (str): Required. + + The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + schedule_time (Union[dict, ~google.cloud.tasks_v2beta2.types.Timestamp]): Required. + + The task's current schedule time, available in the + ``schedule_time`` returned by + ``LeaseTasks`` response or + ``RenewLease`` response. This restriction is + to ensure that your worker currently holds the lease. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.tasks_v2beta2.types.Timestamp` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
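+
+ A minimal lease-then-acknowledge sketch, assuming ``lease_tasks``
+ from this client and a hypothetical ``process(task)`` helper:
+
+ >>> lease = client.lease_tasks(parent, {'seconds': 600})
+ >>> for task in lease.tasks:
+ ...     process(task)  # hypothetical worker logic
+ ...     client.acknowledge_task(task.name, task.schedule_time)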
+ """ + if 'acknowledge_task' not in self._inner_api_calls: + self._inner_api_calls[ + 'acknowledge_task'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.AcknowledgeTask, + default_retry=self._method_configs[ + 'AcknowledgeTask'].retry, + default_timeout=self._method_configs['AcknowledgeTask'] + .timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.AcknowledgeTaskRequest( + name=name, + schedule_time=schedule_time, + ) + self._inner_api_calls['acknowledge_task']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def renew_lease(self, + name, + schedule_time, + lease_duration, + response_view=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Renew the current lease of a pull task. + + The worker can use this method to extend the lease by a new + duration, starting from now. The new task lease will be + returned in the task's ``schedule_time``. + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + >>> + >>> # TODO: Initialize ``schedule_time``: + >>> schedule_time = {} + >>> + >>> # TODO: Initialize ``lease_duration``: + >>> lease_duration = {} + >>> + >>> response = client.renew_lease(name, schedule_time, lease_duration) + + Args: + name (str): Required. + + The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + schedule_time (Union[dict, ~google.cloud.tasks_v2beta2.types.Timestamp]): Required. + + The task's current schedule time, available in the + ``schedule_time`` returned by + ``LeaseTasks`` response or + ``RenewLease`` response. This restriction is + to ensure that your worker currently holds the lease. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.tasks_v2beta2.types.Timestamp` + lease_duration (Union[dict, ~google.cloud.tasks_v2beta2.types.Duration]): Required. + + The desired new lease duration, starting from now. + + + The maximum lease duration is 1 week. + ``lease_duration`` will be truncated to the nearest second. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.tasks_v2beta2.types.Duration` + response_view (~google.cloud.tasks_v2beta2.types.View): The response_view specifies which subset of the ``Task`` will be + returned. + + By default response_view is ``BASIC``; not all + information is retrieved by default because some data, such as + payloads, might be desirable to return only when needed because + of its large size or because of the sensitivity of data that it + contains. + + Authorization for ``FULL`` requires + ``cloudtasks.tasks.fullView`` `Google IAM `_ permission on the + ``Task`` resource. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. 
+ google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if 'renew_lease' not in self._inner_api_calls: + self._inner_api_calls[ + 'renew_lease'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.RenewLease, + default_retry=self._method_configs['RenewLease'].retry, + default_timeout=self._method_configs['RenewLease'].timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.RenewLeaseRequest( + name=name, + schedule_time=schedule_time, + lease_duration=lease_duration, + response_view=response_view, + ) + return self._inner_api_calls['renew_lease']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def cancel_lease(self, + name, + schedule_time, + response_view=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Cancel a pull task's lease. + + The worker can use this method to cancel a task's lease by + setting its ``schedule_time`` to now. This will + make the task available to be leased to the next caller of + ``LeaseTasks``. + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + >>> + >>> # TODO: Initialize ``schedule_time``: + >>> schedule_time = {} + >>> + >>> response = client.cancel_lease(name, schedule_time) + + Args: + name (str): Required. + + The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + schedule_time (Union[dict, ~google.cloud.tasks_v2beta2.types.Timestamp]): Required. + + The task's current schedule time, available in the + ``schedule_time`` returned by + ``LeaseTasks`` response or + ``RenewLease`` response. This restriction is + to ensure that your worker currently holds the lease. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.tasks_v2beta2.types.Timestamp` + response_view (~google.cloud.tasks_v2beta2.types.View): The response_view specifies which subset of the ``Task`` will be + returned. + + By default response_view is ``BASIC``; not all + information is retrieved by default because some data, such as + payloads, might be desirable to return only when needed because + of its large size or because of the sensitivity of data that it + contains. + + Authorization for ``FULL`` requires + ``cloudtasks.tasks.fullView`` `Google IAM `_ permission on the + ``Task`` resource. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
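+
+ A minimal sketch pairing this with ``lease_tasks``: if the
+ hypothetical ``process(task)`` helper fails, hand the task back
+ immediately instead of waiting for the lease to expire:
+
+ >>> try:
+ ...     process(task)  # hypothetical worker logic
+ ... except Exception:
+ ...     client.cancel_lease(task.name, task.schedule_time)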
+ """ + if 'cancel_lease' not in self._inner_api_calls: + self._inner_api_calls[ + 'cancel_lease'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.CancelLease, + default_retry=self._method_configs['CancelLease'].retry, + default_timeout=self._method_configs['CancelLease'] + .timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.CancelLeaseRequest( + name=name, + schedule_time=schedule_time, + response_view=response_view, + ) + return self._inner_api_calls['cancel_lease']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def run_task(self, + name, + response_view=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, even if + the task is already running, the queue has reached its ``RateLimits`` or + is ``PAUSED``. + + This command is meant to be used for manual debugging. For + example, ``RunTask`` can be used to retry a failed + task after a fix has been made or to manually force a task to be + dispatched now. + + The dispatched task is returned. That is, the task that is returned + contains the ``status`` after the task is dispatched but + before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + ``schedule_time`` will be reset to the time that + ``RunTask`` was called plus the retry delay specified + in the queue's ``RetryConfig``. + + ``RunTask`` returns + ``NOT_FOUND`` when it is called on a + task that has already succeeded or permanently failed. + + ``RunTask`` cannot be called on a + ``pull task``. + + Example: + >>> from google.cloud import tasks_v2beta2 + >>> + >>> client = tasks_v2beta2.CloudTasksClient() + >>> + >>> name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + >>> + >>> response = client.run_task(name) + + Args: + name (str): Required. + + The task name. For example: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view (~google.cloud.tasks_v2beta2.types.View): The response_view specifies which subset of the ``Task`` will be + returned. + + By default response_view is ``BASIC``; not all + information is retrieved by default because some data, such as + payloads, might be desirable to return only when needed because + of its large size or because of the sensitivity of data that it + contains. + + Authorization for ``FULL`` requires + ``cloudtasks.tasks.fullView`` `Google IAM `_ permission on the + ``Task`` resource. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.tasks_v2beta2.types.Task` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
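+
+ A minimal sketch of handling the documented ``NOT_FOUND`` case,
+ assuming ``google.api_core.exceptions`` maps it to ``NotFound``:
+
+ >>> from google.api_core import exceptions
+ >>> try:
+ ...     client.run_task(name)
+ ... except exceptions.NotFound:
+ ...     pass  # task already succeeded or permanently failed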
+ """ + if 'run_task' not in self._inner_api_calls: + self._inner_api_calls[ + 'run_task'] = google.api_core.gapic_v1.method.wrap_method( + self._cloud_tasks_stub.RunTask, + default_retry=self._method_configs['RunTask'].retry, + default_timeout=self._method_configs['RunTask'].timeout, + client_info=self._client_info, + ) + + request = cloudtasks_pb2.RunTaskRequest( + name=name, + response_view=response_view, + ) + return self._inner_api_calls['run_task']( + request, retry=retry, timeout=timeout, metadata=metadata) diff --git a/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client_config.py b/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client_config.py new file mode 100644 index 000000000000..f297cf472fcd --- /dev/null +++ b/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client_config.py @@ -0,0 +1,123 @@ +config = { + "interfaces": { + "google.cloud.tasks.v2beta2.CloudTasks": { + "retry_codes": { + "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], + "non_idempotent": [] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 20000, + "rpc_timeout_multiplier": 1.0, + "max_rpc_timeout_millis": 20000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "ListQueues": { + "timeout_millis": 10000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "GetQueue": { + "timeout_millis": 10000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "CreateQueue": { + "timeout_millis": 10000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "UpdateQueue": { + "timeout_millis": 10000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "DeleteQueue": { + "timeout_millis": 10000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "PurgeQueue": { + "timeout_millis": 10000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "PauseQueue": { + "timeout_millis": 10000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "ResumeQueue": { + "timeout_millis": 10000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetIamPolicy": { + "timeout_millis": 10000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "SetIamPolicy": { + "timeout_millis": 10000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "TestIamPermissions": { + "timeout_millis": 10000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListTasks": { + "timeout_millis": 10000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "GetTask": { + "timeout_millis": 10000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "CreateTask": { + "timeout_millis": 10000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "DeleteTask": { + "timeout_millis": 10000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "LeaseTasks": { + "timeout_millis": 10000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "AcknowledgeTask": { + "timeout_millis": 10000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "RenewLease": { + "timeout_millis": 10000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "CancelLease": { + "timeout_millis": 
10000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "RunTask": { + "timeout_millis": 10000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/tasks/google/cloud/tasks_v2beta2/gapic/enums.py b/tasks/google/cloud/tasks_v2beta2/gapic/enums.py new file mode 100644 index 000000000000..b8e63bb2cd77 --- /dev/null +++ b/tasks/google/cloud/tasks_v2beta2/gapic/enums.py @@ -0,0 +1,110 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Wrappers for protocol buffer enum types.""" + +import enum + + +class HttpMethod(enum.IntEnum): + """ + The HTTP method used to execute the task. + + Attributes: + HTTP_METHOD_UNSPECIFIED (int): HTTP method unspecified + POST (int): HTTP Post + GET (int): HTTP Get + HEAD (int): HTTP Head + PUT (int): HTTP Put + DELETE (int): HTTP Delete + """ + HTTP_METHOD_UNSPECIFIED = 0 + POST = 1 + GET = 2 + HEAD = 3 + PUT = 4 + DELETE = 5 + + +class Queue(object): + class State(enum.IntEnum): + """ + State of the queue. + + Attributes: + STATE_UNSPECIFIED (int): Unspecified state. + RUNNING (int): The queue is running. Tasks can be dispatched. + + If the queue was created using Cloud Tasks and the queue has + had no activity (method calls or task dispatches) for 30 days, + the queue may take a few minutes to re-activate. Some method + calls may return ``NOT_FOUND`` and + tasks may not be dispatched for a few minutes until the queue + has been re-activated. + PAUSED (int): Tasks are paused by the user. If the queue is paused then Cloud + Tasks will stop delivering tasks from it, but more tasks can + still be added to it by the user. When a pull queue is paused, + all ``LeaseTasks`` calls will return a + ``FAILED_PRECONDITION``. + DISABLED (int): The queue is disabled. + + A queue becomes ``DISABLED`` when + `queue.yaml `_ or + `queue.xml `_ is uploaded + which does not contain the queue. You cannot directly disable a queue. + + When a queue is disabled, tasks can still be added to a queue + but the tasks are not dispatched and + ``LeaseTasks`` calls return a + ``FAILED_PRECONDITION`` error. + + To permanently delete this queue and all of its tasks, call + ``DeleteQueue``. + """ + STATE_UNSPECIFIED = 0 + RUNNING = 1 + PAUSED = 2 + DISABLED = 3 + + +class Task(object): + class View(enum.IntEnum): + """ + The view specifies a subset of ``Task`` data. + + When a task is returned in a response, not all + information is retrieved by default because some data, such as + payloads, might be desirable to return only when needed because + of its large size or because of the sensitivity of data that it + contains. + + Attributes: + VIEW_UNSPECIFIED (int): Unspecified. Defaults to BASIC. + BASIC (int): The basic view omits fields which can be large or can contain + sensitive data. + + This view does not include the + (``payload in AppEngineHttpRequest`` and + ``payload in PullMessage``). 
These payloads are + desirable to return only when needed, because they can be large + and because of the sensitivity of the data that you choose to + store in it. + FULL (int): All information is returned. + + Authorization for ``FULL`` requires + ``cloudtasks.tasks.fullView`` `Google IAM `_ + permission on the ``Queue`` resource. + """ + VIEW_UNSPECIFIED = 0 + BASIC = 1 + FULL = 2 diff --git a/tasks/google/cloud/tasks_v2beta2/proto/__init__.py b/tasks/google/cloud/tasks_v2beta2/proto/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py b/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py new file mode 100644 index 000000000000..6a5c8dd747e1 --- /dev/null +++ b/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2.py @@ -0,0 +1,1676 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/tasks_v2beta2/proto/cloudtasks.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.cloud.tasks_v2beta2.proto import queue_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2 +from google.cloud.tasks_v2beta2.proto import task_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2 +from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 +from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/tasks_v2beta2/proto/cloudtasks.proto', + package='google.cloud.tasks.v2beta2', + syntax='proto3', + serialized_pb=_b('\n1google/cloud/tasks_v2beta2/proto/cloudtasks.proto\x12\x1agoogle.cloud.tasks.v2beta2\x1a\x1cgoogle/api/annotations.proto\x1a,google/cloud/tasks_v2beta2/proto/queue.proto\x1a+google/cloud/tasks_v2beta2/proto/task.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"Z\n\x11ListQueuesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x12\n\npage_token\x18\x04 \x01(\t\"`\n\x12ListQueuesResponse\x12\x31\n\x06queues\x18\x01 \x03(\x0b\x32!.google.cloud.tasks.v2beta2.Queue\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"\x1f\n\x0fGetQueueRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"V\n\x12\x43reateQueueRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x30\n\x05queue\x18\x02 \x01(\x0b\x32!.google.cloud.tasks.v2beta2.Queue\"w\n\x12UpdateQueueRequest\x12\x30\n\x05queue\x18\x01 \x01(\x0b\x32!.google.cloud.tasks.v2beta2.Queue\x12/\n\x0bupdate_mask\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.FieldMask\"\"\n\x12\x44\x65leteQueueRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"!\n\x11PurgeQueueRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"!\n\x11PauseQueueRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\"\n\x12ResumeQueueRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x99\x01\n\x10ListTasksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\x12\x10\n\x08order_by\x18\x03 \x01(\t\x12\x11\n\tpage_size\x18\x04 \x01(\x05\x12\x12\n\npage_token\x18\x05 \x01(\t\"]\n\x11ListTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta2.Task\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"\\\n\x0eGetTaskRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12<\n\rresponse_view\x18\x02 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\"\x91\x01\n\x11\x43reateTaskRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12.\n\x04task\x18\x02 \x01(\x0b\x32 .google.cloud.tasks.v2beta2.Task\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\"!\n\x11\x44\x65leteTaskRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\xb7\x01\n\x11LeaseTasksRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tmax_tasks\x18\x02 \x01(\x05\x12\x31\n\x0elease_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12<\n\rresponse_view\x18\x04 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\"E\n\x12LeaseTasksResponse\x12/\n\x05tasks\x18\x01 \x03(\x0b\x32 .google.cloud.tasks.v2beta2.Task\"Y\n\x16\x41\x63knowledgeTaskRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xc5\x01\n\x11RenewLeaseRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\x0elease_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12<\n\rresponse_view\x18\x04 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\"\x93\x01\n\x12\x43\x61ncelLeaseRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\rschedule_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12<\n\rresponse_view\x18\x03 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\"\\\n\x0eRunTaskRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12<\n\rresponse_view\x18\x02 
\x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View2\xf0\x19\n\nCloudTasks\x12\xa4\x01\n\nListQueues\x12-.google.cloud.tasks.v2beta2.ListQueuesRequest\x1a..google.cloud.tasks.v2beta2.ListQueuesResponse\"7\x82\xd3\xe4\x93\x02\x31\x12//v2beta2/{parent=projects/*/locations/*}/queues\x12\x93\x01\n\x08GetQueue\x12+.google.cloud.tasks.v2beta2.GetQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue\"7\x82\xd3\xe4\x93\x02\x31\x12//v2beta2/{name=projects/*/locations/*/queues/*}\x12\xa0\x01\n\x0b\x43reateQueue\x12..google.cloud.tasks.v2beta2.CreateQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue\">\x82\xd3\xe4\x93\x02\x38\"//v2beta2/{parent=projects/*/locations/*}/queues:\x05queue\x12\xa6\x01\n\x0bUpdateQueue\x12..google.cloud.tasks.v2beta2.UpdateQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue\"D\x82\xd3\xe4\x93\x02>25/v2beta2/{queue.name=projects/*/locations/*/queues/*}:\x05queue\x12\x8e\x01\n\x0b\x44\x65leteQueue\x12..google.cloud.tasks.v2beta2.DeleteQueueRequest\x1a\x16.google.protobuf.Empty\"7\x82\xd3\xe4\x93\x02\x31*//v2beta2/{name=projects/*/locations/*/queues/*}\x12\xa0\x01\n\nPurgeQueue\x12-.google.cloud.tasks.v2beta2.PurgeQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue\"@\x82\xd3\xe4\x93\x02:\"5/v2beta2/{name=projects/*/locations/*/queues/*}:purge:\x01*\x12\xa0\x01\n\nPauseQueue\x12-.google.cloud.tasks.v2beta2.PauseQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue\"@\x82\xd3\xe4\x93\x02:\"5/v2beta2/{name=projects/*/locations/*/queues/*}:pause:\x01*\x12\xa3\x01\n\x0bResumeQueue\x12..google.cloud.tasks.v2beta2.ResumeQueueRequest\x1a!.google.cloud.tasks.v2beta2.Queue\"A\x82\xd3\xe4\x93\x02;\"6/v2beta2/{name=projects/*/locations/*/queues/*}:resume:\x01*\x12\x96\x01\n\x0cGetIamPolicy\x12\".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy\"K\x82\xd3\xe4\x93\x02\x45\"@/v2beta2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\x01*\x12\x96\x01\n\x0cSetIamPolicy\x12\".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy\"K\x82\xd3\xe4\x93\x02\x45\"@/v2beta2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\x01*\x12\xbc\x01\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse\"Q\x82\xd3\xe4\x93\x02K\"F/v2beta2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\x01*\x12\xa9\x01\n\tListTasks\x12,.google.cloud.tasks.v2beta2.ListTasksRequest\x1a-.google.cloud.tasks.v2beta2.ListTasksResponse\"?\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks\x12\x98\x01\n\x07GetTask\x12*.google.cloud.tasks.v2beta2.GetTaskRequest\x1a .google.cloud.tasks.v2beta2.Task\"?\x82\xd3\xe4\x93\x02\x39\x12\x37/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\x12\xa1\x01\n\nCreateTask\x12-.google.cloud.tasks.v2beta2.CreateTaskRequest\x1a 
.google.cloud.tasks.v2beta2.Task\"B\x82\xd3\xe4\x93\x02<\"7/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:\x01*\x12\x94\x01\n\nDeleteTask\x12-.google.cloud.tasks.v2beta2.DeleteTaskRequest\x1a\x16.google.protobuf.Empty\"?\x82\xd3\xe4\x93\x02\x39*7/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}\x12\xb5\x01\n\nLeaseTasks\x12-.google.cloud.tasks.v2beta2.LeaseTasksRequest\x1a..google.cloud.tasks.v2beta2.LeaseTasksResponse\"H\x82\xd3\xe4\x93\x02\x42\"=/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:lease:\x01*\x12\xad\x01\n\x0f\x41\x63knowledgeTask\x12\x32.google.cloud.tasks.v2beta2.AcknowledgeTaskRequest\x1a\x16.google.protobuf.Empty\"N\x82\xd3\xe4\x93\x02H\"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:acknowledge:\x01*\x12\xac\x01\n\nRenewLease\x12-.google.cloud.tasks.v2beta2.RenewLeaseRequest\x1a .google.cloud.tasks.v2beta2.Task\"M\x82\xd3\xe4\x93\x02G\"B/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:renewLease:\x01*\x12\xaf\x01\n\x0b\x43\x61ncelLease\x12..google.cloud.tasks.v2beta2.CancelLeaseRequest\x1a .google.cloud.tasks.v2beta2.Task\"N\x82\xd3\xe4\x93\x02H\"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:cancelLease:\x01*\x12\x9f\x01\n\x07RunTask\x12*.google.cloud.tasks.v2beta2.RunTaskRequest\x1a .google.cloud.tasks.v2beta2.Task\"F\x82\xd3\xe4\x93\x02@\";/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\x01*Bt\n\x1e\x63om.google.cloud.tasks.v2beta2B\x0f\x43loudTasksProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasksb\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.DESCRIPTOR,google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.DESCRIPTOR,google_dot_iam_dot_v1_dot_iam__policy__pb2.DESCRIPTOR,google_dot_iam_dot_v1_dot_policy__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) + + + + +_LISTQUEUESREQUEST = _descriptor.Descriptor( + name='ListQueuesRequest', + full_name='google.cloud.tasks.v2beta2.ListQueuesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.cloud.tasks.v2beta2.ListQueuesRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='filter', full_name='google.cloud.tasks.v2beta2.ListQueuesRequest.filter', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.cloud.tasks.v2beta2.ListQueuesRequest.page_size', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.cloud.tasks.v2beta2.ListQueuesRequest.page_token', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=390, + serialized_end=480, +) + + +_LISTQUEUESRESPONSE = _descriptor.Descriptor( + name='ListQueuesResponse', + full_name='google.cloud.tasks.v2beta2.ListQueuesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='queues', full_name='google.cloud.tasks.v2beta2.ListQueuesResponse.queues', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.cloud.tasks.v2beta2.ListQueuesResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=482, + serialized_end=578, +) + + +_GETQUEUEREQUEST = _descriptor.Descriptor( + name='GetQueueRequest', + full_name='google.cloud.tasks.v2beta2.GetQueueRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.tasks.v2beta2.GetQueueRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=580, + serialized_end=611, +) + + +_CREATEQUEUEREQUEST = _descriptor.Descriptor( + name='CreateQueueRequest', + full_name='google.cloud.tasks.v2beta2.CreateQueueRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.cloud.tasks.v2beta2.CreateQueueRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='queue', full_name='google.cloud.tasks.v2beta2.CreateQueueRequest.queue', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=613, + serialized_end=699, +) + + +_UPDATEQUEUEREQUEST = _descriptor.Descriptor( + name='UpdateQueueRequest', + 
full_name='google.cloud.tasks.v2beta2.UpdateQueueRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='queue', full_name='google.cloud.tasks.v2beta2.UpdateQueueRequest.queue', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.cloud.tasks.v2beta2.UpdateQueueRequest.update_mask', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=701, + serialized_end=820, +) + + +_DELETEQUEUEREQUEST = _descriptor.Descriptor( + name='DeleteQueueRequest', + full_name='google.cloud.tasks.v2beta2.DeleteQueueRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.tasks.v2beta2.DeleteQueueRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=822, + serialized_end=856, +) + + +_PURGEQUEUEREQUEST = _descriptor.Descriptor( + name='PurgeQueueRequest', + full_name='google.cloud.tasks.v2beta2.PurgeQueueRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.tasks.v2beta2.PurgeQueueRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=858, + serialized_end=891, +) + + +_PAUSEQUEUEREQUEST = _descriptor.Descriptor( + name='PauseQueueRequest', + full_name='google.cloud.tasks.v2beta2.PauseQueueRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.tasks.v2beta2.PauseQueueRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=893, + serialized_end=926, +) + + +_RESUMEQUEUEREQUEST = _descriptor.Descriptor( + name='ResumeQueueRequest', + full_name='google.cloud.tasks.v2beta2.ResumeQueueRequest', + filename=None, + 
file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.tasks.v2beta2.ResumeQueueRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=928, + serialized_end=962, +) + + +_LISTTASKSREQUEST = _descriptor.Descriptor( + name='ListTasksRequest', + full_name='google.cloud.tasks.v2beta2.ListTasksRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.cloud.tasks.v2beta2.ListTasksRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='response_view', full_name='google.cloud.tasks.v2beta2.ListTasksRequest.response_view', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='order_by', full_name='google.cloud.tasks.v2beta2.ListTasksRequest.order_by', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.cloud.tasks.v2beta2.ListTasksRequest.page_size', index=3, + number=4, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.cloud.tasks.v2beta2.ListTasksRequest.page_token', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=965, + serialized_end=1118, +) + + +_LISTTASKSRESPONSE = _descriptor.Descriptor( + name='ListTasksResponse', + full_name='google.cloud.tasks.v2beta2.ListTasksResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='tasks', full_name='google.cloud.tasks.v2beta2.ListTasksResponse.tasks', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.cloud.tasks.v2beta2.ListTasksResponse.next_page_token', index=1, + 
number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1120, + serialized_end=1213, +) + + +_GETTASKREQUEST = _descriptor.Descriptor( + name='GetTaskRequest', + full_name='google.cloud.tasks.v2beta2.GetTaskRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.tasks.v2beta2.GetTaskRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='response_view', full_name='google.cloud.tasks.v2beta2.GetTaskRequest.response_view', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1215, + serialized_end=1307, +) + + +_CREATETASKREQUEST = _descriptor.Descriptor( + name='CreateTaskRequest', + full_name='google.cloud.tasks.v2beta2.CreateTaskRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.cloud.tasks.v2beta2.CreateTaskRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='task', full_name='google.cloud.tasks.v2beta2.CreateTaskRequest.task', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='response_view', full_name='google.cloud.tasks.v2beta2.CreateTaskRequest.response_view', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1310, + serialized_end=1455, +) + + +_DELETETASKREQUEST = _descriptor.Descriptor( + name='DeleteTaskRequest', + full_name='google.cloud.tasks.v2beta2.DeleteTaskRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.tasks.v2beta2.DeleteTaskRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1457, + serialized_end=1490, +) + + +_LEASETASKSREQUEST = _descriptor.Descriptor( + name='LeaseTasksRequest', + full_name='google.cloud.tasks.v2beta2.LeaseTasksRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.cloud.tasks.v2beta2.LeaseTasksRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_tasks', full_name='google.cloud.tasks.v2beta2.LeaseTasksRequest.max_tasks', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='lease_duration', full_name='google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='response_view', full_name='google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view', index=3, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='filter', full_name='google.cloud.tasks.v2beta2.LeaseTasksRequest.filter', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1493, + serialized_end=1676, +) + + +_LEASETASKSRESPONSE = _descriptor.Descriptor( + name='LeaseTasksResponse', + full_name='google.cloud.tasks.v2beta2.LeaseTasksResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='tasks', full_name='google.cloud.tasks.v2beta2.LeaseTasksResponse.tasks', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1678, + serialized_end=1747, +) + + +_ACKNOWLEDGETASKREQUEST = _descriptor.Descriptor( + name='AcknowledgeTaskRequest', + full_name='google.cloud.tasks.v2beta2.AcknowledgeTaskRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor( + name='name', full_name='google.cloud.tasks.v2beta2.AcknowledgeTaskRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='schedule_time', full_name='google.cloud.tasks.v2beta2.AcknowledgeTaskRequest.schedule_time', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1749, + serialized_end=1838, +) + + +_RENEWLEASEREQUEST = _descriptor.Descriptor( + name='RenewLeaseRequest', + full_name='google.cloud.tasks.v2beta2.RenewLeaseRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.tasks.v2beta2.RenewLeaseRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='schedule_time', full_name='google.cloud.tasks.v2beta2.RenewLeaseRequest.schedule_time', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='lease_duration', full_name='google.cloud.tasks.v2beta2.RenewLeaseRequest.lease_duration', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='response_view', full_name='google.cloud.tasks.v2beta2.RenewLeaseRequest.response_view', index=3, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1841, + serialized_end=2038, +) + + +_CANCELLEASEREQUEST = _descriptor.Descriptor( + name='CancelLeaseRequest', + full_name='google.cloud.tasks.v2beta2.CancelLeaseRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.tasks.v2beta2.CancelLeaseRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='schedule_time', full_name='google.cloud.tasks.v2beta2.CancelLeaseRequest.schedule_time', index=1, + number=2, type=11, 
cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='response_view', full_name='google.cloud.tasks.v2beta2.CancelLeaseRequest.response_view', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2041, + serialized_end=2188, +) + + +_RUNTASKREQUEST = _descriptor.Descriptor( + name='RunTaskRequest', + full_name='google.cloud.tasks.v2beta2.RunTaskRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.tasks.v2beta2.RunTaskRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='response_view', full_name='google.cloud.tasks.v2beta2.RunTaskRequest.response_view', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2190, + serialized_end=2282, +) + +_LISTQUEUESRESPONSE.fields_by_name['queues'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE +_CREATEQUEUEREQUEST.fields_by_name['queue'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE +_UPDATEQUEUEREQUEST.fields_by_name['queue'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE +_UPDATEQUEUEREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LISTTASKSREQUEST.fields_by_name['response_view'].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW +_LISTTASKSRESPONSE.fields_by_name['tasks'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK +_GETTASKREQUEST.fields_by_name['response_view'].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW +_CREATETASKREQUEST.fields_by_name['task'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK +_CREATETASKREQUEST.fields_by_name['response_view'].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW +_LEASETASKSREQUEST.fields_by_name['lease_duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_LEASETASKSREQUEST.fields_by_name['response_view'].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW +_LEASETASKSRESPONSE.fields_by_name['tasks'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK +_ACKNOWLEDGETASKREQUEST.fields_by_name['schedule_time'].message_type = 
google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_RENEWLEASEREQUEST.fields_by_name['schedule_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_RENEWLEASEREQUEST.fields_by_name['lease_duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION
+_RENEWLEASEREQUEST.fields_by_name['response_view'].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW
+_CANCELLEASEREQUEST.fields_by_name['schedule_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
+_CANCELLEASEREQUEST.fields_by_name['response_view'].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW
+_RUNTASKREQUEST.fields_by_name['response_view'].enum_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK_VIEW
+DESCRIPTOR.message_types_by_name['ListQueuesRequest'] = _LISTQUEUESREQUEST
+DESCRIPTOR.message_types_by_name['ListQueuesResponse'] = _LISTQUEUESRESPONSE
+DESCRIPTOR.message_types_by_name['GetQueueRequest'] = _GETQUEUEREQUEST
+DESCRIPTOR.message_types_by_name['CreateQueueRequest'] = _CREATEQUEUEREQUEST
+DESCRIPTOR.message_types_by_name['UpdateQueueRequest'] = _UPDATEQUEUEREQUEST
+DESCRIPTOR.message_types_by_name['DeleteQueueRequest'] = _DELETEQUEUEREQUEST
+DESCRIPTOR.message_types_by_name['PurgeQueueRequest'] = _PURGEQUEUEREQUEST
+DESCRIPTOR.message_types_by_name['PauseQueueRequest'] = _PAUSEQUEUEREQUEST
+DESCRIPTOR.message_types_by_name['ResumeQueueRequest'] = _RESUMEQUEUEREQUEST
+DESCRIPTOR.message_types_by_name['ListTasksRequest'] = _LISTTASKSREQUEST
+DESCRIPTOR.message_types_by_name['ListTasksResponse'] = _LISTTASKSRESPONSE
+DESCRIPTOR.message_types_by_name['GetTaskRequest'] = _GETTASKREQUEST
+DESCRIPTOR.message_types_by_name['CreateTaskRequest'] = _CREATETASKREQUEST
+DESCRIPTOR.message_types_by_name['DeleteTaskRequest'] = _DELETETASKREQUEST
+DESCRIPTOR.message_types_by_name['LeaseTasksRequest'] = _LEASETASKSREQUEST
+DESCRIPTOR.message_types_by_name['LeaseTasksResponse'] = _LEASETASKSRESPONSE
+DESCRIPTOR.message_types_by_name['AcknowledgeTaskRequest'] = _ACKNOWLEDGETASKREQUEST
+DESCRIPTOR.message_types_by_name['RenewLeaseRequest'] = _RENEWLEASEREQUEST
+DESCRIPTOR.message_types_by_name['CancelLeaseRequest'] = _CANCELLEASEREQUEST
+DESCRIPTOR.message_types_by_name['RunTaskRequest'] = _RUNTASKREQUEST
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+ListQueuesRequest = _reflection.GeneratedProtocolMessageType('ListQueuesRequest', (_message.Message,), dict(
+  DESCRIPTOR = _LISTQUEUESREQUEST,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Request message for
+  [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues].
+
+
+  Attributes:
+      parent:
+          Required. The location name. For example:
+          ``projects/PROJECT_ID/locations/LOCATION_ID``
+      filter:
+          ``filter`` can be used to specify a subset of queues. Any
+          [Queue][google.cloud.tasks.v2beta2.Queue] field can be used as
+          a filter and several operators are supported. For example:
+          ``<=, <, >=, >, !=, =, :``. The filter syntax is the same as
+          described in `Stackdriver's Advanced Logs Filters
+          `__. Sample filter
+          "app\_engine\_http\_target: \*". Note that using filters
+          might cause fewer queues than the requested page\_size to be
+          returned.
+      page_size:
+          Requested page size. The maximum page size is 9800. If
+          unspecified, the page size will be the maximum. Fewer queues
+          than requested might be returned, even if more queues exist;
+          use the [next\_page\_token][google.cloud.tasks.v2beta2.ListQue
+          uesResponse.next\_page\_token] in the response to determine if
+          more queues exist.
+      page_token:
+          A token identifying the page of results to return. To request
+          the first page of results, page\_token must be empty. To request
+          the next page of results, page\_token must be the value of [ne
+          xt\_page\_token][google.cloud.tasks.v2beta2.ListQueuesResponse
+          .next\_page\_token] returned from the previous call to the
+          [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]
+          method. It is an error to switch the value of the
+          [filter][google.cloud.tasks.v2beta2.ListQueuesRequest.filter]
+          while iterating through pages.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ListQueuesRequest)
+  ))
+_sym_db.RegisterMessage(ListQueuesRequest)
+
+ListQueuesResponse = _reflection.GeneratedProtocolMessageType('ListQueuesResponse', (_message.Message,), dict(
+  DESCRIPTOR = _LISTQUEUESRESPONSE,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Response message for
+  [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues].
+
+
+  Attributes:
+      queues:
+          The list of queues.
+      next_page_token:
+          A token to retrieve the next page of results. To return the
+          next page of results, call
+          [ListQueues][google.cloud.tasks.v2beta2.CloudTasks.ListQueues]
+          with this value as the [page\_token][google.cloud.tasks.v2beta
+          2.ListQueuesRequest.page\_token]. If the next\_page\_token is
+          empty, there are no more results. The page token is valid for
+          only 2 hours.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ListQueuesResponse)
+  ))
+_sym_db.RegisterMessage(ListQueuesResponse)
+
+GetQueueRequest = _reflection.GeneratedProtocolMessageType('GetQueueRequest', (_message.Message,), dict(
+  DESCRIPTOR = _GETQUEUEREQUEST,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Request message for
+  [GetQueue][google.cloud.tasks.v2beta2.CloudTasks.GetQueue].
+
+
+  Attributes:
+      name:
+          Required. The resource name of the queue. For example:
+          ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.GetQueueRequest)
+  ))
+_sym_db.RegisterMessage(GetQueueRequest)
+
+CreateQueueRequest = _reflection.GeneratedProtocolMessageType('CreateQueueRequest', (_message.Message,), dict(
+  DESCRIPTOR = _CREATEQUEUEREQUEST,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Request message for
+  [CreateQueue][google.cloud.tasks.v2beta2.CloudTasks.CreateQueue].
+
+
+  Attributes:
+      parent:
+          Required. The location name in which the queue will be
+          created. For example:
+          ``projects/PROJECT_ID/locations/LOCATION_ID`` The list of
+          allowed locations can be obtained by calling Cloud Tasks'
+          implementation of [ListLocations][google.cloud.location.Locati
+          ons.ListLocations].
+      queue:
+          Required. The queue to create. [Queue's
+          name][google.cloud.tasks.v2beta2.Queue.name] cannot be the
+          same as an existing queue.
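+
+  Example (an illustrative sketch, not part of the generated API
+  surface; the project, location, and queue IDs below are
+  placeholders)::
+
+      from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2
+      from google.cloud.tasks_v2beta2.proto import queue_pb2
+
+      request = cloudtasks_pb2.CreateQueueRequest(
+          parent='projects/my-project/locations/us-central1',
+          queue=queue_pb2.Queue(
+              name='projects/my-project/locations/us-central1/queues/my-queue'))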
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.CreateQueueRequest)
+  ))
+_sym_db.RegisterMessage(CreateQueueRequest)
+
+UpdateQueueRequest = _reflection.GeneratedProtocolMessageType('UpdateQueueRequest', (_message.Message,), dict(
+  DESCRIPTOR = _UPDATEQUEUEREQUEST,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Request message for
+  [UpdateQueue][google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue].
+
+
+  Attributes:
+      queue:
+          Required. The queue to create or update. The queue's
+          [name][google.cloud.tasks.v2beta2.Queue.name] must be
+          specified. Output only fields cannot be modified using
+          UpdateQueue. Any value specified for an output only field will
+          be ignored. The queue's
+          [name][google.cloud.tasks.v2beta2.Queue.name] cannot be
+          changed.
+      update_mask:
+          A mask used to specify which fields of the queue are being
+          updated. If empty, then all fields will be updated.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.UpdateQueueRequest)
+  ))
+_sym_db.RegisterMessage(UpdateQueueRequest)
+
+DeleteQueueRequest = _reflection.GeneratedProtocolMessageType('DeleteQueueRequest', (_message.Message,), dict(
+  DESCRIPTOR = _DELETEQUEUEREQUEST,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Request message for
+  [DeleteQueue][google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue].
+
+
+  Attributes:
+      name:
+          Required. The queue name. For example:
+          ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.DeleteQueueRequest)
+  ))
+_sym_db.RegisterMessage(DeleteQueueRequest)
+
+PurgeQueueRequest = _reflection.GeneratedProtocolMessageType('PurgeQueueRequest', (_message.Message,), dict(
+  DESCRIPTOR = _PURGEQUEUEREQUEST,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Request message for
+  [PurgeQueue][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue].
+
+
+  Attributes:
+      name:
+          Required. The queue name. For example:
+          ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.PurgeQueueRequest)
+  ))
+_sym_db.RegisterMessage(PurgeQueueRequest)
+
+PauseQueueRequest = _reflection.GeneratedProtocolMessageType('PauseQueueRequest', (_message.Message,), dict(
+  DESCRIPTOR = _PAUSEQUEUEREQUEST,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Request message for
+  [PauseQueue][google.cloud.tasks.v2beta2.CloudTasks.PauseQueue].
+
+
+  Attributes:
+      name:
+          Required. The queue name. For example:
+          ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.PauseQueueRequest)
+  ))
+_sym_db.RegisterMessage(PauseQueueRequest)
+
+ResumeQueueRequest = _reflection.GeneratedProtocolMessageType('ResumeQueueRequest', (_message.Message,), dict(
+  DESCRIPTOR = _RESUMEQUEUEREQUEST,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Request message for
+  [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue].
+
+
+  Attributes:
+      name:
+          Required. The queue name. For example:
+          ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ResumeQueueRequest)
+  ))
+_sym_db.RegisterMessage(ResumeQueueRequest)
+
+ListTasksRequest = _reflection.GeneratedProtocolMessageType('ListTasksRequest', (_message.Message,), dict(
+  DESCRIPTOR = _LISTTASKSREQUEST,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Request message for listing tasks using
+  [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks].
+
+
+  Attributes:
+      parent:
+          Required. The queue name. For example:
+          ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+      response_view:
+          The response\_view specifies which subset of the
+          [Task][google.cloud.tasks.v2beta2.Task] will be returned. By
+          default response\_view is
+          [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
+          information is retrieved by default because some data, such as
+          payloads, might be desirable to return only when needed
+          because of its large size or because of the sensitivity of
+          data that it contains. Authorization for
+          [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
+          ``cloudtasks.tasks.fullView`` `Google IAM `__
+          permission on the [Task][google.cloud.tasks.v2beta2.Task]
+          resource.
+      order_by:
+          Sort order used for the query. The only fields supported for
+          sorting are ``schedule_time`` and ``pull_message.tag``. All
+          results will be returned in approximately ascending order. The
+          default ordering is by ``schedule_time``.
+      page_size:
+          Requested page size. The maximum page size is 1000. If
+          unspecified, the page size will be the maximum. Fewer tasks
+          than requested might be returned, even if more tasks exist;
+          use [next\_page\_
+          token][google.cloud.tasks.v2beta2.ListTasksResponse.next\_page
+          \_token] in the response to determine if more tasks exist.
+      page_token:
+          A token identifying the page of results to return. To request
+          the first page of results, page\_token must be empty. To request
+          the next page of results, page\_token must be the value of [ne
+          xt\_page\_token][google.cloud.tasks.v2beta2.ListTasksResponse.
+          next\_page\_token] returned from the previous call to the
+          [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]
+          method. The page token is valid for only 2 hours.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ListTasksRequest)
+  ))
+_sym_db.RegisterMessage(ListTasksRequest)
+
+ListTasksResponse = _reflection.GeneratedProtocolMessageType('ListTasksResponse', (_message.Message,), dict(
+  DESCRIPTOR = _LISTTASKSRESPONSE,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Response message for listing tasks using
+  [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks].
+
+
+  Attributes:
+      tasks:
+          The list of tasks.
+      next_page_token:
+          A token to retrieve the next page of results. To return the
+          next page of results, call
+          [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]
+          with this value as the [page\_token][google.cloud.tasks.v2beta
+          2.ListTasksRequest.page\_token]. If the next\_page\_token is
+          empty, there are no more results.
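+
+  Example (an illustrative pagination sketch; ``stub`` is assumed to
+  be an already constructed ``CloudTasksStub`` and ``handle`` is
+  placeholder application logic)::
+
+      request = cloudtasks_pb2.ListTasksRequest(
+          parent='projects/my-project/locations/us-central1/queues/my-queue')
+      while True:
+          response = stub.ListTasks(request)
+          for task in response.tasks:
+              handle(task)
+          if not response.next_page_token:
+              break
+          request.page_token = response.next_page_token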
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.ListTasksResponse)
+  ))
+_sym_db.RegisterMessage(ListTasksResponse)
+
+GetTaskRequest = _reflection.GeneratedProtocolMessageType('GetTaskRequest', (_message.Message,), dict(
+  DESCRIPTOR = _GETTASKREQUEST,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Request message for getting a task using
+  [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask].
+
+
+  Attributes:
+      name:
+          Required. The task name. For example: ``projects/PROJECT_ID/l
+          ocations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
+      response_view:
+          The response\_view specifies which subset of the
+          [Task][google.cloud.tasks.v2beta2.Task] will be returned. By
+          default response\_view is
+          [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
+          information is retrieved by default because some data, such as
+          payloads, might be desirable to return only when needed
+          because of its large size or because of the sensitivity of
+          data that it contains. Authorization for
+          [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
+          ``cloudtasks.tasks.fullView`` `Google IAM `__
+          permission on the [Task][google.cloud.tasks.v2beta2.Task]
+          resource.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.GetTaskRequest)
+  ))
+_sym_db.RegisterMessage(GetTaskRequest)
+
+CreateTaskRequest = _reflection.GeneratedProtocolMessageType('CreateTaskRequest', (_message.Message,), dict(
+  DESCRIPTOR = _CREATETASKREQUEST,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Request message for
+  [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask].
+
+
+  Attributes:
+      parent:
+          Required. The queue name. For example:
+          ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+          The queue must already exist.
+      task:
+          Required. The task to add. Task names have the following
+          format: ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUE
+          UE_ID/tasks/TASK_ID``. The user can optionally specify a task
+          [name][google.cloud.tasks.v2beta2.Task.name]. If a name is not
+          specified then the system will generate a random unique task
+          id, which will be set in the task returned in the
+          [response][google.cloud.tasks.v2beta2.Task.name]. If [schedul
+          e\_time][google.cloud.tasks.v2beta2.Task.schedule\_time] is
+          not set or is in the past then Cloud Tasks will set it to the
+          current time. Task De-duplication: Explicitly specifying a
+          task ID enables task de-duplication. If a task's ID is
+          identical to that of an existing task or a task that was
+          deleted or completed recently then the call will fail with
+          [ALREADY\_EXISTS][google.rpc.Code.ALREADY\_EXISTS]. If the
+          task's queue was created using Cloud Tasks, then another task
+          with the same name can't be created for ~1 hour after the
+          original task was deleted or completed. If the task's queue
+          was created using queue.yaml or queue.xml, then another task
+          with the same name can't be created for ~9 days after the
+          original task was deleted or completed. Because there is an
+          extra lookup cost to identify duplicate task names, these
+          [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]
+          calls have significantly increased latency. Using hashed
+          strings for the task id or for the prefix of the task id is
+          recommended. Choosing task ids that are sequential or have
+          sequential prefixes, for example using a timestamp, causes an
+          increase in latency and error rates in all task commands. The
+          infrastructure relies on an approximately uniform distribution
+          of task ids to store and serve tasks efficiently.
+      response_view:
+          The response\_view specifies which subset of the
+          [Task][google.cloud.tasks.v2beta2.Task] will be returned. By
+          default response\_view is
+          [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
+          information is retrieved by default because some data, such as
+          payloads, might be desirable to return only when needed
+          because of its large size or because of the sensitivity of
+          data that it contains. Authorization for
+          [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
+          ``cloudtasks.tasks.fullView`` `Google IAM `__
+          permission on the [Task][google.cloud.tasks.v2beta2.Task]
+          resource.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.CreateTaskRequest)
+  ))
+_sym_db.RegisterMessage(CreateTaskRequest)
+
+DeleteTaskRequest = _reflection.GeneratedProtocolMessageType('DeleteTaskRequest', (_message.Message,), dict(
+  DESCRIPTOR = _DELETETASKREQUEST,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Request message for deleting a task using
+  [DeleteTask][google.cloud.tasks.v2beta2.CloudTasks.DeleteTask].
+
+
+  Attributes:
+      name:
+          Required. The task name. For example: ``projects/PROJECT_ID/l
+          ocations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.DeleteTaskRequest)
+  ))
+_sym_db.RegisterMessage(DeleteTaskRequest)
+
+LeaseTasksRequest = _reflection.GeneratedProtocolMessageType('LeaseTasksRequest', (_message.Message,), dict(
+  DESCRIPTOR = _LEASETASKSREQUEST,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Request message for leasing tasks using
+  [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks].
+
+
+  Attributes:
+      parent:
+          Required. The queue name. For example:
+          ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID``
+      max_tasks:
+          The maximum number of tasks to lease. The maximum that can be
+          requested is 1000.
+      lease_duration:
+          After the worker has successfully finished the work associated
+          with the task, the worker must call [AcknowledgeTask][goog
+          le.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] before the
+          [schedule\_time][google.cloud.tasks.v2beta2.Task.schedule\_tim
+          e]. Otherwise the task will be returned to a later
+          [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]
+          call so that another worker can retry it. The maximum lease
+          duration is 1 week. ``lease_duration`` will be truncated to
+          the nearest second.
+      response_view:
+          The response\_view specifies which subset of the
+          [Task][google.cloud.tasks.v2beta2.Task] will be returned. By
+          default response\_view is
+          [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all
+          information is retrieved by default because some data, such as
+          payloads, might be desirable to return only when needed
+          because of its large size or because of the sensitivity of
+          data that it contains. Authorization for
+          [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires
+          ``cloudtasks.tasks.fullView`` `Google IAM `__
+          permission on the [Task][google.cloud.tasks.v2beta2.Task]
+          resource.
+      filter:
+          ``filter`` can be used to specify a subset of tasks to lease.
+          When ``filter`` is set to ``tag=<my-tag>`` then the
+          [response][google.cloud.tasks.v2beta2.LeaseTasksResponse] will
+          contain only tasks whose
+          [tag][google.cloud.tasks.v2beta2.PullMessage.tag] is equal to
+          ``<my-tag>``. ``<my-tag>`` must be less than 500 characters.
+          When ``filter`` is set to ``tag_function=oldest_tag()``, only
+          tasks which have the same tag as the task with the oldest [sch
+          edule\_time][google.cloud.tasks.v2beta2.Task.schedule\_time]
+          will be returned. Grammar Syntax: - ``filter = "tag=" tag |
+          "tag_function=" function`` - ``tag = string`` - ``function
+          = "oldest_tag()"`` The ``oldest_tag()`` function returns
+          tasks which have the same tag as the oldest task (ordered by
+          schedule time). SDK compatibility: Although the SDK allows
+          tags to be either string or `bytes `__, only UTF-8 encoded tags can be used in
+          Cloud Tasks. Tags which aren't UTF-8 encoded can't be used in
+          the
+          [filter][google.cloud.tasks.v2beta2.LeaseTasksRequest.filter]
+          and the task's
+          [tag][google.cloud.tasks.v2beta2.PullMessage.tag] will be
+          displayed as empty in Cloud Tasks.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.LeaseTasksRequest)
+  ))
+_sym_db.RegisterMessage(LeaseTasksRequest)
+
+LeaseTasksResponse = _reflection.GeneratedProtocolMessageType('LeaseTasksResponse', (_message.Message,), dict(
+  DESCRIPTOR = _LEASETASKSRESPONSE,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Response message for leasing tasks using
+  [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks].
+
+
+  Attributes:
+      tasks:
+          The leased tasks.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.LeaseTasksResponse)
+  ))
+_sym_db.RegisterMessage(LeaseTasksResponse)
+
+AcknowledgeTaskRequest = _reflection.GeneratedProtocolMessageType('AcknowledgeTaskRequest', (_message.Message,), dict(
+  DESCRIPTOR = _ACKNOWLEDGETASKREQUEST,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Request message for acknowledging a task using
+  [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask].
+
+
+  Attributes:
+      name:
+          Required. The task name. For example: ``projects/PROJECT_ID/l
+          ocations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
+      schedule_time:
+          Required. The task's current schedule time, available in the
+          [schedule\_time][google.cloud.tasks.v2beta2.Task.schedule\_tim
+          e] returned by
+          [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]
+          response or
+          [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]
+          response. This restriction is to ensure that your worker
+          currently holds the lease.
+  """,
+  # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.AcknowledgeTaskRequest)
+  ))
+_sym_db.RegisterMessage(AcknowledgeTaskRequest)
+
+RenewLeaseRequest = _reflection.GeneratedProtocolMessageType('RenewLeaseRequest', (_message.Message,), dict(
+  DESCRIPTOR = _RENEWLEASEREQUEST,
+  __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2'
+  ,
+  __doc__ = """Request message for renewing a lease using
+  [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease].
+
+
+  Attributes:
+      name:
+          Required. The task name. For example: ``projects/PROJECT_ID/l
+          ocations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID``
+      schedule_time:
+          Required. The task's current schedule time, available in the
+          [schedule\_time][google.cloud.tasks.v2beta2.Task.schedule\_tim
+          e] returned by
+          [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]
+          response or
+          [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease]
+          response. This restriction is to ensure that your worker
+          currently holds the lease.
+      lease_duration:
+          Required.
The desired new lease duration, starting from now. + The maximum lease duration is 1 week. ``lease_duration`` will + be truncated to the nearest second. + response_view: + The response\_view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] will be returned. By + default response\_view is + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all + information is retrieved by default because some data, such as + payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. Authorization for + [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google IAM `__ + permission on the [Task][google.cloud.tasks.v2beta2.Task] + resource. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.RenewLeaseRequest) + )) +_sym_db.RegisterMessage(RenewLeaseRequest) + +CancelLeaseRequest = _reflection.GeneratedProtocolMessageType('CancelLeaseRequest', (_message.Message,), dict( + DESCRIPTOR = _CANCELLEASEREQUEST, + __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2' + , + __doc__ = """Request message for canceling a lease using + [CancelLease][google.cloud.tasks.v2beta2.CloudTasks.CancelLease]. + + + Attributes: + name: + Required. The task name. For example: ``projects/PROJECT_ID/l + ocations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + schedule_time: + Required. The task's current schedule time, available in the + [schedule\_time][google.cloud.tasks.v2beta2.Task.schedule\_tim + e] returned by + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + response or + [RenewLease][google.cloud.tasks.v2beta2.CloudTasks.RenewLease] + response. This restriction is to ensure that your worker + currently holds the lease. + response_view: + The response\_view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] will be returned. By + default response\_view is + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all + information is retrieved by default because some data, such as + payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. Authorization for + [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google IAM `__ + permission on the [Task][google.cloud.tasks.v2beta2.Task] + resource. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.CancelLeaseRequest) + )) +_sym_db.RegisterMessage(CancelLeaseRequest) + +RunTaskRequest = _reflection.GeneratedProtocolMessageType('RunTaskRequest', (_message.Message,), dict( + DESCRIPTOR = _RUNTASKREQUEST, + __module__ = 'google.cloud.tasks_v2beta2.proto.cloudtasks_pb2' + , + __doc__ = """Request message for forcing a task to run now using + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask]. + + + Attributes: + name: + Required. The task name. For example: ``projects/PROJECT_ID/l + ocations/LOCATION_ID/queues/QUEUE_ID/tasks/TASK_ID`` + response_view: + The response\_view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] will be returned. By + default response\_view is + [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC]; not all + information is retrieved by default because some data, such as + payloads, might be desirable to return only when needed + because of its large size or because of the sensitivity of + data that it contains. 
Authorization for + [FULL][google.cloud.tasks.v2beta2.Task.View.FULL] requires + ``cloudtasks.tasks.fullView`` `Google IAM `__ + permission on the [Task][google.cloud.tasks.v2beta2.Task] + resource. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.RunTaskRequest) + )) +_sym_db.RegisterMessage(RunTaskRequest) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\036com.google.cloud.tasks.v2beta2B\017CloudTasksProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks')) + +_CLOUDTASKS = _descriptor.ServiceDescriptor( + name='CloudTasks', + full_name='google.cloud.tasks.v2beta2.CloudTasks', + file=DESCRIPTOR, + index=0, + options=None, + serialized_start=2285, + serialized_end=5597, + methods=[ + _descriptor.MethodDescriptor( + name='ListQueues', + full_name='google.cloud.tasks.v2beta2.CloudTasks.ListQueues', + index=0, + containing_service=None, + input_type=_LISTQUEUESREQUEST, + output_type=_LISTQUEUESRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0021\022//v2beta2/{parent=projects/*/locations/*}/queues')), + ), + _descriptor.MethodDescriptor( + name='GetQueue', + full_name='google.cloud.tasks.v2beta2.CloudTasks.GetQueue', + index=1, + containing_service=None, + input_type=_GETQUEUEREQUEST, + output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0021\022//v2beta2/{name=projects/*/locations/*/queues/*}')), + ), + _descriptor.MethodDescriptor( + name='CreateQueue', + full_name='google.cloud.tasks.v2beta2.CloudTasks.CreateQueue', + index=2, + containing_service=None, + input_type=_CREATEQUEUEREQUEST, + output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0028\"//v2beta2/{parent=projects/*/locations/*}/queues:\005queue')), + ), + _descriptor.MethodDescriptor( + name='UpdateQueue', + full_name='google.cloud.tasks.v2beta2.CloudTasks.UpdateQueue', + index=3, + containing_service=None, + input_type=_UPDATEQUEUEREQUEST, + output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002>25/v2beta2/{queue.name=projects/*/locations/*/queues/*}:\005queue')), + ), + _descriptor.MethodDescriptor( + name='DeleteQueue', + full_name='google.cloud.tasks.v2beta2.CloudTasks.DeleteQueue', + index=4, + containing_service=None, + input_type=_DELETEQUEUEREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0021*//v2beta2/{name=projects/*/locations/*/queues/*}')), + ), + _descriptor.MethodDescriptor( + name='PurgeQueue', + full_name='google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue', + index=5, + containing_service=None, + input_type=_PURGEQUEUEREQUEST, + output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002:\"5/v2beta2/{name=projects/*/locations/*/queues/*}:purge:\001*')), + ), + _descriptor.MethodDescriptor( + name='PauseQueue', + full_name='google.cloud.tasks.v2beta2.CloudTasks.PauseQueue', + index=6, + containing_service=None, + input_type=_PAUSEQUEUEREQUEST, + 
output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002:\"5/v2beta2/{name=projects/*/locations/*/queues/*}:pause:\001*')), + ), + _descriptor.MethodDescriptor( + name='ResumeQueue', + full_name='google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue', + index=7, + containing_service=None, + input_type=_RESUMEQUEUEREQUEST, + output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2._QUEUE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002;\"6/v2beta2/{name=projects/*/locations/*/queues/*}:resume:\001*')), + ), + _descriptor.MethodDescriptor( + name='GetIamPolicy', + full_name='google.cloud.tasks.v2beta2.CloudTasks.GetIamPolicy', + index=8, + containing_service=None, + input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._GETIAMPOLICYREQUEST, + output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002E\"@/v2beta2/{resource=projects/*/locations/*/queues/*}:getIamPolicy:\001*')), + ), + _descriptor.MethodDescriptor( + name='SetIamPolicy', + full_name='google.cloud.tasks.v2beta2.CloudTasks.SetIamPolicy', + index=9, + containing_service=None, + input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._SETIAMPOLICYREQUEST, + output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002E\"@/v2beta2/{resource=projects/*/locations/*/queues/*}:setIamPolicy:\001*')), + ), + _descriptor.MethodDescriptor( + name='TestIamPermissions', + full_name='google.cloud.tasks.v2beta2.CloudTasks.TestIamPermissions', + index=10, + containing_service=None, + input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSREQUEST, + output_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002K\"F/v2beta2/{resource=projects/*/locations/*/queues/*}:testIamPermissions:\001*')), + ), + _descriptor.MethodDescriptor( + name='ListTasks', + full_name='google.cloud.tasks.v2beta2.CloudTasks.ListTasks', + index=11, + containing_service=None, + input_type=_LISTTASKSREQUEST, + output_type=_LISTTASKSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0029\0227/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks')), + ), + _descriptor.MethodDescriptor( + name='GetTask', + full_name='google.cloud.tasks.v2beta2.CloudTasks.GetTask', + index=12, + containing_service=None, + input_type=_GETTASKREQUEST, + output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0029\0227/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}')), + ), + _descriptor.MethodDescriptor( + name='CreateTask', + full_name='google.cloud.tasks.v2beta2.CloudTasks.CreateTask', + index=13, + containing_service=None, + input_type=_CREATETASKREQUEST, + output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002<\"7/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:\001*')), + ), + _descriptor.MethodDescriptor( + name='DeleteTask', + full_name='google.cloud.tasks.v2beta2.CloudTasks.DeleteTask', + index=14, + 
containing_service=None, + input_type=_DELETETASKREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0029*7/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}')), + ), + _descriptor.MethodDescriptor( + name='LeaseTasks', + full_name='google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks', + index=15, + containing_service=None, + input_type=_LEASETASKSREQUEST, + output_type=_LEASETASKSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002B\"=/v2beta2/{parent=projects/*/locations/*/queues/*}/tasks:lease:\001*')), + ), + _descriptor.MethodDescriptor( + name='AcknowledgeTask', + full_name='google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask', + index=16, + containing_service=None, + input_type=_ACKNOWLEDGETASKREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002H\"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:acknowledge:\001*')), + ), + _descriptor.MethodDescriptor( + name='RenewLease', + full_name='google.cloud.tasks.v2beta2.CloudTasks.RenewLease', + index=17, + containing_service=None, + input_type=_RENEWLEASEREQUEST, + output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002G\"B/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:renewLease:\001*')), + ), + _descriptor.MethodDescriptor( + name='CancelLease', + full_name='google.cloud.tasks.v2beta2.CloudTasks.CancelLease', + index=18, + containing_service=None, + input_type=_CANCELLEASEREQUEST, + output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002H\"C/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:cancelLease:\001*')), + ), + _descriptor.MethodDescriptor( + name='RunTask', + full_name='google.cloud.tasks.v2beta2.CloudTasks.RunTask', + index=19, + containing_service=None, + input_type=_RUNTASKREQUEST, + output_type=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2._TASK, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002@\";/v2beta2/{name=projects/*/locations/*/queues/*/tasks/*}:run:\001*')), + ), +]) +_sym_db.RegisterServiceDescriptor(_CLOUDTASKS) + +DESCRIPTOR.services_by_name['CloudTasks'] = _CLOUDTASKS + +# @@protoc_insertion_point(module_scope) diff --git a/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2_grpc.py b/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2_grpc.py new file mode 100644 index 000000000000..1866998e0952 --- /dev/null +++ b/tasks/google/cloud/tasks_v2beta2/proto/cloudtasks_pb2_grpc.py @@ -0,0 +1,550 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
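+# Usage sketch (illustrative only, not executed as part of this
+# module): a stub is constructed from a gRPC channel, and calls to
+# the live service additionally require authentication credentials.
+#
+#     from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2
+#
+#     channel = grpc.secure_channel('cloudtasks.googleapis.com:443',
+#                                   grpc.ssl_channel_credentials())
+#     stub = CloudTasksStub(channel)
+#     queues = stub.ListQueues(cloudtasks_pb2.ListQueuesRequest(
+#         parent='projects/my-project/locations/us-central1'))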
+import grpc + +from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2 +from google.cloud.tasks_v2beta2.proto import queue_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2 +from google.cloud.tasks_v2beta2.proto import task_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2 +from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 +from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class CloudTasksStub(object): + """Cloud Tasks allows developers to manage the execution of background + work in their applications. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.ListQueues = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/ListQueues', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListQueuesRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListQueuesResponse.FromString, + ) + self.GetQueue = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/GetQueue', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.GetQueueRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, + ) + self.CreateQueue = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/CreateQueue', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.CreateQueueRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, + ) + self.UpdateQueue = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/UpdateQueue', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.UpdateQueueRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, + ) + self.DeleteQueue = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/DeleteQueue', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.DeleteQueueRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.PurgeQueue = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/PurgeQueue', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.PurgeQueueRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, + ) + self.PauseQueue = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/PauseQueue', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.PauseQueueRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, + ) + self.ResumeQueue = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/ResumeQueue', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ResumeQueueRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.FromString, + ) + self.GetIamPolicy = channel.unary_unary( + 
'/google.cloud.tasks.v2beta2.CloudTasks/GetIamPolicy', + request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString, + response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, + ) + self.SetIamPolicy = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/SetIamPolicy', + request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString, + response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString, + ) + self.TestIamPermissions = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/TestIamPermissions', + request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString, + response_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString, + ) + self.ListTasks = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/ListTasks', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListTasksRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListTasksResponse.FromString, + ) + self.GetTask = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/GetTask', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.GetTaskRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.FromString, + ) + self.CreateTask = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/CreateTask', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.CreateTaskRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.FromString, + ) + self.DeleteTask = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/DeleteTask', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.DeleteTaskRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.LeaseTasks = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/LeaseTasks', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.LeaseTasksRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.LeaseTasksResponse.FromString, + ) + self.AcknowledgeTask = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/AcknowledgeTask', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.AcknowledgeTaskRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + ) + self.RenewLease = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/RenewLease', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.RenewLeaseRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.FromString, + ) + self.CancelLease = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/CancelLease', + request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.CancelLeaseRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.FromString, + ) + self.RunTask = channel.unary_unary( + '/google.cloud.tasks.v2beta2.CloudTasks/RunTask', + 
request_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.RunTaskRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.FromString, + ) + + +class CloudTasksServicer(object): + """Cloud Tasks allows developers to manage the execution of background + work in their applications. + """ + + def ListQueues(self, request, context): + """Lists queues. + + Queues are returned in lexicographical order. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetQueue(self, request, context): + """Gets a queue. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateQueue(self, request, context): + """Creates a queue. + + Queues created with this method allow tasks to live for a maximum of 31 + days. After a task is 31 days old, the task will be deleted regardless of whether + it was dispatched or not. + + WARNING: Using this method may have unintended side effects if you are + using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + Read + [Overview of Queue Management and queue.yaml](/cloud-tasks/docs/queue-yaml) + before using this method. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def UpdateQueue(self, request, context): + """Updates a queue. + + This method creates the queue if it does not exist and updates + the queue if it does exist. + + Queues created with this method allow tasks to live for a maximum of 31 + days. After a task is 31 days old, the task will be deleted regardless of whether + it was dispatched or not. + + WARNING: Using this method may have unintended side effects if you are + using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + Read + [Overview of Queue Management and queue.yaml](/cloud-tasks/docs/queue-yaml) + before using this method. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteQueue(self, request, context): + """Deletes a queue. + + This command will delete the queue even if it has tasks in it. + + Note: If you delete a queue, a queue with the same name can't be created + for 7 days. + + WARNING: Using this method may have unintended side effects if you are + using an App Engine `queue.yaml` or `queue.xml` file to manage your queues. + Read + [Overview of Queue Management and queue.yaml](/cloud-tasks/docs/queue-yaml) + before using this method. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def PurgeQueue(self, request, context): + """Purges a queue by deleting all of its tasks. + + All tasks created before this method is called are permanently deleted. + + Purge operations can take up to one minute to take effect. Tasks + might be dispatched before the purge takes effect. A purge is irreversible. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def PauseQueue(self, request, context): + """Pauses the queue. 
+ + If a queue is paused then the system will stop dispatching tasks + until the queue is resumed via + [ResumeQueue][google.cloud.tasks.v2beta2.CloudTasks.ResumeQueue]. Tasks can still be added + when the queue is paused. A queue is paused if its + [state][google.cloud.tasks.v2beta2.Queue.state] is [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ResumeQueue(self, request, context): + """Resume a queue. + + This method resumes a queue after it has been + [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED] or + [DISABLED][google.cloud.tasks.v2beta2.Queue.State.DISABLED]. The state of a queue is stored + in the queue's [state][google.cloud.tasks.v2beta2.Queue.state]; after calling this method it + will be set to [RUNNING][google.cloud.tasks.v2beta2.Queue.State.RUNNING]. + + WARNING: Resuming many high-QPS queues at the same time can + lead to target overloading. If you are resuming high-QPS + queues, follow the 500/50/5 pattern described in + [Managing Cloud Tasks Scaling Risks](/cloud-tasks/pdfs/managing-cloud-tasks-scaling-risks-2017-06-05.pdf). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetIamPolicy(self, request, context): + """Gets the access control policy for a [Queue][google.cloud.tasks.v2beta2.Queue]. + Returns an empty policy if the resource exists and does not have a policy + set. + + Authorization requires the following [Google IAM](/iam) permission on the + specified resource parent: + + * `cloudtasks.queues.getIamPolicy` + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SetIamPolicy(self, request, context): + """Sets the access control policy for a [Queue][google.cloud.tasks.v2beta2.Queue]. Replaces any existing + policy. + + Note: The Cloud Console does not check queue-level IAM permissions yet. + Project-level permissions are required to use the Cloud Console. + + Authorization requires the following [Google IAM](/iam) permission on the + specified resource parent: + + * `cloudtasks.queues.setIamPolicy` + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def TestIamPermissions(self, request, context): + """Returns permissions that a caller has on a [Queue][google.cloud.tasks.v2beta2.Queue]. + If the resource does not exist, this will return an empty set of + permissions, not a [NOT_FOUND][google.rpc.Code.NOT_FOUND] error. + + Note: This operation is designed to be used for building permission-aware + UIs and command-line tools, not for authorization checking. This operation + may "fail open" without warning. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTasks(self, request, context): + """Lists the tasks in a queue. + + By default, only the [BASIC][google.cloud.tasks.v2beta2.Task.View.BASIC] view is retrieved + due to performance considerations; + [response_view][google.cloud.tasks.v2beta2.ListTasksRequest.response_view] controls the + subset of information which is returned. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTask(self, request, context): + """Gets a task. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateTask(self, request, context): + """Creates a task and adds it to a queue. + + To add multiple tasks at the same time, use + [HTTP batching](/storage/docs/json_api/v1/how-tos/batch) + or the batching documentation for your client library, for example + https://developers.google.com/api-client-library/python/guide/batch. + + Tasks cannot be updated after creation; there is no UpdateTask command. + + * For [App Engine queues](google.cloud.tasks.v2beta2.AppEngineHttpTarget), + the maximum task size is 100KB. + * For [pull queues](google.cloud.tasks.v2beta2.PullTarget), this + the maximum task size is 1MB. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteTask(self, request, context): + """Deletes a task. + + A task can be deleted if it is scheduled or dispatched. A task + cannot be deleted if it has completed successfully or permanently + failed. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def LeaseTasks(self, request, context): + """Leases tasks from a pull queue for + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration]. + + This method is invoked by the worker to obtain a lease. The + worker must acknowledge the task via + [AcknowledgeTask][google.cloud.tasks.v2beta2.CloudTasks.AcknowledgeTask] after they have + performed the work associated with the task. + + The [payload][google.cloud.tasks.v2beta2.PullMessage.payload] is intended to store data that + the worker needs to perform the work associated with the task. To + return the payloads in the [response][google.cloud.tasks.v2beta2.LeaseTasksResponse], set + [response_view][google.cloud.tasks.v2beta2.LeaseTasksRequest.response_view] to + [FULL][google.cloud.tasks.v2beta2.Task.View.FULL]. + + A maximum of 10 qps of [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per + queue. [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] + is returned when this limit is + exceeded. [RESOURCE_EXHAUSTED][google.rpc.Code.RESOURCE_EXHAUSTED] + is also returned when + [max_tasks_dispatched_per_second][google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second] + is exceeded. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def AcknowledgeTask(self, request, context): + """Acknowledges a pull task. + + The worker, that is, the entity that + [leased][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] this task must call this method + to indicate that the work associated with the task has finished. + + The worker must acknowledge a task within the + [lease_duration][google.cloud.tasks.v2beta2.LeaseTasksRequest.lease_duration] or the lease + will expire and the task will become available to be leased + again. 
After the task is acknowledged, it will not be returned + by a later [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks], + [GetTask][google.cloud.tasks.v2beta2.CloudTasks.GetTask], or + [ListTasks][google.cloud.tasks.v2beta2.CloudTasks.ListTasks]. + + To acknowledge multiple tasks at the same time, use + [HTTP batching](/storage/docs/json_api/v1/how-tos/batch) + or the batching documentation for your client library, for example + https://developers.google.com/api-client-library/python/guide/batch. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RenewLease(self, request, context): + """Renew the current lease of a pull task. + + The worker can use this method to extend the lease by a new + duration, starting from now. The new task lease will be + returned in the task's [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time]. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CancelLease(self, request, context): + """Cancel a pull task's lease. + + The worker can use this method to cancel a task's lease by + setting its [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] to now. This will + make the task available to be leased to the next caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks]. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def RunTask(self, request, context): + """Forces a task to run now. + + When this method is called, Cloud Tasks will dispatch the task, even if + the task is already running, the queue has reached its [RateLimits][google.cloud.tasks.v2beta2.RateLimits] or + is [PAUSED][google.cloud.tasks.v2beta2.Queue.State.PAUSED]. + + This command is meant to be used for manual debugging. For + example, [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] can be used to retry a failed + task after a fix has been made or to manually force a task to be + dispatched now. + + The dispatched task is returned. That is, the task that is returned + contains the [status][google.cloud.tasks.v2beta2.Task.status] after the task is dispatched but + before the task is received by its target. + + If Cloud Tasks receives a successful response from the task's + target, then the task will be deleted; otherwise the task's + [schedule_time][google.cloud.tasks.v2beta2.Task.schedule_time] will be reset to the time that + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] was called plus the retry delay specified + in the queue's [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] returns + [NOT_FOUND][google.rpc.Code.NOT_FOUND] when it is called on a + task that has already succeeded or permanently failed. + + [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask] cannot be called on a + [pull task][google.cloud.tasks.v2beta2.PullMessage]. 
+ """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_CloudTasksServicer_to_server(servicer, server): + rpc_method_handlers = { + 'ListQueues': grpc.unary_unary_rpc_method_handler( + servicer.ListQueues, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListQueuesRequest.FromString, + response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListQueuesResponse.SerializeToString, + ), + 'GetQueue': grpc.unary_unary_rpc_method_handler( + servicer.GetQueue, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.GetQueueRequest.FromString, + response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.SerializeToString, + ), + 'CreateQueue': grpc.unary_unary_rpc_method_handler( + servicer.CreateQueue, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.CreateQueueRequest.FromString, + response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.SerializeToString, + ), + 'UpdateQueue': grpc.unary_unary_rpc_method_handler( + servicer.UpdateQueue, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.UpdateQueueRequest.FromString, + response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.SerializeToString, + ), + 'DeleteQueue': grpc.unary_unary_rpc_method_handler( + servicer.DeleteQueue, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.DeleteQueueRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'PurgeQueue': grpc.unary_unary_rpc_method_handler( + servicer.PurgeQueue, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.PurgeQueueRequest.FromString, + response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.SerializeToString, + ), + 'PauseQueue': grpc.unary_unary_rpc_method_handler( + servicer.PauseQueue, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.PauseQueueRequest.FromString, + response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.SerializeToString, + ), + 'ResumeQueue': grpc.unary_unary_rpc_method_handler( + servicer.ResumeQueue, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ResumeQueueRequest.FromString, + response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_queue__pb2.Queue.SerializeToString, + ), + 'GetIamPolicy': grpc.unary_unary_rpc_method_handler( + servicer.GetIamPolicy, + request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.FromString, + response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, + ), + 'SetIamPolicy': grpc.unary_unary_rpc_method_handler( + servicer.SetIamPolicy, + request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.FromString, + response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, + ), + 'TestIamPermissions': grpc.unary_unary_rpc_method_handler( + servicer.TestIamPermissions, + request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.FromString, + response_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.SerializeToString, 
+ ), + 'ListTasks': grpc.unary_unary_rpc_method_handler( + servicer.ListTasks, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListTasksRequest.FromString, + response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.ListTasksResponse.SerializeToString, + ), + 'GetTask': grpc.unary_unary_rpc_method_handler( + servicer.GetTask, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.GetTaskRequest.FromString, + response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.SerializeToString, + ), + 'CreateTask': grpc.unary_unary_rpc_method_handler( + servicer.CreateTask, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.CreateTaskRequest.FromString, + response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.SerializeToString, + ), + 'DeleteTask': grpc.unary_unary_rpc_method_handler( + servicer.DeleteTask, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.DeleteTaskRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'LeaseTasks': grpc.unary_unary_rpc_method_handler( + servicer.LeaseTasks, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.LeaseTasksRequest.FromString, + response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.LeaseTasksResponse.SerializeToString, + ), + 'AcknowledgeTask': grpc.unary_unary_rpc_method_handler( + servicer.AcknowledgeTask, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.AcknowledgeTaskRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'RenewLease': grpc.unary_unary_rpc_method_handler( + servicer.RenewLease, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.RenewLeaseRequest.FromString, + response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.SerializeToString, + ), + 'CancelLease': grpc.unary_unary_rpc_method_handler( + servicer.CancelLease, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.CancelLeaseRequest.FromString, + response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.SerializeToString, + ), + 'RunTask': grpc.unary_unary_rpc_method_handler( + servicer.RunTask, + request_deserializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_cloudtasks__pb2.RunTaskRequest.FromString, + response_serializer=google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_task__pb2.Task.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.tasks.v2beta2.CloudTasks', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/tasks/google/cloud/tasks_v2beta2/proto/queue_pb2.py b/tasks/google/cloud/tasks_v2beta2/proto/queue_pb2.py new file mode 100644 index 000000000000..14b496508a5a --- /dev/null +++ b/tasks/google/cloud/tasks_v2beta2/proto/queue_pb2.py @@ -0,0 +1,548 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
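+# The lease-based pull-queue protocol documented on the LeaseTasks and
+# AcknowledgeTask handlers above amounts to: lease a batch of tasks, perform
+# the work, and acknowledge each task before its lease expires. A minimal,
+# hypothetical worker sketch against CloudTasksStub (the endpoint, queue name,
+# and do_work() are assumptions; real code would use the GAPIC client with
+# proper credentials rather than an insecure channel):
+#
+#   import grpc
+#   from google.protobuf import duration_pb2
+#   from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2
+#   from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2_grpc
+#   from google.cloud.tasks_v2beta2.proto import task_pb2
+#
+#   stub = cloudtasks_pb2_grpc.CloudTasksStub(grpc.insecure_channel('localhost:8123'))
+#   lease = stub.LeaseTasks(cloudtasks_pb2.LeaseTasksRequest(
+#       parent='projects/my-project/locations/us-central1/queues/my-pull-queue',
+#       max_tasks=10,
+#       lease_duration=duration_pb2.Duration(seconds=60),
+#       response_view=task_pb2.Task.FULL))  # FULL so the payloads are returned
+#   for task in lease.tasks:
+#       do_work(task.pull_message.payload)  # application-defined work
+#       stub.AcknowledgeTask(cloudtasks_pb2.AcknowledgeTaskRequest(
+#           name=task.name, schedule_time=task.schedule_time))
+#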
+# source: google/cloud/tasks_v2beta2/proto/queue.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.cloud.tasks_v2beta2.proto import target_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/tasks_v2beta2/proto/queue.proto', + package='google.cloud.tasks.v2beta2', + syntax='proto3', + serialized_pb=_b('\n,google/cloud/tasks_v2beta2/proto/queue.proto\x12\x1agoogle.cloud.tasks.v2beta2\x1a\x1cgoogle/api/annotations.proto\x1a-google/cloud/tasks_v2beta2/proto/target.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xe1\x03\n\x05Queue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12Q\n\x16\x61pp_engine_http_target\x18\x03 \x01(\x0b\x32/.google.cloud.tasks.v2beta2.AppEngineHttpTargetH\x00\x12=\n\x0bpull_target\x18\x04 \x01(\x0b\x32&.google.cloud.tasks.v2beta2.PullTargetH\x00\x12;\n\x0brate_limits\x18\x05 \x01(\x0b\x32&.google.cloud.tasks.v2beta2.RateLimits\x12=\n\x0cretry_config\x18\x06 \x01(\x0b\x32\'.google.cloud.tasks.v2beta2.RetryConfig\x12\x36\n\x05state\x18\x07 \x01(\x0e\x32\'.google.cloud.tasks.v2beta2.Queue.State\x12.\n\npurge_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"E\n\x05State\x12\x15\n\x11STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07RUNNING\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\x0c\n\x08\x44ISABLED\x10\x03\x42\r\n\x0btarget_type\"k\n\nRateLimits\x12\'\n\x1fmax_tasks_dispatched_per_second\x18\x01 \x01(\x01\x12\x16\n\x0emax_burst_size\x18\x02 \x01(\x05\x12\x1c\n\x14max_concurrent_tasks\x18\x03 \x01(\x05\"\x81\x02\n\x0bRetryConfig\x12\x16\n\x0cmax_attempts\x18\x01 \x01(\x05H\x00\x12\x1c\n\x12unlimited_attempts\x18\x02 \x01(\x08H\x00\x12\x35\n\x12max_retry_duration\x18\x03 \x01(\x0b\x32\x19.google.protobuf.Duration\x12.\n\x0bmin_backoff\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12.\n\x0bmax_backoff\x18\x05 \x01(\x0b\x32\x19.google.protobuf.Duration\x12\x15\n\rmax_doublings\x18\x06 \x01(\x05\x42\x0e\n\x0cnum_attemptsBo\n\x1e\x63om.google.cloud.tasks.v2beta2B\nQueueProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasksb\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) + + + +_QUEUE_STATE = _descriptor.EnumDescriptor( + name='State', + full_name='google.cloud.tasks.v2beta2.Queue.State', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='STATE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RUNNING', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PAUSED', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DISABLED', 
index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=616, + serialized_end=685, +) +_sym_db.RegisterEnumDescriptor(_QUEUE_STATE) + + +_QUEUE = _descriptor.Descriptor( + name='Queue', + full_name='google.cloud.tasks.v2beta2.Queue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.tasks.v2beta2.Queue.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='app_engine_http_target', full_name='google.cloud.tasks.v2beta2.Queue.app_engine_http_target', index=1, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pull_target', full_name='google.cloud.tasks.v2beta2.Queue.pull_target', index=2, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='rate_limits', full_name='google.cloud.tasks.v2beta2.Queue.rate_limits', index=3, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='retry_config', full_name='google.cloud.tasks.v2beta2.Queue.retry_config', index=4, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='state', full_name='google.cloud.tasks.v2beta2.Queue.state', index=5, + number=7, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='purge_time', full_name='google.cloud.tasks.v2beta2.Queue.purge_time', index=6, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _QUEUE_STATE, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='target_type', full_name='google.cloud.tasks.v2beta2.Queue.target_type', + index=0, containing_type=None, fields=[]), + ], + serialized_start=219, + serialized_end=700, +) + + +_RATELIMITS = _descriptor.Descriptor( + name='RateLimits', + full_name='google.cloud.tasks.v2beta2.RateLimits', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='max_tasks_dispatched_per_second', full_name='google.cloud.tasks.v2beta2.RateLimits.max_tasks_dispatched_per_second', index=0, + number=1, type=1, 
cpp_type=5, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_burst_size', full_name='google.cloud.tasks.v2beta2.RateLimits.max_burst_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_concurrent_tasks', full_name='google.cloud.tasks.v2beta2.RateLimits.max_concurrent_tasks', index=2, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=702, + serialized_end=809, +) + + +_RETRYCONFIG = _descriptor.Descriptor( + name='RetryConfig', + full_name='google.cloud.tasks.v2beta2.RetryConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='max_attempts', full_name='google.cloud.tasks.v2beta2.RetryConfig.max_attempts', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='unlimited_attempts', full_name='google.cloud.tasks.v2beta2.RetryConfig.unlimited_attempts', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_retry_duration', full_name='google.cloud.tasks.v2beta2.RetryConfig.max_retry_duration', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='min_backoff', full_name='google.cloud.tasks.v2beta2.RetryConfig.min_backoff', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_backoff', full_name='google.cloud.tasks.v2beta2.RetryConfig.max_backoff', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='max_doublings', full_name='google.cloud.tasks.v2beta2.RetryConfig.max_doublings', index=5, + number=6, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='num_attempts', full_name='google.cloud.tasks.v2beta2.RetryConfig.num_attempts', + index=0, containing_type=None, fields=[]), + ], + serialized_start=812, + serialized_end=1069, +) + +_QUEUE.fields_by_name['app_engine_http_target'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2._APPENGINEHTTPTARGET +_QUEUE.fields_by_name['pull_target'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2._PULLTARGET +_QUEUE.fields_by_name['rate_limits'].message_type = _RATELIMITS +_QUEUE.fields_by_name['retry_config'].message_type = _RETRYCONFIG +_QUEUE.fields_by_name['state'].enum_type = _QUEUE_STATE +_QUEUE.fields_by_name['purge_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_QUEUE_STATE.containing_type = _QUEUE +_QUEUE.oneofs_by_name['target_type'].fields.append( + _QUEUE.fields_by_name['app_engine_http_target']) +_QUEUE.fields_by_name['app_engine_http_target'].containing_oneof = _QUEUE.oneofs_by_name['target_type'] +_QUEUE.oneofs_by_name['target_type'].fields.append( + _QUEUE.fields_by_name['pull_target']) +_QUEUE.fields_by_name['pull_target'].containing_oneof = _QUEUE.oneofs_by_name['target_type'] +_RETRYCONFIG.fields_by_name['max_retry_duration'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_RETRYCONFIG.fields_by_name['min_backoff'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_RETRYCONFIG.fields_by_name['max_backoff'].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_RETRYCONFIG.oneofs_by_name['num_attempts'].fields.append( + _RETRYCONFIG.fields_by_name['max_attempts']) +_RETRYCONFIG.fields_by_name['max_attempts'].containing_oneof = _RETRYCONFIG.oneofs_by_name['num_attempts'] +_RETRYCONFIG.oneofs_by_name['num_attempts'].fields.append( + _RETRYCONFIG.fields_by_name['unlimited_attempts']) +_RETRYCONFIG.fields_by_name['unlimited_attempts'].containing_oneof = _RETRYCONFIG.oneofs_by_name['num_attempts'] +DESCRIPTOR.message_types_by_name['Queue'] = _QUEUE +DESCRIPTOR.message_types_by_name['RateLimits'] = _RATELIMITS +DESCRIPTOR.message_types_by_name['RetryConfig'] = _RETRYCONFIG +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Queue = _reflection.GeneratedProtocolMessageType('Queue', (_message.Message,), dict( + DESCRIPTOR = _QUEUE, + __module__ = 'google.cloud.tasks_v2beta2.proto.queue_pb2' + , + __doc__ = """A queue is a container of related tasks. Queues are configured to manage + how those tasks are dispatched. Configurable properties include rate + limits, retry options, target types, and others. + + + Attributes: + name: + The queue name. The queue name must have the following + format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID`` + - ``PROJECT_ID`` can contain letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), colons (:), or periods (.). For more + information, see `Identifying projects `__ - ``LOCATION_ID`` is the + canonical ID for the queue's location. The list of + available locations can be obtained by calling [ListLocatio + ns][google.cloud.location.Locations.ListLocations]. For + more information, see + https://cloud.google.com/about/locations/. - ``QUEUE_ID`` can + contain letters ([A-Za-z]), numbers ([0-9]), or hyphens + (-). The maximum length is 100 characters. Caller-specified + and required in [CreateQueue][google.cloud.tasks.v2beta2.Cloud + Tasks.CreateQueue], after which it becomes output only. 
+ target_type:
+ The queue's target. The target applies to all tasks in the
+ queue. Caller-specified and required in [CreateQueue][google.
+ cloud.tasks.v2beta2.CloudTasks.CreateQueue][], after which the
+ queue config type becomes output only, though fields within
+ the config are mutable.
+ app_engine_http_target:
+ App Engine HTTP target. An App Engine queue is a queue that
+ has an [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEng
+ ineHttpTarget].
+ pull_target:
+ Pull target. A pull queue is a queue that has a
+ [PullTarget][google.cloud.tasks.v2beta2.PullTarget].
+ rate_limits:
+ Rate limits for task dispatches.
+ [rate\_limits][google.cloud.tasks.v2beta2.Queue.rate\_limits]
+ and [retry\_config][google.cloud.tasks.v2beta2.Queue.retry\_co
+ nfig] are related because they both control task attempts;
+ however, they control how tasks are attempted in different
+ ways: -
+ [rate\_limits][google.cloud.tasks.v2beta2.Queue.rate\_limits]
+ controls the total rate of dispatches from a queue (i.e. all
+ traffic dispatched from the queue, regardless of whether
+ the dispatch is from a first attempt or a retry). - [retry
+ \_config][google.cloud.tasks.v2beta2.Queue.retry\_config]
+ controls what happens to a particular task after its first
+ attempt fails. That is, [retry\_config][google.cloud.tas
+ ks.v2beta2.Queue.retry\_config] controls task retries (the
+ second attempt, third attempt, etc.).
+ retry_config:
+ Settings that determine the retry behavior. - For tasks
+ created using Cloud Tasks: the queue-level retry settings
+ apply to all tasks in the queue that were created using Cloud
+ Tasks. Retry settings cannot be set on individual tasks. -
+ For tasks created using the App Engine SDK: the queue-level
+ retry settings apply to all tasks in the queue which do not
+ have retry settings explicitly set on the task and were
+ created by the App Engine SDK. See `App Engine
+ documentation
+ `__.
+ state:
+ Output only. The state of the queue. ``state`` can only be
+ changed by calling [PauseQueue][google.cloud.tasks.v2beta2.Clou
+ dTasks.PauseQueue], [ResumeQueue][google.cloud.tasks.v2beta2.C
+ loudTasks.ResumeQueue], or uploading `queue.yaml/xml
+ `__. [UpdateQueue][goo
+ gle.cloud.tasks.v2beta2.CloudTasks.UpdateQueue] cannot be used
+ to change ``state``.
+ purge_time:
+ Output only. The last time this queue was purged. All tasks
+ that were
+ [created][google.cloud.tasks.v2beta2.Task.create\_time] before
+ this time were purged. A queue can be purged using [PurgeQueu
+ e][google.cloud.tasks.v2beta2.CloudTasks.PurgeQueue], the `App
+ Engine Task Queue SDK, or the Cloud Console
+ `__. Purge
+ time will be truncated to the nearest microsecond. Purge time
+ will be unset if the queue has never been purged.
+ """,
+ # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.Queue)
+ ))
+_sym_db.RegisterMessage(Queue)
+
+RateLimits = _reflection.GeneratedProtocolMessageType('RateLimits', (_message.Message,), dict(
+ DESCRIPTOR = _RATELIMITS,
+ __module__ = 'google.cloud.tasks_v2beta2.proto.queue_pb2'
+ ,
+ __doc__ = """Rate limits.
+
+ This message determines the maximum rate that tasks can be dispatched by
+ a queue, regardless of whether the dispatch is a first task attempt or a
+ retry.
+
+ Note: The debugging command,
+ [RunTask][google.cloud.tasks.v2beta2.CloudTasks.RunTask], will run a
+ task even if the queue has reached its
+ [RateLimits][google.cloud.tasks.v2beta2.RateLimits].
+ + + Attributes: + max_tasks_dispatched_per_second: + The maximum rate at which tasks are dispatched from this + queue. If unspecified when the queue is created, Cloud Tasks + will pick the default. - For App Engine queues, the maximum + allowed value is 500. - This field is output only for `pull + queues `__. In addition + to the ``max_tasks_dispatched_per_second`` limit, a maximum + of 10 QPS of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + requests are allowed per pull queue. This field has the same + meaning as `rate in queue.yaml/xml + `__. + max_burst_size: + Output only. The max burst size. Max burst size limits how + fast tasks in queue are processed when many tasks are in the + queue and the rate is high. This field allows the queue to + have a high rate so processing starts shortly after a task is + enqueued, but still limits resource usage when many tasks are + enqueued in a short period of time. The `token bucket + `__ algorithm is used + to control the rate of task dispatches. Each queue has a token + bucket that holds tokens, up to the maximum specified by + ``max_burst_size``. Each time a task is dispatched, a token is + removed from the bucket. Tasks will be dispatched until the + queue's bucket runs out of tokens. The bucket will be + continuously refilled with new tokens based on [max\_tasks\_di + spatched\_per\_second][google.cloud.tasks.v2beta2.RateLimits.m + ax\_tasks\_dispatched\_per\_second]. Cloud Tasks will pick + the value of ``max_burst_size`` based on the value of [max\_ta + sks\_dispatched\_per\_second][google.cloud.tasks.v2beta2.RateL + imits.max\_tasks\_dispatched\_per\_second]. For App Engine + queues that were created or updated using ``queue.yaml/xml``, + ``max_burst_size`` is equal to `bucket\_size `__. Since + ``max_burst_size`` is output only, if [UpdateQueue][google.clo + ud.tasks.v2beta2.CloudTasks.UpdateQueue] is called on a queue + created by ``queue.yaml/xml``, ``max_burst_size`` will be + reset based on the value of [max\_tasks\_dispatched\_per\_seco + nd][google.cloud.tasks.v2beta2.RateLimits.max\_tasks\_dispatch + ed\_per\_second], regardless of whether [max\_tasks\_dispatche + d\_per\_second][google.cloud.tasks.v2beta2.RateLimits.max\_tas + ks\_dispatched\_per\_second] is updated. + max_concurrent_tasks: + The maximum number of concurrent tasks that Cloud Tasks allows + to be dispatched for this queue. After this threshold has been + reached, Cloud Tasks stops dispatching tasks until the number + of concurrent requests decreases. If unspecified when the + queue is created, Cloud Tasks will pick the default. The + maximum allowed value is 5,000. This field is output only for + `pull queues `__ and + always -1, which indicates no limit. No other queue types can + have ``max_concurrent_tasks`` set to -1. This field has the + same meaning as `max\_concurrent\_requests in queue.yaml/xml < + /appengine/docs/standard/python/config/queueref#max_concurrent + _requests>`__. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.RateLimits) + )) +_sym_db.RegisterMessage(RateLimits) + +RetryConfig = _reflection.GeneratedProtocolMessageType('RetryConfig', (_message.Message,), dict( + DESCRIPTOR = _RETRYCONFIG, + __module__ = 'google.cloud.tasks_v2beta2.proto.queue_pb2' + , + __doc__ = """Retry config. + + These settings determine how a failed task attempt is retried. + + + Attributes: + num_attempts: + Number of attempts per task. If unspecified when the queue is + created, Cloud Tasks will pick the default. 
This field has
+ the same meaning as `task\_retry\_limit in queue.yaml/xml
+ `__.
+ max_attempts:
+ The maximum number of attempts for a task. Cloud Tasks will
+ attempt the task ``max_attempts`` times (that is, if the first
+ attempt fails, then there will be ``max_attempts - 1``
+ retries). Must be > 0.
+ unlimited_attempts:
+ If true, then the number of attempts is unlimited.
+ max_retry_duration:
+ If positive, ``max_retry_duration`` specifies the time limit
+ for retrying a failed task, measured from when the task was
+ first attempted. Once ``max_retry_duration`` time has passed
+ *and* the task has been attempted [max\_attempts][google.cloud
+ .tasks.v2beta2.RetryConfig.max\_attempts] times, no further
+ attempts will be made and the task will be deleted. If zero,
+ then the task age is unlimited. If unspecified when the queue
+ is created, Cloud Tasks will pick the default. This field is
+ output only for `pull queues
+ `__.
+ ``max_retry_duration`` will be truncated to the nearest
+ second. This field has the same meaning as `task\_age\_limit
+ in queue.yaml/xml `__.
+ min_backoff:
+ A task will be `scheduled `__ for retry
+ between [min\_backoff][google.cloud.tasks.v2beta2.RetryConfig.
+ min\_backoff] and [max\_backoff][google.cloud.tasks.v2beta2.Re
+ tryConfig.max\_backoff] duration after it fails, if the
+ queue's [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]
+ specifies that the task should be retried. If unspecified
+ when the queue is created, Cloud Tasks will pick the default.
+ This field is output only for `pull queues
+ `__. ``min_backoff``
+ will be truncated to the nearest second. This field has the
+ same meaning as `min\_backoff\_seconds in queue.yaml/xml `
+ __.
+ max_backoff:
+ A task will be `scheduled `__ for retry
+ between [min\_backoff][google.cloud.tasks.v2beta2.RetryConfig.
+ min\_backoff] and [max\_backoff][google.cloud.tasks.v2beta2.Re
+ tryConfig.max\_backoff] duration after it fails, if the
+ queue's [RetryConfig][google.cloud.tasks.v2beta2.RetryConfig]
+ specifies that the task should be retried. If unspecified
+ when the queue is created, Cloud Tasks will pick the default.
+ This field is output only for `pull queues
+ `__. ``max_backoff``
+ will be truncated to the nearest second. This field has the
+ same meaning as `max\_backoff\_seconds in queue.yaml/xml `
+ __.
+ max_doublings:
+ The time between retries will double ``max_doublings`` times.
+ A task's retry interval starts at [min\_backoff][google.cloud.
+ tasks.v2beta2.RetryConfig.min\_backoff], then doubles
+ ``max_doublings`` times, then increases linearly, and finally
+ retries at intervals of [max\_backoff][google.cloud.ta
+ sks.v2beta2.RetryConfig.max\_backoff] up to [max\_attempts][go
+ ogle.cloud.tasks.v2beta2.RetryConfig.max\_attempts] times.
+ For example, if [min\_backoff][google.cloud.tasks.v2beta2.Retr
+ yConfig.min\_backoff] is 10s, [max\_backoff][google.cloud.task
+ s.v2beta2.RetryConfig.max\_backoff] is 300s, and
+ ``max_doublings`` is 3, then the task will first be retried
+ in 10s. The retry interval will double three times, and then
+ increase linearly by 2^3 \* 10s. Finally, the task will retry
+ at intervals of [max\_backoff][google.cloud.tasks.v2beta2.Retr
+ yConfig.max\_backoff] until the task has been attempted [max\_
+ attempts][google.cloud.tasks.v2beta2.RetryConfig.max\_attempts
+ ] times. Thus, the requests will retry at 10s, 20s, 40s, 80s,
+ 160s, 240s, 300s, 300s, .... If unspecified when the queue is
+ created, Cloud Tasks will pick the default.
This field is + output only for `pull queues + `__. This field has + the same meaning as `max\_doublings in queue.yaml/xml `__. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.RetryConfig) + )) +_sym_db.RegisterMessage(RetryConfig) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\036com.google.cloud.tasks.v2beta2B\nQueueProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks')) +# @@protoc_insertion_point(module_scope) diff --git a/tasks/google/cloud/tasks_v2beta2/proto/queue_pb2_grpc.py b/tasks/google/cloud/tasks_v2beta2/proto/queue_pb2_grpc.py new file mode 100644 index 000000000000..a89435267cb2 --- /dev/null +++ b/tasks/google/cloud/tasks_v2beta2/proto/queue_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + diff --git a/tasks/google/cloud/tasks_v2beta2/proto/target_pb2.py b/tasks/google/cloud/tasks_v2beta2/proto/target_pb2.py new file mode 100644 index 000000000000..2609f9d3b8ab --- /dev/null +++ b/tasks/google/cloud/tasks_v2beta2/proto/target_pb2.py @@ -0,0 +1,679 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/cloud/tasks_v2beta2/proto/target.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/tasks_v2beta2/proto/target.proto', + package='google.cloud.tasks.v2beta2', + syntax='proto3', + serialized_pb=_b('\n-google/cloud/tasks_v2beta2/proto/target.proto\x12\x1agoogle.cloud.tasks.v2beta2\x1a\x1cgoogle/api/annotations.proto\x1a\x1egoogle/protobuf/duration.proto\"\x0c\n\nPullTarget\"+\n\x0bPullMessage\x12\x0f\n\x07payload\x18\x01 \x01(\x0c\x12\x0b\n\x03tag\x18\x02 \x01(\t\"h\n\x13\x41ppEngineHttpTarget\x12Q\n\x1b\x61pp_engine_routing_override\x18\x01 \x01(\x0b\x32,.google.cloud.tasks.v2beta2.AppEngineRouting\"\xc4\x02\n\x14\x41ppEngineHttpRequest\x12;\n\x0bhttp_method\x18\x01 \x01(\x0e\x32&.google.cloud.tasks.v2beta2.HttpMethod\x12H\n\x12\x61pp_engine_routing\x18\x02 \x01(\x0b\x32,.google.cloud.tasks.v2beta2.AppEngineRouting\x12\x14\n\x0crelative_url\x18\x03 \x01(\t\x12N\n\x07headers\x18\x04 \x03(\x0b\x32=.google.cloud.tasks.v2beta2.AppEngineHttpRequest.HeadersEntry\x12\x0f\n\x07payload\x18\x05 \x01(\x0c\x1a.\n\x0cHeadersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"T\n\x10\x41ppEngineRouting\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x10\n\x08instance\x18\x03 \x01(\t\x12\x0c\n\x04host\x18\x04 \x01(\t*[\n\nHttpMethod\x12\x1b\n\x17HTTP_METHOD_UNSPECIFIED\x10\x00\x12\x08\n\x04POST\x10\x01\x12\x07\n\x03GET\x10\x02\x12\x08\n\x04HEAD\x10\x03\x12\x07\n\x03PUT\x10\x04\x12\n\n\x06\x44\x45LETE\x10\x05\x42p\n\x1e\x63om.google.cloud.tasks.v2beta2B\x0bTargetProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasksb\x06proto3') + , 
+ dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,]) + +_HTTPMETHOD = _descriptor.EnumDescriptor( + name='HttpMethod', + full_name='google.cloud.tasks.v2beta2.HttpMethod', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='HTTP_METHOD_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='POST', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='GET', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HEAD', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='PUT', index=4, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DELETE', index=5, number=5, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=717, + serialized_end=808, +) +_sym_db.RegisterEnumDescriptor(_HTTPMETHOD) + +HttpMethod = enum_type_wrapper.EnumTypeWrapper(_HTTPMETHOD) +HTTP_METHOD_UNSPECIFIED = 0 +POST = 1 +GET = 2 +HEAD = 3 +PUT = 4 +DELETE = 5 + + + +_PULLTARGET = _descriptor.Descriptor( + name='PullTarget', + full_name='google.cloud.tasks.v2beta2.PullTarget', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=139, + serialized_end=151, +) + + +_PULLMESSAGE = _descriptor.Descriptor( + name='PullMessage', + full_name='google.cloud.tasks.v2beta2.PullMessage', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='payload', full_name='google.cloud.tasks.v2beta2.PullMessage.payload', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='tag', full_name='google.cloud.tasks.v2beta2.PullMessage.tag', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=153, + serialized_end=196, +) + + +_APPENGINEHTTPTARGET = _descriptor.Descriptor( + name='AppEngineHttpTarget', + full_name='google.cloud.tasks.v2beta2.AppEngineHttpTarget', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='app_engine_routing_override', full_name='google.cloud.tasks.v2beta2.AppEngineHttpTarget.app_engine_routing_override', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=198, + serialized_end=302, +) + + +_APPENGINEHTTPREQUEST_HEADERSENTRY = 
_descriptor.Descriptor( + name='HeadersEntry', + full_name='google.cloud.tasks.v2beta2.AppEngineHttpRequest.HeadersEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.cloud.tasks.v2beta2.AppEngineHttpRequest.HeadersEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='google.cloud.tasks.v2beta2.AppEngineHttpRequest.HeadersEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=583, + serialized_end=629, +) + +_APPENGINEHTTPREQUEST = _descriptor.Descriptor( + name='AppEngineHttpRequest', + full_name='google.cloud.tasks.v2beta2.AppEngineHttpRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='http_method', full_name='google.cloud.tasks.v2beta2.AppEngineHttpRequest.http_method', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='app_engine_routing', full_name='google.cloud.tasks.v2beta2.AppEngineHttpRequest.app_engine_routing', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='relative_url', full_name='google.cloud.tasks.v2beta2.AppEngineHttpRequest.relative_url', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='headers', full_name='google.cloud.tasks.v2beta2.AppEngineHttpRequest.headers', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='payload', full_name='google.cloud.tasks.v2beta2.AppEngineHttpRequest.payload', index=4, + number=5, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_APPENGINEHTTPREQUEST_HEADERSENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=305, + serialized_end=629, +) + + +_APPENGINEROUTING = 
_descriptor.Descriptor( + name='AppEngineRouting', + full_name='google.cloud.tasks.v2beta2.AppEngineRouting', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='service', full_name='google.cloud.tasks.v2beta2.AppEngineRouting.service', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='version', full_name='google.cloud.tasks.v2beta2.AppEngineRouting.version', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='instance', full_name='google.cloud.tasks.v2beta2.AppEngineRouting.instance', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='host', full_name='google.cloud.tasks.v2beta2.AppEngineRouting.host', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=631, + serialized_end=715, +) + +_APPENGINEHTTPTARGET.fields_by_name['app_engine_routing_override'].message_type = _APPENGINEROUTING +_APPENGINEHTTPREQUEST_HEADERSENTRY.containing_type = _APPENGINEHTTPREQUEST +_APPENGINEHTTPREQUEST.fields_by_name['http_method'].enum_type = _HTTPMETHOD +_APPENGINEHTTPREQUEST.fields_by_name['app_engine_routing'].message_type = _APPENGINEROUTING +_APPENGINEHTTPREQUEST.fields_by_name['headers'].message_type = _APPENGINEHTTPREQUEST_HEADERSENTRY +DESCRIPTOR.message_types_by_name['PullTarget'] = _PULLTARGET +DESCRIPTOR.message_types_by_name['PullMessage'] = _PULLMESSAGE +DESCRIPTOR.message_types_by_name['AppEngineHttpTarget'] = _APPENGINEHTTPTARGET +DESCRIPTOR.message_types_by_name['AppEngineHttpRequest'] = _APPENGINEHTTPREQUEST +DESCRIPTOR.message_types_by_name['AppEngineRouting'] = _APPENGINEROUTING +DESCRIPTOR.enum_types_by_name['HttpMethod'] = _HTTPMETHOD +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +PullTarget = _reflection.GeneratedProtocolMessageType('PullTarget', (_message.Message,), dict( + DESCRIPTOR = _PULLTARGET, + __module__ = 'google.cloud.tasks_v2beta2.proto.target_pb2' + , + __doc__ = """Pull target. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.PullTarget) + )) +_sym_db.RegisterMessage(PullTarget) + +PullMessage = _reflection.GeneratedProtocolMessageType('PullMessage', (_message.Message,), dict( + DESCRIPTOR = _PULLMESSAGE, + __module__ = 'google.cloud.tasks_v2beta2.proto.target_pb2' + , + __doc__ = """The pull message contains data that can be used by the caller of + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] to + process the task. 
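+
+  For illustration only, assuming a ``CloudTasksClient`` named ``client``
+  and a queue created with ``pull_target`` set (all resource names below
+  are placeholders), a pull task might be created as::
+
+      parent = client.queue_path('my-project', 'us-central1', 'my-pull-queue')
+      task = {'pull_message': {'payload': b'user-1234', 'tag': 'user-1234'}}
+      response = client.create_task(parent, task)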
+ + This proto can only be used for tasks in a queue which has + [pull\_target][google.cloud.tasks.v2beta2.Queue.pull\_target] set. + + + Attributes: + payload: + A data payload consumed by the worker to execute the task. + tag: + The task's tag. Tags allow similar tasks to be processed in a + batch. If you label tasks with a tag, your worker can [lease + tasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] with + the same tag using + [filter][google.cloud.tasks.v2beta2.LeaseTasksRequest.filter]. + For example, if you want to aggregate the events associated + with a specific user once a day, you could tag tasks with the + user ID. The task's tag can only be set when the [task is + created][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + The tag must be less than 500 characters. SDK compatibility: + Although the SDK allows tags to be either string or `bytes `__, only UTF-8 encoded + tags can be used in Cloud Tasks. If a tag isn't UTF-8 encoded, + the tag will be empty when the task is returned by Cloud + Tasks. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.PullMessage) + )) +_sym_db.RegisterMessage(PullMessage) + +AppEngineHttpTarget = _reflection.GeneratedProtocolMessageType('AppEngineHttpTarget', (_message.Message,), dict( + DESCRIPTOR = _APPENGINEHTTPTARGET, + __module__ = 'google.cloud.tasks_v2beta2.proto.target_pb2' + , + __doc__ = """App Engine HTTP target. + + The task will be delivered to the App Engine application hostname + specified by its + [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget] + and + [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest]. + The documentation for + [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] + explains how the task's host URL is constructed. + + Using + [AppEngineHttpTarget][google.cloud.tasks.v2beta2.AppEngineHttpTarget] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + + Attributes: + app_engine_routing_override: + Overrides for the [task-level app\_engine\_routing][google.clo + ud.tasks.v2beta2.AppEngineHttpRequest.app\_engine\_routing]. + If set, ``app_engine_routing_override`` is used for all tasks + in the queue, no matter what the setting is for the [task- + level app\_engine\_routing][google.cloud.tasks.v2beta2.AppEngi + neHttpRequest.app\_engine\_routing]. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.AppEngineHttpTarget) + )) +_sym_db.RegisterMessage(AppEngineHttpTarget) + +AppEngineHttpRequest = _reflection.GeneratedProtocolMessageType('AppEngineHttpRequest', (_message.Message,), dict( + + HeadersEntry = _reflection.GeneratedProtocolMessageType('HeadersEntry', (_message.Message,), dict( + DESCRIPTOR = _APPENGINEHTTPREQUEST_HEADERSENTRY, + __module__ = 'google.cloud.tasks_v2beta2.proto.target_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.AppEngineHttpRequest.HeadersEntry) + )) + , + DESCRIPTOR = _APPENGINEHTTPREQUEST, + __module__ = 'google.cloud.tasks_v2beta2.proto.target_pb2' + , + __doc__ = """App Engine HTTP request. + + The message defines the HTTP request that is sent to an App Engine app + when the task is dispatched. + + This proto can only be used for tasks in a queue which has + [app\_engine\_http\_target][google.cloud.tasks.v2beta2.Queue.app\_engine\_http\_target] + set. 
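+
+  For illustration only, a minimal App Engine task might be created as
+  follows (the queue, URL, and payload are placeholders; ``1`` is the
+  numeric value of ``HttpMethod.POST``)::
+
+      task = {
+          'app_engine_http_request': {
+              'http_method': 1,  # HttpMethod.POST
+              'relative_url': '/update_counter',
+              'payload': b'{"counter": 1}',
+          },
+      }
+      response = client.create_task(parent, task)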
+ + Using + [AppEngineHttpRequest][google.cloud.tasks.v2beta2.AppEngineHttpRequest] + requires + ```appengine.applications.get`` `__ + Google IAM permission for the project and the following scope: + + ``https://www.googleapis.com/auth/cloud-platform`` + + The task will be delivered to the App Engine app which belongs to the + same project as the queue. For more information, see `How Requests are + Routed `__ and + how routing is affected by `dispatch + files `__. + + The [AppEngineRouting][google.cloud.tasks.v2beta2.AppEngineRouting] used + to construct the URL that the task is delivered to can be set at the + queue-level or task-level: + + - If set, + [app\_engine\_routing\_override][google.cloud.tasks.v2beta2.AppEngineHttpTarget.app\_engine\_routing\_override] + is used for all tasks in the queue, no matter what the setting is for + the [task-level + app\_engine\_routing][google.cloud.tasks.v2beta2.AppEngineHttpRequest.app\_engine\_routing]. + + The ``url`` that the task will be sent to is: + + - ``url =`` [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] + ``+`` + [relative\_url][google.cloud.tasks.v2beta2.AppEngineHttpRequest.relative\_url] + + The task attempt has succeeded if the app's request handler returns an + HTTP response code in the range [``200`` - ``299``]. ``503`` is + considered an App Engine system error instead of an application error. + Requests returning error ``503`` will be retried regardless of retry + configuration and not counted against retry counts. Any other response + code or a failure to receive a response before the deadline is a failed + attempt. + + + Attributes: + http_method: + The HTTP method to use for the request. The default is POST. + The app's request handler for the task's target URL must be + able to handle HTTP requests with this http\_method, otherwise + the task attempt will fail with error code 405 (Method Not + Allowed). See `Writing a push task request handler + `__ and the + documentation for the request handlers in the language your + app is written in e.g. `Python Request Handler + `__. + app_engine_routing: + Task-level setting for App Engine routing. If set, [app\_engi + ne\_routing\_override][google.cloud.tasks.v2beta2.AppEngineHtt + pTarget.app\_engine\_routing\_override] is used for all tasks + in the queue, no matter what the setting is for the [task- + level app\_engine\_routing][google.cloud.tasks.v2beta2.AppEngi + neHttpRequest.app\_engine\_routing]. + relative_url: + The relative URL. The relative URL must begin with "/" and + must be a valid HTTP relative URL. It can contain a path and + query string arguments. If the relative URL is empty, then the + root path "/" will be used. No spaces are allowed, and the + maximum length allowed is 2083 characters. + headers: + HTTP request headers. This map contains the header field + names and values. Headers can be set when the `task is created + `__. + Repeated headers are not supported but a header value can + contain commas. Cloud Tasks sets some headers to default + values: - ``User-Agent``: By default, this header is + ``"AppEngine-Google; (+http://code.google.com/appengine)"``. + This header can be modified, but Cloud Tasks will append + ``"AppEngine-Google; (+http://code.google.com/appengine)"`` to + the modified ``User-Agent``. If the task has a [payload][g + oogle.cloud.tasks.v2beta2.AppEngineHttpRequest.payload], Cloud + Tasks sets the following headers: - ``Content-Type``: By + default, the ``Content-Type`` header is set to + ``"application/octet-stream"``. 
The default can be overridden + by explicitly setting ``Content-Type`` to a particular + media type when the `task is created + `__. For + example, ``Content-Type`` can be set to + ``"application/json"``. - ``Content-Length``: This is + computed by Cloud Tasks. This value is output only. It + cannot be changed. The headers below cannot be set or + overridden: - ``Host`` - ``X-Google-*`` - + ``X-AppEngine-*`` In addition, Cloud Tasks sets some headers + when the task is dispatched, such as headers containing + information about the task; see `request headers + `__. These headers are set + only when the task is dispatched, so they are not visible when + the task is returned in a Cloud Tasks response. Although + there is no specific limit for the maximum number of headers + or the size, there is a limit on the maximum size of the + [Task][google.cloud.tasks.v2beta2.Task]. For more information, + see the + [CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask] + documentation. + payload: + Payload. The payload will be sent as the HTTP message body. A + message body, and thus a payload, is allowed only if the HTTP + method is POST or PUT. It is an error to set a data payload on + a task with an incompatible + [HttpMethod][google.cloud.tasks.v2beta2.HttpMethod]. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.AppEngineHttpRequest) + )) +_sym_db.RegisterMessage(AppEngineHttpRequest) +_sym_db.RegisterMessage(AppEngineHttpRequest.HeadersEntry) + +AppEngineRouting = _reflection.GeneratedProtocolMessageType('AppEngineRouting', (_message.Message,), dict( + DESCRIPTOR = _APPENGINEROUTING, + __module__ = 'google.cloud.tasks_v2beta2.proto.target_pb2' + , + __doc__ = """App Engine Routing. + + For more information about services, versions, and instances see `An + Overview of App + Engine `__, + `Microservices Architecture on Google App + Engine `__, `App + Engine Standard request + routing `__, + and `App Engine Flex request + routing `__. + + + Attributes: + service: + App service. By default, the task is sent to the service + which is the default service when the task is attempted + ("default"). For some queues or tasks which were created + using the App Engine Task Queue API, + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable into [service][google.cloud.tasks.v2beta2.AppEngi + neRouting.service], [version][google.cloud.tasks.v2beta2.AppEn + gineRouting.version], and [instance][google.cloud.tasks.v2beta + 2.AppEngineRouting.instance]. For example, some tasks which + were created using the App Engine SDK use a custom domain + name; custom domains are not parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable, then [service][google.cloud.tasks.v2beta2.AppEng + ineRouting.service], [version][google.cloud.tasks.v2beta2.AppE + ngineRouting.version], and [instance][google.cloud.tasks.v2bet + a2.AppEngineRouting.instance] are the empty string. + version: + App version. By default, the task is sent to the version + which is the default version when the task is attempted + ("default"). For some queues or tasks which were created + using the App Engine Task Queue API, + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable into [service][google.cloud.tasks.v2beta2.AppEngi + neRouting.service], [version][google.cloud.tasks.v2beta2.AppEn + gineRouting.version], and [instance][google.cloud.tasks.v2beta + 2.AppEngineRouting.instance]. 
For example, some tasks which + were created using the App Engine SDK use a custom domain + name; custom domains are not parsed by Cloud Tasks. If + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + not parsable, then [service][google.cloud.tasks.v2beta2.AppEng + ineRouting.service], [version][google.cloud.tasks.v2beta2.AppE + ngineRouting.version], and [instance][google.cloud.tasks.v2bet + a2.AppEngineRouting.instance] are the empty string. + instance: + App instance. By default, the task is sent to an instance + which is available when the task is attempted. Requests can + only be sent to a specific instance if `manual scaling is used + in App Engine Standard `__. + App Engine Flex does not support instances. For more + information, see `App Engine Standard request routing + `__ + and `App Engine Flex request routing + `__. + host: + Output only. The host that the task is sent to. For more + information, see `How Requests are Routed + `__. + The host is constructed as: - ``host = + [application_domain_name]``\ ``| [service] + '.' + + [application_domain_name]``\ ``| [version] + '.' + + [application_domain_name]``\ ``| [version_dot_service]+ + '.' + [application_domain_name]``\ ``| [instance] + '.' + + [application_domain_name]``\ ``| [instance_dot_service] + + '.' + [application_domain_name]``\ ``| + [instance_dot_version] + '.' + [application_domain_name]``\ + ``| [instance_dot_version_dot_service] + '.' + + [application_domain_name]`` - ``application_domain_name`` = + The domain name of the app, for example .appspot.com, which + is associated with the queue's project ID. Some tasks which + were created using the App Engine SDK use a custom domain + name. - ``service =`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + - ``version =`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + - ``version_dot_service =`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + ``+ '.' +`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + - ``instance =`` [instance][google.cloud.tasks.v2beta2.App + EngineRouting.instance] - ``instance_dot_service =`` [ins + tance][google.cloud.tasks.v2beta2.AppEngineRouting.instance] + ``+ '.' +`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + - ``instance_dot_version =`` [instance][google.cloud.tasks + .v2beta2.AppEngineRouting.instance] ``+ '.' +`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + - ``instance_dot_version_dot_service =`` [instance][google + .cloud.tasks.v2beta2.AppEngineRouting.instance] ``+ '.' +`` + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + ``+ '.' +`` + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + If + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + is empty, then the task will be sent to the service which is + the default service when the task is attempted. If + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + is empty, then the task will be sent to the version which is + the default version when the task is attempted. If [instance] + [google.cloud.tasks.v2beta2.AppEngineRouting.instance] is + empty, then the task will be sent to an instance which is + available when the task is attempted. 
When + [service][google.cloud.tasks.v2beta2.AppEngineRouting.service] + is "default", + [version][google.cloud.tasks.v2beta2.AppEngineRouting.version] + is "default", and [instance][google.cloud.tasks.v2beta2.AppEng + ineRouting.instance] is empty, + [host][google.cloud.tasks.v2beta2.AppEngineRouting.host] is + shortened to just the ``application_domain_name``. If [servic + e][google.cloud.tasks.v2beta2.AppEngineRouting.service], [vers + ion][google.cloud.tasks.v2beta2.AppEngineRouting.version], or + [instance][google.cloud.tasks.v2beta2.AppEngineRouting.instanc + e] is invalid, then the task will be sent to the default + version of the default service when the task is attempted. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.AppEngineRouting) + )) +_sym_db.RegisterMessage(AppEngineRouting) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\036com.google.cloud.tasks.v2beta2B\013TargetProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks')) +_APPENGINEHTTPREQUEST_HEADERSENTRY.has_options = True +_APPENGINEHTTPREQUEST_HEADERSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +# @@protoc_insertion_point(module_scope) diff --git a/tasks/google/cloud/tasks_v2beta2/proto/target_pb2_grpc.py b/tasks/google/cloud/tasks_v2beta2/proto/target_pb2_grpc.py new file mode 100644 index 000000000000..a89435267cb2 --- /dev/null +++ b/tasks/google/cloud/tasks_v2beta2/proto/target_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + diff --git a/tasks/google/cloud/tasks_v2beta2/proto/task_pb2.py b/tasks/google/cloud/tasks_v2beta2/proto/task_pb2.py new file mode 100644 index 000000000000..4d324816d170 --- /dev/null +++ b/tasks/google/cloud/tasks_v2beta2/proto/task_pb2.py @@ -0,0 +1,393 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/tasks_v2beta2/proto/task.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.cloud.tasks_v2beta2.proto import target_pb2 as google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/tasks_v2beta2/proto/task.proto', + package='google.cloud.tasks.v2beta2', + syntax='proto3', + serialized_pb=_b('\n+google/cloud/tasks_v2beta2/proto/task.proto\x12\x1agoogle.cloud.tasks.v2beta2\x1a\x1cgoogle/api/annotations.proto\x1a-google/cloud/tasks_v2beta2/proto/target.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"\xbe\x03\n\x04Task\x12\x0c\n\x04name\x18\x01 \x01(\t\x12S\n\x17\x61pp_engine_http_request\x18\x03 \x01(\x0b\x32\x30.google.cloud.tasks.v2beta2.AppEngineHttpRequestH\x00\x12?\n\x0cpull_message\x18\x04 \x01(\x0b\x32\'.google.cloud.tasks.v2beta2.PullMessageH\x00\x12\x31\n\rschedule_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x63reate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x36\n\x06status\x18\x07 \x01(\x0b\x32&.google.cloud.tasks.v2beta2.TaskStatus\x12\x33\n\x04view\x18\x08 \x01(\x0e\x32%.google.cloud.tasks.v2beta2.Task.View\"1\n\x04View\x12\x14\n\x10VIEW_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x08\n\x04\x46ULL\x10\x02\x42\x0e\n\x0cpayload_type\"\xdd\x01\n\nTaskStatus\x12\x1e\n\x16\x61ttempt_dispatch_count\x18\x01 \x01(\x05\x12\x1e\n\x16\x61ttempt_response_count\x18\x02 \x01(\x05\x12G\n\x14\x66irst_attempt_status\x18\x03 \x01(\x0b\x32).google.cloud.tasks.v2beta2.AttemptStatus\x12\x46\n\x13last_attempt_status\x18\x04 \x01(\x0b\x32).google.cloud.tasks.v2beta2.AttemptStatus\"\xd5\x01\n\rAttemptStatus\x12\x31\n\rschedule_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rdispatch_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x31\n\rresponse_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12+\n\x0fresponse_status\x18\x04 \x01(\x0b\x32\x12.google.rpc.StatusBn\n\x1e\x63om.google.cloud.tasks.v2beta2B\tTaskProtoP\x01Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasksb\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) + + + +_TASK_VIEW = _descriptor.EnumDescriptor( + name='View', + full_name='google.cloud.tasks.v2beta2.Task.View', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='VIEW_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BASIC', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FULL', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=592, + 
serialized_end=641, +) +_sym_db.RegisterEnumDescriptor(_TASK_VIEW) + + +_TASK = _descriptor.Descriptor( + name='Task', + full_name='google.cloud.tasks.v2beta2.Task', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.tasks.v2beta2.Task.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='app_engine_http_request', full_name='google.cloud.tasks.v2beta2.Task.app_engine_http_request', index=1, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pull_message', full_name='google.cloud.tasks.v2beta2.Task.pull_message', index=2, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='schedule_time', full_name='google.cloud.tasks.v2beta2.Task.schedule_time', index=3, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='create_time', full_name='google.cloud.tasks.v2beta2.Task.create_time', index=4, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='status', full_name='google.cloud.tasks.v2beta2.Task.status', index=5, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='view', full_name='google.cloud.tasks.v2beta2.Task.view', index=6, + number=8, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _TASK_VIEW, + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='payload_type', full_name='google.cloud.tasks.v2beta2.Task.payload_type', + index=0, containing_type=None, fields=[]), + ], + serialized_start=211, + serialized_end=657, +) + + +_TASKSTATUS = _descriptor.Descriptor( + name='TaskStatus', + full_name='google.cloud.tasks.v2beta2.TaskStatus', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='attempt_dispatch_count', full_name='google.cloud.tasks.v2beta2.TaskStatus.attempt_dispatch_count', index=0, + number=1, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='attempt_response_count', full_name='google.cloud.tasks.v2beta2.TaskStatus.attempt_response_count', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='first_attempt_status', full_name='google.cloud.tasks.v2beta2.TaskStatus.first_attempt_status', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='last_attempt_status', full_name='google.cloud.tasks.v2beta2.TaskStatus.last_attempt_status', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=660, + serialized_end=881, +) + + +_ATTEMPTSTATUS = _descriptor.Descriptor( + name='AttemptStatus', + full_name='google.cloud.tasks.v2beta2.AttemptStatus', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='schedule_time', full_name='google.cloud.tasks.v2beta2.AttemptStatus.schedule_time', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='dispatch_time', full_name='google.cloud.tasks.v2beta2.AttemptStatus.dispatch_time', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='response_time', full_name='google.cloud.tasks.v2beta2.AttemptStatus.response_time', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='response_status', full_name='google.cloud.tasks.v2beta2.AttemptStatus.response_status', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=884, + serialized_end=1097, +) + +_TASK.fields_by_name['app_engine_http_request'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2._APPENGINEHTTPREQUEST +_TASK.fields_by_name['pull_message'].message_type = google_dot_cloud_dot_tasks__v2beta2_dot_proto_dot_target__pb2._PULLMESSAGE +_TASK.fields_by_name['schedule_time'].message_type = 
google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_TASK.fields_by_name['create_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_TASK.fields_by_name['status'].message_type = _TASKSTATUS +_TASK.fields_by_name['view'].enum_type = _TASK_VIEW +_TASK_VIEW.containing_type = _TASK +_TASK.oneofs_by_name['payload_type'].fields.append( + _TASK.fields_by_name['app_engine_http_request']) +_TASK.fields_by_name['app_engine_http_request'].containing_oneof = _TASK.oneofs_by_name['payload_type'] +_TASK.oneofs_by_name['payload_type'].fields.append( + _TASK.fields_by_name['pull_message']) +_TASK.fields_by_name['pull_message'].containing_oneof = _TASK.oneofs_by_name['payload_type'] +_TASKSTATUS.fields_by_name['first_attempt_status'].message_type = _ATTEMPTSTATUS +_TASKSTATUS.fields_by_name['last_attempt_status'].message_type = _ATTEMPTSTATUS +_ATTEMPTSTATUS.fields_by_name['schedule_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_ATTEMPTSTATUS.fields_by_name['dispatch_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_ATTEMPTSTATUS.fields_by_name['response_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_ATTEMPTSTATUS.fields_by_name['response_status'].message_type = google_dot_rpc_dot_status__pb2._STATUS +DESCRIPTOR.message_types_by_name['Task'] = _TASK +DESCRIPTOR.message_types_by_name['TaskStatus'] = _TASKSTATUS +DESCRIPTOR.message_types_by_name['AttemptStatus'] = _ATTEMPTSTATUS +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Task = _reflection.GeneratedProtocolMessageType('Task', (_message.Message,), dict( + DESCRIPTOR = _TASK, + __module__ = 'google.cloud.tasks_v2beta2.proto.task_pb2' + , + __doc__ = """A unit of scheduled work. + + + Attributes: + name: + The task name. The task name must have the following format: + ``projects/PROJECT_ID/locations/LOCATION_ID/queues/QUEUE_ID/ta + sks/TASK_ID`` - ``PROJECT_ID`` can contain letters + ([A-Za-z]), numbers ([0-9]), hyphens (-), colons (:), or + periods (.). For more information, see `Identifying + projects `__ - ``LOCATION_ID`` is the + canonical ID for the task's location. The list of available + locations can be obtained by calling [ListLocations][google + .cloud.location.Locations.ListLocations]. For more + information, see https://cloud.google.com/about/locations/. - + ``QUEUE_ID`` can contain letters ([A-Za-z]), numbers ([0-9]), + or hyphens (-). The maximum length is 100 characters. - + ``TASK_ID`` can contain only letters ([A-Za-z]), numbers + ([0-9]), hyphens (-), or underscores (\_). The maximum + length is 500 characters. Optionally caller-specified in [ + CreateTask][google.cloud.tasks.v2beta2.CloudTasks.CreateTask]. + payload_type: + Required. The task's payload is used by the task's target to + process the task. A payload is valid only if it is compatible + with the queue's target. + app_engine_http_request: + App Engine HTTP request that is sent to the task's target. Can + be set only if [app\_engine\_http\_target][google.cloud.tasks. + v2beta2.Queue.app\_engine\_http\_target] is set on the queue. + An App Engine task is a task that has [AppEngineHttpRequest][g + oogle.cloud.tasks.v2beta2.AppEngineHttpRequest] set. + pull_message: + [LeaseTasks][google.cloud.tasks.v2beta2.CloudTasks.LeaseTasks] + to process the task. Can be set only if + [pull\_target][google.cloud.tasks.v2beta2.Queue.pull\_target] + is set on the queue. A pull task is a task that has + [PullMessage][google.cloud.tasks.v2beta2.PullMessage] set. 
+ schedule_time: + The time when the task is scheduled to be attempted. For App + Engine queues, this is when the task will be attempted or + retried. For pull queues, this is the time when the task is + available to be leased; if a task is currently leased, this is + the time when the current lease expires, that is, the time + that the task was leased plus the [lease\_duration][google.clo + ud.tasks.v2beta2.LeaseTasksRequest.lease\_duration]. + ``schedule_time`` will be truncated to the nearest + microsecond. + create_time: + Output only. The time that the task was created. + ``create_time`` will be truncated to the nearest second. + status: + Output only. The task status. + view: + Output only. The view specifies which subset of the + [Task][google.cloud.tasks.v2beta2.Task] has been returned. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.Task) + )) +_sym_db.RegisterMessage(Task) + +TaskStatus = _reflection.GeneratedProtocolMessageType('TaskStatus', (_message.Message,), dict( + DESCRIPTOR = _TASKSTATUS, + __module__ = 'google.cloud.tasks_v2beta2.proto.task_pb2' + , + __doc__ = """Status of the task. + + + Attributes: + attempt_dispatch_count: + Output only. The number of attempts dispatched. This count + includes tasks which have been dispatched but haven't received + a response. + attempt_response_count: + Output only. The number of attempts which have received a + response. This field is not calculated for `pull tasks + `__. + first_attempt_status: + Output only. The status of the task's first attempt. Only [di + spatch\_time][google.cloud.tasks.v2beta2.AttemptStatus.dispatc + h\_time] will be set. The other + [AttemptStatus][google.cloud.tasks.v2beta2.AttemptStatus] + information is not retained by Cloud Tasks. This field is not + calculated for `pull tasks + `__. + last_attempt_status: + Output only. The status of the task's last attempt. This + field is not calculated for `pull tasks + `__. + """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.TaskStatus) + )) +_sym_db.RegisterMessage(TaskStatus) + +AttemptStatus = _reflection.GeneratedProtocolMessageType('AttemptStatus', (_message.Message,), dict( + DESCRIPTOR = _ATTEMPTSTATUS, + __module__ = 'google.cloud.tasks_v2beta2.proto.task_pb2' + , + __doc__ = """The status of a task attempt. + + + Attributes: + schedule_time: + Output only. The time that this attempt was scheduled. + ``schedule_time`` will be truncated to the nearest + microsecond. + dispatch_time: + Output only. The time that this attempt was dispatched. + ``dispatch_time`` will be truncated to the nearest + microsecond. + response_time: + Output only. The time that this attempt response was received. + ``response_time`` will be truncated to the nearest + microsecond. + response_status: + Output only. The response from the target for this attempt. + If the task has not been attempted or the task is currently + running then the response status is unset. 
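+
+  For illustration only, the timestamps of a task's last attempt might be
+  read as follows, assuming ``task`` is a
+  [Task][google.cloud.tasks.v2beta2.Task] whose ``status`` is populated::
+
+      last = task.status.last_attempt_status
+      if last.HasField('dispatch_time'):
+          print(last.dispatch_time.ToDatetime())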
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.tasks.v2beta2.AttemptStatus) + )) +_sym_db.RegisterMessage(AttemptStatus) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\036com.google.cloud.tasks.v2beta2B\tTaskProtoP\001Z?google.golang.org/genproto/googleapis/cloud/tasks/v2beta2;tasks')) +# @@protoc_insertion_point(module_scope) diff --git a/tasks/google/cloud/tasks_v2beta2/proto/task_pb2_grpc.py b/tasks/google/cloud/tasks_v2beta2/proto/task_pb2_grpc.py new file mode 100644 index 000000000000..a89435267cb2 --- /dev/null +++ b/tasks/google/cloud/tasks_v2beta2/proto/task_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + diff --git a/tasks/google/cloud/tasks_v2beta2/types.py b/tasks/google/cloud/tasks_v2beta2/types.py new file mode 100644 index 000000000000..e9367f78cce5 --- /dev/null +++ b/tasks/google/cloud/tasks_v2beta2/types.py @@ -0,0 +1,57 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import +import sys + +from google.api_core.protobuf_helpers import get_messages + +from google.api import http_pb2 +from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2 +from google.cloud.tasks_v2beta2.proto import queue_pb2 +from google.cloud.tasks_v2beta2.proto import target_pb2 +from google.cloud.tasks_v2beta2.proto import task_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 +from google.protobuf import any_pb2 +from google.protobuf import descriptor_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 +from google.protobuf import timestamp_pb2 +from google.rpc import status_pb2 + +names = [] +for module in ( + http_pb2, + cloudtasks_pb2, + queue_pb2, + target_pb2, + task_pb2, + iam_policy_pb2, + policy_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + field_mask_pb2, + timestamp_pb2, + status_pb2, +): + for name, message in get_messages(module).items(): + message.__module__ = 'google.cloud.tasks_v2beta2.types' + setattr(sys.modules[__name__], name, message) + names.append(name) + +__all__ = tuple(sorted(names)) diff --git a/tasks/nox.py b/tasks/nox.py new file mode 100644 index 000000000000..180dfc19fe88 --- /dev/null +++ b/tasks/nox.py @@ -0,0 +1,52 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
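+
+# Illustrative usage (not part of the sessions themselves): assuming
+# ``nox`` is installed, the parametrized unit session defined below can
+# be run from this directory with, for example:
+#
+#     nox -f nox.py -s "unit(py='3.6')"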
+ +from __future__ import absolute_import +import os + +import nox + + +@nox.session +def default(session): + return unit(session, 'default') + + +@nox.session +@nox.parametrize('py', ['2.7', '3.4', '3.5', '3.6']) +def unit(session, py): + """Run the unit test suite.""" + + # Run unit tests against all supported versions of Python. + if py != 'default': + session.interpreter = 'python{}'.format(py) + + # Set the virtualenv directory name. + session.virtualenv_dirname = 'unit-' + py + + # Install all test dependencies, then install this package in-place. + session.install('pytest') + session.install('-e', '.') + + # Run py.test against the unit tests. + session.run('py.test', '--quiet', os.path.join('tests', 'unit')) + + +@nox.session +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.interpreter = 'python3.6' + session.install('docutils', 'pygments') + session.run('python', 'setup.py', 'check', '--restructuredtext', + '--strict') diff --git a/tasks/setup.cfg b/tasks/setup.cfg new file mode 100644 index 000000000000..2a9acf13daa9 --- /dev/null +++ b/tasks/setup.cfg @@ -0,0 +1,2 @@ +[bdist_wheel] +universal = 1 diff --git a/tasks/setup.py b/tasks/setup.py new file mode 100644 index 000000000000..cd291a0d0f1b --- /dev/null +++ b/tasks/setup.py @@ -0,0 +1,74 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
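+
+# Illustrative usage once the built distribution is installed (for
+# example via ``pip install google-cloud-cloudtasks``):
+#
+#     from google.cloud import tasks_v2beta2
+#     client = tasks_v2beta2.CloudTasksClient()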
+
+import io
+import os
+
+import setuptools
+
+name = 'google-cloud-cloudtasks'
+description = 'Cloud Tasks API client library'
+version = '0.1.0'
+release_status = 'Development Status :: 3 - Alpha'
+dependencies = [
+    'google-api-core[grpc] >= 1.1.0, < 2.0.0dev',
+    'enum34; python_version < "3.4"',
+    'grpc-google-iam-v1<0.12dev,>=0.11.4',
+]
+
+package_root = os.path.abspath(os.path.dirname(__file__))
+
+readme_filename = os.path.join(package_root, 'README.rst')
+with io.open(readme_filename, encoding='utf-8') as readme_file:
+    readme = readme_file.read()
+
+packages = [
+    package for package in setuptools.find_packages()
+    if package.startswith('google')
+]
+
+namespaces = ['google']
+if 'google.cloud' in packages:
+    namespaces.append('google.cloud')
+
+setuptools.setup(
+    name=name,
+    version=version,
+    description=description,
+    long_description=readme,
+    author='Google LLC',
+    author_email='googleapis-packages@google.com',
+    license='Apache 2.0',
+    url='https://github.com/GoogleCloudPlatform/google-cloud-python',
+    classifiers=[
+        release_status,
+        'Intended Audience :: Developers',
+        'License :: OSI Approved :: Apache Software License',
+        'Programming Language :: Python',
+        'Programming Language :: Python :: 2',
+        'Programming Language :: Python :: 2.7',
+        'Programming Language :: Python :: 3',
+        'Programming Language :: Python :: 3.4',
+        'Programming Language :: Python :: 3.5',
+        'Programming Language :: Python :: 3.6',
+        'Operating System :: OS Independent',
+        'Topic :: Internet',
+    ],
+    platforms='Posix; MacOS X; Windows',
+    packages=packages,
+    namespace_packages=namespaces,
+    install_requires=dependencies,
+    include_package_data=True,
+    zip_safe=False,
+)
diff --git a/tasks/tests/unit/gapic/v2beta2/test_cloud_tasks_client_v2beta2.py b/tasks/tests/unit/gapic/v2beta2/test_cloud_tasks_client_v2beta2.py
new file mode 100644
index 000000000000..80bf606ceee7
--- /dev/null
+++ b/tasks/tests/unit/gapic/v2beta2/test_cloud_tasks_client_v2beta2.py
@@ -0,0 +1,735 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
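+
+# The tests below follow a stub-injection pattern: ``ChannelStub``
+# (defined below) stands in for the real gRPC channel, queueing canned
+# response protos and recording each request so a test can assert the
+# exact request message the client produced. A minimal sketch:
+#
+#     channel = ChannelStub(responses=[expected_response])
+#     client = tasks_v2beta2.CloudTasksClient(channel=channel)
+#     client.get_queue(name)
+#     assert channel.requests[0][1] == cloudtasks_pb2.GetQueueRequest(name=name)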
+"""Unit tests.""" + +import pytest + +from google.cloud import tasks_v2beta2 +from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2 +from google.cloud.tasks_v2beta2.proto import queue_pb2 +from google.cloud.tasks_v2beta2.proto import task_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import timestamp_pb2 + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + response = None + if self.channel_stub.responses: + response = self.channel_stub.responses.pop() + + if isinstance(response, Exception): + raise response + + if response: + return response + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, + method, + request_serializer=None, + response_deserializer=None): + return MultiCallableStub(method, self) + + +class CustomException(Exception): + pass + + +class TestCloudTasksClient(object): + def test_list_queues(self): + # Setup Expected Response + next_page_token = '' + queues_element = {} + queues = [queues_element] + expected_response = { + 'next_page_token': next_page_token, + 'queues': queues + } + expected_response = cloudtasks_pb2.ListQueuesResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + parent = client.location_path('[PROJECT]', '[LOCATION]') + + paged_list_response = client.list_queues(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.queues[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.ListQueuesRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_queues_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + parent = client.location_path('[PROJECT]', '[LOCATION]') + + paged_list_response = client.list_queues(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_get_queue(self): + # Setup Expected Response + name_2 = 'name2-1052831874' + expected_response = {'name': name_2} + expected_response = queue_pb2.Queue(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + + response = client.get_queue(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.GetQueueRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_queue_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + name = client.queue_path('[PROJECT]', '[LOCATION]', 
'[QUEUE]') + + with pytest.raises(CustomException): + client.get_queue(name) + + def test_create_queue(self): + # Setup Expected Response + name = 'name3373707' + expected_response = {'name': name} + expected_response = queue_pb2.Queue(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + parent = client.location_path('[PROJECT]', '[LOCATION]') + queue = {} + + response = client.create_queue(parent, queue) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.CreateQueueRequest( + parent=parent, queue=queue) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_queue_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + parent = client.location_path('[PROJECT]', '[LOCATION]') + queue = {} + + with pytest.raises(CustomException): + client.create_queue(parent, queue) + + def test_update_queue(self): + # Setup Expected Response + name = 'name3373707' + expected_response = {'name': name} + expected_response = queue_pb2.Queue(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + queue = {} + + response = client.update_queue(queue) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.UpdateQueueRequest(queue=queue) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_queue_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + queue = {} + + with pytest.raises(CustomException): + client.update_queue(queue) + + def test_delete_queue(self): + channel = ChannelStub() + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + + client.delete_queue(name) + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.DeleteQueueRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_queue_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + + with pytest.raises(CustomException): + client.delete_queue(name) + + def test_purge_queue(self): + # Setup Expected Response + name_2 = 'name2-1052831874' + expected_response = {'name': name_2} + expected_response = queue_pb2.Queue(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + + response = client.purge_queue(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.PurgeQueueRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_purge_queue_exception(self): + # Mock the 
API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + + with pytest.raises(CustomException): + client.purge_queue(name) + + def test_pause_queue(self): + # Setup Expected Response + name_2 = 'name2-1052831874' + expected_response = {'name': name_2} + expected_response = queue_pb2.Queue(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + + response = client.pause_queue(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.PauseQueueRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_pause_queue_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + + with pytest.raises(CustomException): + client.pause_queue(name) + + def test_resume_queue(self): + # Setup Expected Response + name_2 = 'name2-1052831874' + expected_response = {'name': name_2} + expected_response = queue_pb2.Queue(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + + response = client.resume_queue(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.ResumeQueueRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_resume_queue_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + name = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + + with pytest.raises(CustomException): + client.resume_queue(name) + + def test_get_iam_policy(self): + # Setup Expected Response + version = 351608024 + etag = b'21' + expected_response = {'version': version, 'etag': etag} + expected_response = policy_pb2.Policy(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + resource = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + + response = client.get_iam_policy(resource) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = iam_policy_pb2.GetIamPolicyRequest( + resource=resource) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_iam_policy_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + resource = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + + with pytest.raises(CustomException): + client.get_iam_policy(resource) + + def test_set_iam_policy(self): + # Setup Expected Response + version = 351608024 + etag = b'21' + expected_response = {'version': version, 
'etag': etag} + expected_response = policy_pb2.Policy(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + resource = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + policy = {} + + response = client.set_iam_policy(resource, policy) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, policy=policy) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_set_iam_policy_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + resource = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + policy = {} + + with pytest.raises(CustomException): + client.set_iam_policy(resource, policy) + + def test_test_iam_permissions(self): + # Setup Expected Response + expected_response = {} + expected_response = iam_policy_pb2.TestIamPermissionsResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + resource = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + permissions = [] + + response = client.test_iam_permissions(resource, permissions) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, permissions=permissions) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_test_iam_permissions_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + resource = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + permissions = [] + + with pytest.raises(CustomException): + client.test_iam_permissions(resource, permissions) + + def test_list_tasks(self): + # Setup Expected Response + next_page_token = '' + tasks_element = {} + tasks = [tasks_element] + expected_response = { + 'next_page_token': next_page_token, + 'tasks': tasks + } + expected_response = cloudtasks_pb2.ListTasksResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + + paged_list_response = client.list_tasks(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.tasks[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.ListTasksRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_tasks_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + + paged_list_response = client.list_tasks(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_get_task(self): + # Setup Expected Response + name_2 = 'name2-1052831874' + expected_response = {'name': name_2} + 
expected_response = task_pb2.Task(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + + response = client.get_task(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.GetTaskRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_task_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + + with pytest.raises(CustomException): + client.get_task(name) + + def test_create_task(self): + # Setup Expected Response + name = 'name3373707' + expected_response = {'name': name} + expected_response = task_pb2.Task(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + task = {} + + response = client.create_task(parent, task) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.CreateTaskRequest( + parent=parent, task=task) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_task_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + task = {} + + with pytest.raises(CustomException): + client.create_task(parent, task) + + def test_delete_task(self): + channel = ChannelStub() + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + + client.delete_task(name) + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.DeleteTaskRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_task_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + + with pytest.raises(CustomException): + client.delete_task(name) + + def test_lease_tasks(self): + # Setup Expected Response + expected_response = {} + expected_response = cloudtasks_pb2.LeaseTasksResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + lease_duration = {} + + response = client.lease_tasks(parent, lease_duration) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.LeaseTasksRequest( + parent=parent, lease_duration=lease_duration) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_lease_tasks_exception(self): + # Mock the API response + channel = 
ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + parent = client.queue_path('[PROJECT]', '[LOCATION]', '[QUEUE]') + lease_duration = {} + + with pytest.raises(CustomException): + client.lease_tasks(parent, lease_duration) + + def test_acknowledge_task(self): + channel = ChannelStub() + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + schedule_time = {} + + client.acknowledge_task(name, schedule_time) + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.AcknowledgeTaskRequest( + name=name, schedule_time=schedule_time) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_acknowledge_task_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + schedule_time = {} + + with pytest.raises(CustomException): + client.acknowledge_task(name, schedule_time) + + def test_renew_lease(self): + # Setup Expected Response + name_2 = 'name2-1052831874' + expected_response = {'name': name_2} + expected_response = task_pb2.Task(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + schedule_time = {} + lease_duration = {} + + response = client.renew_lease(name, schedule_time, lease_duration) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.RenewLeaseRequest( + name=name, + schedule_time=schedule_time, + lease_duration=lease_duration) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_renew_lease_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + schedule_time = {} + lease_duration = {} + + with pytest.raises(CustomException): + client.renew_lease(name, schedule_time, lease_duration) + + def test_cancel_lease(self): + # Setup Expected Response + name_2 = 'name2-1052831874' + expected_response = {'name': name_2} + expected_response = task_pb2.Task(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + schedule_time = {} + + response = client.cancel_lease(name, schedule_time) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.CancelLeaseRequest( + name=name, schedule_time=schedule_time) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_cancel_lease_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + schedule_time = {} + + with pytest.raises(CustomException): + 
client.cancel_lease(name, schedule_time) + + def test_run_task(self): + # Setup Expected Response + name_2 = 'name2-1052831874' + expected_response = {'name': name_2} + expected_response = task_pb2.Task(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup Request + name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + + response = client.run_task(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = cloudtasks_pb2.RunTaskRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_run_task_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = tasks_v2beta2.CloudTasksClient(channel=channel) + + # Setup request + name = client.task_path('[PROJECT]', '[LOCATION]', '[QUEUE]', '[TASK]') + + with pytest.raises(CustomException): + client.run_task(name) From c1fa3b697788027b78ad8d0fcfecebb30d6a0d8b Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Mon, 21 May 2018 19:20:39 -0400 Subject: [PATCH 20/75] Release tasks 0.1.0 (#5354) * Release Tasks 0.1.0 --- tasks/CHANGELOG.md | 11 +++++++++++ .../cloud/tasks_v2beta2/gapic/cloud_tasks_client.py | 2 +- tasks/setup.py | 2 +- 3 files changed, 13 insertions(+), 2 deletions(-) create mode 100644 tasks/CHANGELOG.md diff --git a/tasks/CHANGELOG.md b/tasks/CHANGELOG.md new file mode 100644 index 000000000000..836e9261de32 --- /dev/null +++ b/tasks/CHANGELOG.md @@ -0,0 +1,11 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-tasks/#history + +## 0.1.0 + +### New Features +- Add v2beta2 endpoint for Tasks + diff --git a/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py b/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py index 6a7fb430d0d8..ef5ba265b2ae 100644 --- a/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py +++ b/tasks/google/cloud/tasks_v2beta2/gapic/cloud_tasks_client.py @@ -38,7 +38,7 @@ from google.protobuf import timestamp_pb2 _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - 'google-cloud-cloudtasks', ).version + 'google-cloud-tasks', ).version class CloudTasksClient(object): diff --git a/tasks/setup.py b/tasks/setup.py index cd291a0d0f1b..bff46d1f0bef 100644 --- a/tasks/setup.py +++ b/tasks/setup.py @@ -17,7 +17,7 @@ import setuptools -name = 'google-cloud-cloudtasks' +name = 'google-cloud-tasks' description = 'Cloud Tasks API API client library' version = '0.1.0' release_status = 'Development Status :: 3 - Alpha' From 8849f10af4fb4212f5a5822c3580beea8b8ac332 Mon Sep 17 00:00:00 2001 From: Dan O'Meara Date: Tue, 22 May 2018 10:04:17 -0700 Subject: [PATCH 21/75] Add Redis to master README (#5358) --- README.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.rst b/README.rst index 905c75d6accf..3fce76437a47 100644 --- a/README.rst +++ b/README.rst @@ -63,6 +63,7 @@ Cloud Platform services: - `Google Cloud Bigtable - HappyBase`_ (`HappyBase README`_) - `Google Cloud Container`_ (`Container README`_) - `Google Cloud DNS`_ (`DNS README`_) +- `Google Cloud Memorystore for Redis`_ (`Redis README`_) - `Google Cloud Resource Manager`_ (`Resource Manager README`_) - `Google Cloud Runtime Configuration`_ (`Runtime Config README`_) - `Stackdriver Error Reporting`_ (`Error Reporting README`_) @@ -115,6 +116,8 @@ updates. See `versioning`_ for more details. .. 
_versioning: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/CONTRIBUTING.rst#versioning .. _Google Cloud Firestore: https://pypi.org/project/google-cloud-firestore/ .. _Firestore README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/firestore +.. _Google Cloud Memorystore for Redis: https://pypi.org/project/google-cloud-redis/ +.. _Redis README: https://github.com/GoogleCloudPlatform/google-cloud-python/tree/master/redis If you need support for other Google APIs, check out the `Google APIs Python Client library`_. From 38bd4abb6c701f7950e4f9e72020a27d7b7d5d38 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 22 May 2018 14:57:09 -0400 Subject: [PATCH 22/75] Unit tests require grpcio. (#5363) --- redis/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/redis/setup.py b/redis/setup.py index 63c07704b1c7..c760bf9ccaac 100644 --- a/redis/setup.py +++ b/redis/setup.py @@ -29,7 +29,7 @@ # 'Development Status :: 5 - Production/Stable' release_status = 'Development Status :: 3 - Alpha' dependencies = [ - 'google-api-core >= 0.1.0, < 0.2.0dev', + 'google-api-core[grpc] >= 0.1.0, < 0.2.0dev', ] extras = { } From 47926cd1767b14d28d5da2261736afc09737be12 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 22 May 2018 13:02:49 -0700 Subject: [PATCH 23/75] BigQuery: use autosummary to split up API reference docs (#5340) * BigQuery: use autosummary to split up API reference docs * Move generated docs stubs to generated directory. * Rename Job enums heading. Comment changes from upstream autosummary. * Remove duplicate documentation of RowIterator.pages. --- bigquery/google/cloud/bigquery/table.py | 2 - docs/_templates/autosummary/class.rst | 1 + docs/_templates/autosummary/module.rst | 1 + docs/bigquery/.gitignore | 1 + docs/bigquery/reference.rst | 118 ++++++-- docs/conf.py | 6 +- test_utils/scripts/update_docs.sh | 1 + third_party/sphinx/LICENSE | 280 ++++++++++++++++++ third_party/sphinx/README.md | 2 + .../templates/autosummary/class.rst | 35 +++ .../templates/autosummary/module.rst | 41 +++ 11 files changed, 459 insertions(+), 29 deletions(-) create mode 120000 docs/_templates/autosummary/class.rst create mode 120000 docs/_templates/autosummary/module.rst create mode 100644 docs/bigquery/.gitignore create mode 100644 third_party/sphinx/LICENSE create mode 100644 third_party/sphinx/README.md create mode 100644 third_party/sphinx/sphinx/ext/autosummary/templates/autosummary/class.rst create mode 100644 third_party/sphinx/sphinx/ext/autosummary/templates/autosummary/module.rst diff --git a/bigquery/google/cloud/bigquery/table.py b/bigquery/google/cloud/bigquery/table.py index 40eb7a02f85a..9d786703d02f 100644 --- a/bigquery/google/cloud/bigquery/table.py +++ b/bigquery/google/cloud/bigquery/table.py @@ -1084,8 +1084,6 @@ class RowIterator(HTTPIterator): page_size (int, optional): The number of items to return per page. extra_params (Dict[str, object]): Extra query string parameters for the API call. - - .. 
autoattribute:: pages """ def __init__(self, client, api_request, path, schema, page_token=None, diff --git a/docs/_templates/autosummary/class.rst b/docs/_templates/autosummary/class.rst new file mode 120000 index 000000000000..bd3c7e22590e --- /dev/null +++ b/docs/_templates/autosummary/class.rst @@ -0,0 +1 @@ +../../../third_party/sphinx/sphinx/ext/autosummary/templates/autosummary/class.rst \ No newline at end of file diff --git a/docs/_templates/autosummary/module.rst b/docs/_templates/autosummary/module.rst new file mode 120000 index 000000000000..afd9c7b5e867 --- /dev/null +++ b/docs/_templates/autosummary/module.rst @@ -0,0 +1 @@ +../../../third_party/sphinx/sphinx/ext/autosummary/templates/autosummary/module.rst \ No newline at end of file diff --git a/docs/bigquery/.gitignore b/docs/bigquery/.gitignore new file mode 100644 index 000000000000..3fe20bec0f3a --- /dev/null +++ b/docs/bigquery/.gitignore @@ -0,0 +1 @@ +generated/ \ No newline at end of file diff --git a/docs/bigquery/reference.rst b/docs/bigquery/reference.rst index da1d68982212..c1546f915bd6 100644 --- a/docs/bigquery/reference.rst +++ b/docs/bigquery/reference.rst @@ -1,68 +1,134 @@ API Reference ~~~~~~~~~~~~~ -.. automodule:: google.cloud.bigquery - :no-members: +.. currentmodule:: google.cloud.bigquery + +The main concepts with this API are: + +- :class:`~google.cloud.bigquery.client.Client` manages connections to the + BigQuery API. Use the client methods to run jobs (such as a + :class:`~google.cloud.bigquery.job.QueryJob` via + :meth:`~google.cloud.bigquery.client.Client.query`) and manage resources. + +- :class:`~google.cloud.bigquery.dataset.Dataset` represents a + collection of tables. + +- :class:`~google.cloud.bigquery.table.Table` represents a single "relation". Client ====== -.. automodule:: google.cloud.bigquery.client - :members: - :show-inheritance: +.. autosummary:: + :toctree: generated + client.Client Job === -.. automodule:: google.cloud.bigquery.job - :members: - :show-inheritance: +Job Configuration +----------------- + +.. autosummary:: + :toctree: generated + + job.QueryJobConfig + job.CopyJobConfig + job.LoadJobConfig + job.ExtractJobConfig + +Job Classes +----------- + +.. autosummary:: + :toctree: generated + + job.QueryJob + job.CopyJob + job.LoadJob + job.ExtractJob + job.UnknownJob + +Job-Related Types +----------------- + +.. autosummary:: + :toctree: generated + + job.Compression + job.CreateDisposition + job.DestinationFormat + job.Encoding + job.QueryPriority + job.SourceFormat + job.WriteDisposition Dataset ======= -.. automodule:: google.cloud.bigquery.dataset - :members: - :show-inheritance: +.. autosummary:: + :toctree: generated + + dataset.Dataset + dataset.DatasetReference + dataset.AccessEntry Table ===== -.. automodule:: google.cloud.bigquery.table - :members: - :show-inheritance: +.. autosummary:: + :toctree: generated + + table.Table + table.TableReference + table.Row + table.RowIterator + table.EncryptionConfiguration + table.TimePartitioning + table.TimePartitioningType Schema ====== -.. automodule:: google.cloud.bigquery.schema - :members: - :show-inheritance: +.. autosummary:: + :toctree: generated + + schema.SchemaField Query ===== -.. automodule:: google.cloud.bigquery.query - :members: - :show-inheritance: +.. autosummary:: + :toctree: generated + + query.ArrayQueryParameter + query.ScalarQueryParameter + query.StructQueryParameter + query.UDFResource External Configuration ====================== -.. 
automodule:: google.cloud.bigquery.external_config - :members: - :show-inheritance: +.. autosummary:: + :toctree: generated + + external_config.ExternalConfig + external_config.BigtableOptions + external_config.BigtableColumnFamily + external_config.BigtableColumn + external_config.CSVOptions + external_config.GoogleSheetsOptions Magics ====================== -.. automodule:: google.cloud.bigquery.magics - :members: - :show-inheritance: +.. autosummary:: + :toctree: generated + + magics diff --git a/docs/conf.py b/docs/conf.py index 173feda5e85c..2f6524fe9974 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -50,7 +50,7 @@ ] # Add any paths that contain templates here, relative to this directory. -templates_path = [] +templates_path = ['_templates'] # Allow markdown includes (so releases.md can include CHANGLEOG.md) # http://www.sphinx-doc.org/en/master/markdown.html @@ -306,6 +306,10 @@ # and parameter definitions from the __init__ docstring. autoclass_content = 'both' +# Automatically generate API reference stubs from autosummary. +# http://www.sphinx-doc.org/en/master/ext/autosummary.html#generating-stub-pages-automatically +autosummary_generate = True + # Configuration for intersphinx: intersphinx_mapping = { 'google-auth': ('https://google-auth.readthedocs.io/en/stable', None), diff --git a/test_utils/scripts/update_docs.sh b/test_utils/scripts/update_docs.sh index a5f0525843fd..be7018b8e325 100755 --- a/test_utils/scripts/update_docs.sh +++ b/test_utils/scripts/update_docs.sh @@ -24,6 +24,7 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" # Function to build the docs. function build_docs { rm -rf docs/_build/ + rm -rf docs/bigquery/generated sphinx-build -W -b html -d docs/_build/doctrees docs/ docs/_build/html/ return $? } diff --git a/third_party/sphinx/LICENSE b/third_party/sphinx/LICENSE new file mode 100644 index 000000000000..09d3bd624f62 --- /dev/null +++ b/third_party/sphinx/LICENSE @@ -0,0 +1,280 @@ +License for Sphinx +================== + +Copyright (c) 2007-2017 by the Sphinx team (see AUTHORS file). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +* Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + +Licenses for incorporated software +================================== + +The pgen2 package, included in this distribution under the name +sphinx.pycode.pgen2, is available in the Python 2.6 distribution under +the PSF license agreement for Python: + +---------------------------------------------------------------------- +Copyright © 2001-2008 Python Software Foundation; All Rights Reserved. + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation + ("PSF"), and the Individual or Organization ("Licensee") accessing + and otherwise using Python 2.6 software in source or binary form + and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF + hereby grants Licensee a nonexclusive, royalty-free, world-wide + license to reproduce, analyze, test, perform and/or display + publicly, prepare derivative works, distribute, and otherwise use + Python 2.6 alone or in any derivative version, provided, however, + that PSF's License Agreement and PSF's notice of copyright, i.e., + "Copyright © 2001-2008 Python Software Foundation; All Rights + Reserved" are retained in Python 2.6 alone or in any derivative + version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on + or incorporates Python 2.6 or any part thereof, and wants to make + the derivative work available to others as provided herein, then + Licensee hereby agrees to include in any such work a brief summary + of the changes made to Python 2.6. + +4. PSF is making Python 2.6 available to Licensee on an "AS IS" basis. + PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR IMPLIED. BY + WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND DISCLAIMS ANY + REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS FOR ANY + PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 2.6 WILL NOT INFRINGE + ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON + 2.6 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS + AS A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON + 2.6, OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY + THEREOF. + +6. This License Agreement will automatically terminate upon a material + breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any + relationship of agency, partnership, or joint venture between PSF + and Licensee. This License Agreement does not grant permission to + use PSF trademarks or trade name in a trademark sense to endorse or + promote products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python 2.6, Licensee + agrees to be bound by the terms and conditions of this License + Agreement. +---------------------------------------------------------------------- + +The included smartypants module, included as sphinx.util.smartypants, +is available under the following license: + +---------------------------------------------------------------------- +SmartyPants_ license:: + + Copyright (c) 2003 John Gruber + (http://daringfireball.net/) + All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. 
+ + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + + * Neither the name "SmartyPants" nor the names of its + contributors may be used to endorse or promote products + derived from this software without specific prior written + permission. + + This software is provided by the copyright holders and + contributors "as is" and any express or implied warranties, + including, but not limited to, the implied warranties of + merchantability and fitness for a particular purpose are + disclaimed. In no event shall the copyright owner or contributors + be liable for any direct, indirect, incidental, special, + exemplary, or consequential damages (including, but not limited + to, procurement of substitute goods or services; loss of use, + data, or profits; or business interruption) however caused and on + any theory of liability, whether in contract, strict liability, or + tort (including negligence or otherwise) arising in any way out of + the use of this software, even if advised of the possibility of + such damage. + + +smartypants.py license:: + + smartypants.py is a derivative work of SmartyPants. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions + are met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials + provided with the distribution. + + This software is provided by the copyright holders and + contributors "as is" and any express or implied warranties, + including, but not limited to, the implied warranties of + merchantability and fitness for a particular purpose are + disclaimed. In no event shall the copyright owner or contributors + be liable for any direct, indirect, incidental, special, + exemplary, or consequential damages (including, but not limited + to, procurement of substitute goods or services; loss of use, + data, or profits; or business interruption) however caused and on + any theory of liability, whether in contract, strict liability, or + tort (including negligence or otherwise) arising in any way out of + the use of this software, even if advised of the possibility of + such damage. 
+---------------------------------------------------------------------- + +The ElementTree package, included in this distribution in +test/etree13, is available under the following license: + +---------------------------------------------------------------------- +The ElementTree toolkit is + +Copyright (c) 1999-2007 by Fredrik Lundh + +By obtaining, using, and/or copying this software and/or its +associated documentation, you agree that you have read, understood, +and will comply with the following terms and conditions: + +Permission to use, copy, modify, and distribute this software and its +associated documentation for any purpose and without fee is hereby +granted, provided that the above copyright notice appears in all +copies, and that both that copyright notice and this permission notice +appear in supporting documentation, and that the name of Secret Labs +AB or the author not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT- ABILITY +AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +---------------------------------------------------------------------- + +The included JQuery JavaScript library is available under the MIT +license: + +---------------------------------------------------------------------- +Copyright (c) 2008 John Resig, http://jquery.com/ + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+---------------------------------------------------------------------- + +The included Underscore JavaScript library is available under the MIT +license: + +---------------------------------------------------------------------- +Copyright (c) 2009 Jeremy Ashkenas, DocumentCloud + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +------------------------------------------------------------------------------- + +The included implementation of NumpyDocstring._parse_numpydoc_see_also_section +was derived from code under the following license: + +------------------------------------------------------------------------------- + +Copyright (C) 2008 Stefan van der Walt , Pauli Virtanen + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, +INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, +STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING +IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. + +------------------------------------------------------------------------------- diff --git a/third_party/sphinx/README.md b/third_party/sphinx/README.md new file mode 100644 index 000000000000..f4b4700b0d95 --- /dev/null +++ b/third_party/sphinx/README.md @@ -0,0 +1,2 @@ +Subset of [Sphinx](https://github.com/sphinx-doc/sphinx) to override built-in +templates. 
diff --git a/third_party/sphinx/sphinx/ext/autosummary/templates/autosummary/class.rst b/third_party/sphinx/sphinx/ext/autosummary/templates/autosummary/class.rst new file mode 100644 index 000000000000..852cc624243d --- /dev/null +++ b/third_party/sphinx/sphinx/ext/autosummary/templates/autosummary/class.rst @@ -0,0 +1,35 @@ +{{ fullname | escape | underline}} + +.. currentmodule:: {{ module }} + +.. autoclass:: {{ objname }} + + {% block methods %} + .. automethod:: __init__ + + {% if methods %} + .. rubric:: Methods + + {# Customized from original by adding toctree. This generates docs for the + listed functions. #} + .. autosummary:: + :toctree: . + {% for item in methods %} + {% if item != '__init__' %} + ~{{ name }}.{{ item }} + {% endif %} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block attributes %} + {% if attributes %} + .. rubric:: Attributes + + .. autosummary:: + :toctree: . + {% for item in attributes %} + ~{{ name }}.{{ item }} + {%- endfor %} + {% endif %} + {% endblock %} diff --git a/third_party/sphinx/sphinx/ext/autosummary/templates/autosummary/module.rst b/third_party/sphinx/sphinx/ext/autosummary/templates/autosummary/module.rst new file mode 100644 index 000000000000..3abee6f3632a --- /dev/null +++ b/third_party/sphinx/sphinx/ext/autosummary/templates/autosummary/module.rst @@ -0,0 +1,41 @@ +{{ fullname | escape | underline}} + +.. automodule:: {{ fullname }} + + {% block functions %} + {% if functions %} + .. rubric:: Functions + + {# Customized from original by adding toctree. This generates docs for the + listed functions. #} + .. autosummary:: + :toctree: . + {% for item in functions %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block classes %} + {% if classes %} + .. rubric:: Classes + + .. autosummary:: + :toctree: . + {% for item in classes %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block exceptions %} + {% if exceptions %} + .. rubric:: Exceptions + + .. autosummary:: + :toctree: . + {% for item in exceptions %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} From e592ca39719b0dd6ad9da0473ebce618e5b60d2b Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 22 May 2018 16:03:07 -0400 Subject: [PATCH 24/75] Avoid overwriting '__module__' of messages from shared modules. (#5364) Note that we *are* still overwriting it for messages from modules defined within the current package. See #4715. 
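For reference, every touched types.py gets the same reshaping; a minimal
sketch of the pattern follows ('foo_v1' and 'foo_pb2' are hypothetical
placeholders, not real modules). Messages from shared proto modules such as
google.protobuf.timestamp_pb2 are re-exported with their original
'__module__' intact, since many packages expose those same classes and each
package rewriting '__module__' to its own namespace would leave the result
dependent on import order. Only messages whose proto modules live inside the
current package are re-homed onto its types module:

    import sys

    from google.protobuf import timestamp_pb2  # shared across packages

    from google.api_core.protobuf_helpers import get_messages
    from google.cloud.foo_v1.proto import foo_pb2  # hypothetical local module

    _shared_modules = [
        timestamp_pb2,
    ]

    _local_modules = [
        foo_pb2,
    ]

    names = []

    for module in _shared_modules:
        for name, message in get_messages(module).items():
            # Re-export shared messages without touching __module__.
            setattr(sys.modules[__name__], name, message)
            names.append(name)

    for module in _local_modules:
        for name, message in get_messages(module).items():
            # Safe to rewrite: this proto module exists only in this package.
            message.__module__ = 'google.cloud.foo_v1.types'
            setattr(sys.modules[__name__], name, message)
            names.append(name)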
--- .../cloud/bigquery_datatransfer_v1/types.py | 49 +++++++---- .../google/cloud/bigtable_admin_v2/types.py | 60 +++++++------ bigtable/google/cloud/bigtable_v2/types.py | 38 +++++--- container/google/cloud/container_v1/types.py | 30 +++++-- dataproc/google/cloud/dataproc_v1/types.py | 49 +++++++---- datastore/google/cloud/datastore_v1/types.py | 44 ++++++---- dlp/google/cloud/dlp_v2/types.py | 48 +++++++---- .../cloud/errorreporting_v1beta1/types.py | 46 ++++++---- .../google/cloud/firestore_v1beta1/types.py | 58 ++++++++----- language/google/cloud/language_v1/types.py | 27 ++++-- .../google/cloud/language_v1beta2/types.py | 37 +++++--- logging/google/cloud/logging_v2/types.py | 64 ++++++++------ .../google/cloud/monitoring_v3/types.py | 86 +++++++++++-------- oslogin/google/cloud/oslogin_v1/types.py | 35 +++++--- pubsub/google/cloud/pubsub_v1/types.py | 48 ++++++----- redis/google/cloud/redis_v1beta1/types.py | 39 ++++++--- .../cloud/spanner_admin_database_v1/types.py | 46 ++++++---- .../cloud/spanner_admin_instance_v1/types.py | 47 ++++++---- spanner/google/cloud/spanner_v1/types.py | 56 +++++++----- speech/google/cloud/speech_v1/types.py | 40 ++++++--- speech/google/cloud/speech_v1p1beta1/types.py | 40 ++++++--- tasks/google/cloud/tasks_v2beta2/types.py | 56 +++++++----- .../cloud/texttospeech_v1beta1/types.py | 28 ++++-- trace/google/cloud/trace_v1/types.py | 32 ++++--- trace/google/cloud/trace_v2/types.py | 42 +++++---- .../cloud/videointelligence_v1/types.py | 40 ++++++--- .../cloud/videointelligence_v1beta1/types.py | 38 +++++--- .../cloud/videointelligence_v1beta2/types.py | 40 ++++++--- .../videointelligence_v1p1beta1/types.py | 41 ++++++--- vision/google/cloud/vision_v1/types.py | 50 +++++++---- vision/google/cloud/vision_v1p1beta1/types.py | 50 +++++++---- vision/google/cloud/vision_v1p2beta1/types.py | 56 +++++++----- .../cloud/websecurityscanner_v1alpha/types.py | 60 +++++++------ 33 files changed, 957 insertions(+), 563 deletions(-) diff --git a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/types.py b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/types.py index a74a92cd893f..4998328dbcce 100644 --- a/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/types.py +++ b/bigquery_datatransfer/google/cloud/bigquery_datatransfer_v1/types.py @@ -15,11 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2 -from google.cloud.bigquery_datatransfer_v1.proto import transfer_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 from google.protobuf import duration_pb2 @@ -30,21 +26,38 @@ from google.protobuf import wrappers_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.bigquery_datatransfer_v1.proto import datatransfer_pb2 +from google.cloud.bigquery_datatransfer_v1.proto import transfer_pb2 + + +_shared_modules = [ + http_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + field_mask_pb2, + struct_pb2, + timestamp_pb2, + wrappers_pb2, + status_pb2, +] + + +_local_modules = [ + datatransfer_pb2, + transfer_pb2, +] + names = [] -for module in ( - http_pb2, - datatransfer_pb2, - transfer_pb2, - any_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - struct_pb2, - timestamp_pb2, - wrappers_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, 
message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.bigquery_datatransfer_v1.types' setattr(sys.modules[__name__], name, message) diff --git a/bigtable/google/cloud/bigtable_admin_v2/types.py b/bigtable/google/cloud/bigtable_admin_v2/types.py index b796a7c9bf39..6f91fac2db0c 100644 --- a/bigtable/google/cloud/bigtable_admin_v2/types.py +++ b/bigtable/google/cloud/bigtable_admin_v2/types.py @@ -15,13 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.bigtable_admin_v2.proto import bigtable_instance_admin_pb2 -from google.cloud.bigtable_admin_v2.proto import bigtable_table_admin_pb2 -from google.cloud.bigtable_admin_v2.proto import instance_pb2 -from google.cloud.bigtable_admin_v2.proto import table_pb2 from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import policy_pb2 from google.iam.v1.logging import audit_data_pb2 @@ -34,25 +28,43 @@ from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.bigtable_admin_v2.proto import bigtable_instance_admin_pb2 +from google.cloud.bigtable_admin_v2.proto import bigtable_table_admin_pb2 +from google.cloud.bigtable_admin_v2.proto import instance_pb2 +from google.cloud.bigtable_admin_v2.proto import table_pb2 + + +_shared_modules = [ + http_pb2, + iam_policy_pb2, + policy_pb2, + audit_data_pb2, + operations_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + field_mask_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + bigtable_instance_admin_pb2, + bigtable_table_admin_pb2, + instance_pb2, + table_pb2, +] + names = [] -for module in ( - http_pb2, - bigtable_instance_admin_pb2, - bigtable_table_admin_pb2, - instance_pb2, - table_pb2, - iam_policy_pb2, - policy_pb2, - audit_data_pb2, - operations_pb2, - any_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.bigtable_admin_v2.types' setattr(sys.modules[__name__], name, message) diff --git a/bigtable/google/cloud/bigtable_v2/types.py b/bigtable/google/cloud/bigtable_v2/types.py index b7edfdccdbd5..a5d64f46ef07 100644 --- a/bigtable/google/cloud/bigtable_v2/types.py +++ b/bigtable/google/cloud/bigtable_v2/types.py @@ -15,26 +15,38 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.bigtable_v2.proto import bigtable_pb2 -from google.cloud.bigtable_v2.proto import data_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 from google.protobuf import wrappers_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.bigtable_v2.proto import bigtable_pb2 +from google.cloud.bigtable_v2.proto import data_pb2 + + +_shared_modules = [ + http_pb2, + any_pb2, + descriptor_pb2, + wrappers_pb2, + status_pb2, +] + +_local_modules = [ + bigtable_pb2, + data_pb2, +] + names = [] -for 
module in ( - http_pb2, - bigtable_pb2, - data_pb2, - any_pb2, - descriptor_pb2, - wrappers_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.bigtable_v2.types' setattr(sys.modules[__name__], name, message) diff --git a/container/google/cloud/container_v1/types.py b/container/google/cloud/container_v1/types.py index a6fe3139bdad..df18e10f27ab 100644 --- a/container/google/cloud/container_v1/types.py +++ b/container/google/cloud/container_v1/types.py @@ -15,20 +15,32 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.container_v1.proto import cluster_service_pb2 from google.protobuf import descriptor_pb2 from google.protobuf import empty_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.container_v1.proto import cluster_service_pb2 + + +_shared_modules = [ + http_pb2, + descriptor_pb2, + empty_pb2, +] + +_local_modules = [ + cluster_service_pb2, +] + names = [] -for module in ( - http_pb2, - cluster_service_pb2, - descriptor_pb2, - empty_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.container_v1.types' setattr(sys.modules[__name__], name, message) diff --git a/dataproc/google/cloud/dataproc_v1/types.py b/dataproc/google/cloud/dataproc_v1/types.py index faa32b3c17dd..8c7d50941a8b 100644 --- a/dataproc/google/cloud/dataproc_v1/types.py +++ b/dataproc/google/cloud/dataproc_v1/types.py @@ -15,12 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.dataproc_v1.proto import clusters_pb2 -from google.cloud.dataproc_v1.proto import jobs_pb2 -from google.cloud.dataproc_v1.proto import operations_pb2 as proto_operations_pb2 from google.longrunning import operations_pb2 as longrunning_operations_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 @@ -30,21 +25,37 @@ from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.dataproc_v1.proto import clusters_pb2 +from google.cloud.dataproc_v1.proto import jobs_pb2 +from google.cloud.dataproc_v1.proto import operations_pb2 as proto_operations_pb2 + +_shared_modules = [ + http_pb2, + longrunning_operations_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + field_mask_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + clusters_pb2, + jobs_pb2, + proto_operations_pb2, +] + names = [] -for module in ( - http_pb2, - clusters_pb2, - jobs_pb2, - proto_operations_pb2, - longrunning_operations_pb2, - any_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 
'google.cloud.dataproc_v1.types' setattr(sys.modules[__name__], name, message) diff --git a/datastore/google/cloud/datastore_v1/types.py b/datastore/google/cloud/datastore_v1/types.py index a07136cd8624..6f9f243b3a84 100644 --- a/datastore/google/cloud/datastore_v1/types.py +++ b/datastore/google/cloud/datastore_v1/types.py @@ -15,30 +15,42 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.datastore_v1.proto import datastore_pb2 -from google.cloud.datastore_v1.proto import entity_pb2 -from google.cloud.datastore_v1.proto import query_pb2 from google.protobuf import descriptor_pb2 from google.protobuf import struct_pb2 from google.protobuf import timestamp_pb2 from google.protobuf import wrappers_pb2 from google.type import latlng_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.datastore_v1.proto import datastore_pb2 +from google.cloud.datastore_v1.proto import entity_pb2 +from google.cloud.datastore_v1.proto import query_pb2 + + +_shared_modules = [ + http_pb2, + descriptor_pb2, + struct_pb2, + timestamp_pb2, + wrappers_pb2, + latlng_pb2, +] + +_local_modules = [ + datastore_pb2, + entity_pb2, + query_pb2, +] + names = [] -for module in ( - http_pb2, - datastore_pb2, - entity_pb2, - query_pb2, - descriptor_pb2, - struct_pb2, - timestamp_pb2, - wrappers_pb2, - latlng_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.datastore_v1.types' setattr(sys.modules[__name__], name, message) diff --git a/dlp/google/cloud/dlp_v2/types.py b/dlp/google/cloud/dlp_v2/types.py index 1cd121965a78..ffdd1a2e3c7c 100644 --- a/dlp/google/cloud/dlp_v2/types.py +++ b/dlp/google/cloud/dlp_v2/types.py @@ -15,11 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.dlp_v2.proto import dlp_pb2 -from google.cloud.dlp_v2.proto import storage_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 from google.protobuf import duration_pb2 @@ -30,21 +26,37 @@ from google.type import date_pb2 from google.type import timeofday_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.dlp_v2.proto import dlp_pb2 +from google.cloud.dlp_v2.proto import storage_pb2 + + +_shared_modules = [ + http_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + field_mask_pb2, + timestamp_pb2, + status_pb2, + date_pb2, + timeofday_pb2, +] + +_local_modules = [ + dlp_pb2, + storage_pb2, +] + names = [] -for module in ( - http_pb2, - dlp_pb2, - storage_pb2, - any_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, - status_pb2, - date_pb2, - timeofday_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.dlp_v2.types' setattr(sys.modules[__name__], name, message) diff --git a/error_reporting/google/cloud/errorreporting_v1beta1/types.py b/error_reporting/google/cloud/errorreporting_v1beta1/types.py index 978d281752d1..0c54b22b5856 
100644 --- a/error_reporting/google/cloud/errorreporting_v1beta1/types.py +++ b/error_reporting/google/cloud/errorreporting_v1beta1/types.py @@ -15,32 +15,44 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 from google.api import label_pb2 from google.api import monitored_resource_pb2 +from google.protobuf import descriptor_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import timestamp_pb2 + +from google.api_core.protobuf_helpers import get_messages from google.cloud.errorreporting_v1beta1.proto import common_pb2 from google.cloud.errorreporting_v1beta1.proto import error_group_service_pb2 from google.cloud.errorreporting_v1beta1.proto import error_stats_service_pb2 from google.cloud.errorreporting_v1beta1.proto import report_errors_service_pb2 -from google.protobuf import descriptor_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import timestamp_pb2 + + +_shared_modules = [ + http_pb2, + label_pb2, + monitored_resource_pb2, + report_errors_service_pb2, + descriptor_pb2, + duration_pb2, + timestamp_pb2, +] + +_local_modules = [ + common_pb2, + error_group_service_pb2, + error_stats_service_pb2, +] names = [] -for module in ( - http_pb2, - label_pb2, - monitored_resource_pb2, - common_pb2, - error_group_service_pb2, - error_stats_service_pb2, - report_errors_service_pb2, - descriptor_pb2, - duration_pb2, - timestamp_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.errorreporting_v1beta1.types' setattr(sys.modules[__name__], name, message) diff --git a/firestore/google/cloud/firestore_v1beta1/types.py b/firestore/google/cloud/firestore_v1beta1/types.py index 43804fd3876b..9e21515fa717 100644 --- a/firestore/google/cloud/firestore_v1beta1/types.py +++ b/firestore/google/cloud/firestore_v1beta1/types.py @@ -15,14 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.firestore_v1beta1.proto import common_pb2 -from google.cloud.firestore_v1beta1.proto import document_pb2 -from google.cloud.firestore_v1beta1.proto import firestore_pb2 -from google.cloud.firestore_v1beta1.proto import query_pb2 -from google.cloud.firestore_v1beta1.proto import write_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 from google.protobuf import empty_pb2 @@ -32,23 +25,42 @@ from google.rpc import status_pb2 from google.type import latlng_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.firestore_v1beta1.proto import common_pb2 +from google.cloud.firestore_v1beta1.proto import document_pb2 +from google.cloud.firestore_v1beta1.proto import firestore_pb2 +from google.cloud.firestore_v1beta1.proto import query_pb2 +from google.cloud.firestore_v1beta1.proto import write_pb2 + + +_shared_modules = [ + http_pb2, + any_pb2, + descriptor_pb2, + empty_pb2, + struct_pb2, + timestamp_pb2, + wrappers_pb2, + status_pb2, + latlng_pb2, +] + +_local_modules = [ + common_pb2, + document_pb2, + firestore_pb2, + query_pb2, + write_pb2, +] + names = [] -for module in ( - http_pb2, - common_pb2, - document_pb2, - firestore_pb2, - query_pb2, - write_pb2, - any_pb2, - descriptor_pb2, - 
empty_pb2, - struct_pb2, - timestamp_pb2, - wrappers_pb2, - status_pb2, - latlng_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.firestore_v1beta1.types' setattr(sys.modules[__name__], name, message) diff --git a/language/google/cloud/language_v1/types.py b/language/google/cloud/language_v1/types.py index 0ac5ec174b21..469269333ef0 100644 --- a/language/google/cloud/language_v1/types.py +++ b/language/google/cloud/language_v1/types.py @@ -15,17 +15,30 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.language_v1.proto import language_service_pb2 from google.protobuf import descriptor_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.language_v1.proto import language_service_pb2 + + +_shared_modules = [ + http_pb2, + descriptor_pb2, +] + +_local_modules = [ + language_service_pb2, +] + names = [] -for module in ( - http_pb2, - language_service_pb2, - descriptor_pb2, ): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.language_v1.types' setattr(sys.modules[__name__], name, message) diff --git a/language/google/cloud/language_v1beta2/types.py b/language/google/cloud/language_v1beta2/types.py index 4439570ac5e5..f17b5a59dd96 100644 --- a/language/google/cloud/language_v1beta2/types.py +++ b/language/google/cloud/language_v1beta2/types.py @@ -15,10 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.language_v1beta2.proto import language_service_pb2 from google.longrunning import operations_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 @@ -26,16 +23,32 @@ from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.language_v1beta2.proto import language_service_pb2 + + +_shared_modules = [ + http_pb2, + operations_pb2, + any_pb2, + descriptor_pb2, + empty_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + language_service_pb2, +] + names = [] -for module in ( - http_pb2, - language_service_pb2, - operations_pb2, - any_pb2, - descriptor_pb2, - empty_pb2, - timestamp_pb2, - status_pb2, ): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.language_v1beta2.types' setattr(sys.modules[__name__], name, message) diff --git a/logging/google/cloud/logging_v2/types.py b/logging/google/cloud/logging_v2/types.py index d440d8f58f07..25787f31017f 100644 --- a/logging/google/cloud/logging_v2/types.py +++ b/logging/google/cloud/logging_v2/types.py @@ -15,17 +15,11 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import distribution_pb2 from google.api import 
http_pb2 from google.api import label_pb2 from google.api import metric_pb2 from google.api import monitored_resource_pb2 -from google.cloud.logging_v2.proto import log_entry_pb2 -from google.cloud.logging_v2.proto import logging_config_pb2 -from google.cloud.logging_v2.proto import logging_metrics_pb2 -from google.cloud.logging_v2.proto import logging_pb2 from google.logging.type import http_request_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 @@ -36,27 +30,45 @@ from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.logging_v2.proto import log_entry_pb2 +from google.cloud.logging_v2.proto import logging_config_pb2 +from google.cloud.logging_v2.proto import logging_metrics_pb2 +from google.cloud.logging_v2.proto import logging_pb2 + + +_shared_modules = [ + distribution_pb2, + http_pb2, + label_pb2, + metric_pb2, + monitored_resource_pb2, + http_request_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + field_mask_pb2, + struct_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + log_entry_pb2, + logging_config_pb2, + logging_metrics_pb2, + logging_pb2, +] + names = [] -for module in ( - distribution_pb2, - http_pb2, - label_pb2, - metric_pb2, - monitored_resource_pb2, - log_entry_pb2, - logging_config_pb2, - logging_metrics_pb2, - logging_pb2, - http_request_pb2, - any_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - struct_pb2, - timestamp_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.logging_v2.types' setattr(sys.modules[__name__], name, message) diff --git a/monitoring/google/cloud/monitoring_v3/types.py b/monitoring/google/cloud/monitoring_v3/types.py index 12408f9d8d47..b89b3f80d19a 100644 --- a/monitoring/google/cloud/monitoring_v3/types.py +++ b/monitoring/google/cloud/monitoring_v3/types.py @@ -15,13 +15,21 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import distribution_pb2 from google.api import http_pb2 from google.api import label_pb2 from google.api import metric_pb2 as api_metric_pb2 from google.api import monitored_resource_pb2 +from google.protobuf import any_pb2 +from google.protobuf import descriptor_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 +from google.protobuf import timestamp_pb2 +from google.protobuf import wrappers_pb2 +from google.rpc import status_pb2 + +from google.api_core.protobuf_helpers import get_messages from google.cloud.monitoring_v3.proto import alert_pb2 from google.cloud.monitoring_v3.proto import alert_service_pb2 from google.cloud.monitoring_v3.proto import common_pb2 @@ -34,43 +42,47 @@ from google.cloud.monitoring_v3.proto import notification_service_pb2 from google.cloud.monitoring_v3.proto import uptime_pb2 from google.cloud.monitoring_v3.proto import uptime_service_pb2 -from google.protobuf import any_pb2 -from google.protobuf import descriptor_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 -from google.protobuf import 
wrappers_pb2 -from google.rpc import status_pb2 + + +_shared_modules = [ + distribution_pb2, + http_pb2, + label_pb2, + api_metric_pb2, + monitored_resource_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + field_mask_pb2, + timestamp_pb2, + wrappers_pb2, + status_pb2, +] + +_local_modules = [ + alert_pb2, + alert_service_pb2, + common_pb2, + group_pb2, + group_service_pb2, + proto_metric_pb2, + metric_service_pb2, + mutation_record_pb2, + notification_pb2, + notification_service_pb2, + uptime_pb2, + uptime_service_pb2, +] names = [] -for module in ( - distribution_pb2, - http_pb2, - label_pb2, - api_metric_pb2, - monitored_resource_pb2, - alert_pb2, - alert_service_pb2, - common_pb2, - group_pb2, - group_service_pb2, - proto_metric_pb2, - metric_service_pb2, - mutation_record_pb2, - notification_pb2, - notification_service_pb2, - uptime_pb2, - uptime_service_pb2, - any_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, - wrappers_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.monitoring_v3.types' setattr(sys.modules[__name__], name, message) diff --git a/oslogin/google/cloud/oslogin_v1/types.py b/oslogin/google/cloud/oslogin_v1/types.py index 5d75dad11362..fa5dc2687642 100644 --- a/oslogin/google/cloud/oslogin_v1/types.py +++ b/oslogin/google/cloud/oslogin_v1/types.py @@ -15,24 +15,35 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.oslogin_v1.proto import common_pb2 -from google.cloud.oslogin_v1.proto import oslogin_pb2 from google.protobuf import descriptor_pb2 from google.protobuf import empty_pb2 from google.protobuf import field_mask_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.oslogin_v1.proto import common_pb2 +from google.cloud.oslogin_v1.proto import oslogin_pb2 + +_shared_modules = [ + http_pb2, + descriptor_pb2, + empty_pb2, + field_mask_pb2, +] + +_local_modules = [ + common_pb2, + oslogin_pb2, +] + names = [] -for module in ( - http_pb2, - common_pb2, - oslogin_pb2, - descriptor_pb2, - empty_pb2, - field_mask_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.oslogin_v1.types' setattr(sys.modules[__name__], name, message) diff --git a/pubsub/google/cloud/pubsub_v1/types.py b/pubsub/google/cloud/pubsub_v1/types.py index fbb21af02e33..1ac99a96534b 100644 --- a/pubsub/google/cloud/pubsub_v1/types.py +++ b/pubsub/google/cloud/pubsub_v1/types.py @@ -16,10 +16,7 @@ import collections import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.pubsub_v1.proto import pubsub_pb2 from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import policy_pb2 from google.iam.v1.logging import audit_data_pb2 @@ -29,6 +26,9 @@ from google.protobuf import field_mask_pb2 from google.protobuf import timestamp_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.pubsub_v1.proto import pubsub_pb2 + # Define the default values for 
batching. # @@ -67,24 +67,32 @@ ) +_shared_modules = [ + http_pb2, + iam_policy_pb2, + policy_pb2, + audit_data_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + field_mask_pb2, + timestamp_pb2, +] + +_local_modules = [ + pubsub_pb2, +] + + names = ['BatchSettings', 'FlowControl'] -for name, message in get_messages(pubsub_pb2).items(): - message.__module__ = 'google.cloud.pubsub_v1.types' - setattr(sys.modules[__name__], name, message) - names.append(name) - - -for module in ( - http_pb2, - pubsub_pb2, - iam_policy_pb2, - policy_pb2, - audit_data_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, ): + + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.pubsub_v1.types' setattr(sys.modules[__name__], name, message) diff --git a/redis/google/cloud/redis_v1beta1/types.py b/redis/google/cloud/redis_v1beta1/types.py index 05b300468373..c854d1fb68d4 100644 --- a/redis/google/cloud/redis_v1beta1/types.py +++ b/redis/google/cloud/redis_v1beta1/types.py @@ -15,10 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.redis_v1beta1.proto import cloud_redis_pb2 from google.longrunning import operations_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 @@ -27,18 +24,32 @@ from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.redis_v1beta1.proto import cloud_redis_pb2 + +_shared_modules = [ + http_pb2, + operations_pb2, + any_pb2, + descriptor_pb2, + empty_pb2, + field_mask_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + cloud_redis_pb2, +] + names = [] -for module in ( - http_pb2, - cloud_redis_pb2, - operations_pb2, - any_pb2, - descriptor_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.redis_v1beta1.types' setattr(sys.modules[__name__], name, message) diff --git a/spanner/google/cloud/spanner_admin_database_v1/types.py b/spanner/google/cloud/spanner_admin_database_v1/types.py index a31e298d575f..56ac4f8fb39a 100644 --- a/spanner/google/cloud/spanner_admin_database_v1/types.py +++ b/spanner/google/cloud/spanner_admin_database_v1/types.py @@ -15,11 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.spanner_admin_database_v1.proto import ( - spanner_database_admin_pb2) from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import policy_pb2 from google.iam.v1.logging import audit_data_pb2 @@ -30,20 +26,36 @@ from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.spanner_admin_database_v1.proto import ( + spanner_database_admin_pb2) + + +_shared_modules = [ + http_pb2, + iam_policy_pb2, + policy_pb2, + audit_data_pb2, + operations_pb2, + any_pb2, + descriptor_pb2, + 
empty_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + spanner_database_admin_pb2, +] + names = [] -for module in ( - http_pb2, - spanner_database_admin_pb2, - iam_policy_pb2, - policy_pb2, - audit_data_pb2, - operations_pb2, - any_pb2, - descriptor_pb2, - empty_pb2, - timestamp_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.spanner_admin_database_v1.types' setattr(sys.modules[__name__], name, message) diff --git a/spanner/google/cloud/spanner_admin_instance_v1/types.py b/spanner/google/cloud/spanner_admin_instance_v1/types.py index 73acc2b574f5..8725bcba6369 100644 --- a/spanner/google/cloud/spanner_admin_instance_v1/types.py +++ b/spanner/google/cloud/spanner_admin_instance_v1/types.py @@ -15,11 +15,8 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages from google.api import http_pb2 -from google.cloud.spanner_admin_instance_v1.proto import ( - spanner_instance_admin_pb2) from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import policy_pb2 from google.iam.v1.logging import audit_data_pb2 @@ -31,21 +28,37 @@ from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.spanner_admin_instance_v1.proto import ( + spanner_instance_admin_pb2) + + +_shared_modules = [ + http_pb2, + iam_policy_pb2, + policy_pb2, + audit_data_pb2, + operations_pb2, + any_pb2, + descriptor_pb2, + empty_pb2, + field_mask_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + spanner_instance_admin_pb2, +] + names = [] -for module in ( - http_pb2, - spanner_instance_admin_pb2, - iam_policy_pb2, - policy_pb2, - audit_data_pb2, - operations_pb2, - any_pb2, - descriptor_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.spanner_admin_instance_v1.types' setattr(sys.modules[__name__], name, message) diff --git a/spanner/google/cloud/spanner_v1/types.py b/spanner/google/cloud/spanner_v1/types.py index 8eca4c62370b..256308b3692a 100644 --- a/spanner/google/cloud/spanner_v1/types.py +++ b/spanner/google/cloud/spanner_v1/types.py @@ -15,9 +15,14 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 +from google.protobuf import descriptor_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import struct_pb2 +from google.protobuf import timestamp_pb2 + +from google.api_core.protobuf_helpers import get_messages from google.cloud.spanner_v1.proto import keys_pb2 from google.cloud.spanner_v1.proto import mutation_pb2 from google.cloud.spanner_v1.proto import query_plan_pb2 @@ -25,28 +30,35 @@ from google.cloud.spanner_v1.proto import spanner_pb2 from google.cloud.spanner_v1.proto import transaction_pb2 from google.cloud.spanner_v1.proto import type_pb2 -from google.protobuf import descriptor_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import 
empty_pb2 -from google.protobuf import struct_pb2 -from google.protobuf import timestamp_pb2 + + +_shared_modules = [ + http_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + struct_pb2, + timestamp_pb2, +] + +_local_modules = [ + keys_pb2, + mutation_pb2, + query_plan_pb2, + result_set_pb2, + spanner_pb2, + transaction_pb2, + type_pb2, +] names = [] -for module in ( - http_pb2, - keys_pb2, - mutation_pb2, - query_plan_pb2, - result_set_pb2, - spanner_pb2, - transaction_pb2, - type_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - struct_pb2, - timestamp_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.spanner_v1.types' setattr(sys.modules[__name__], name, message) diff --git a/speech/google/cloud/speech_v1/types.py b/speech/google/cloud/speech_v1/types.py index a5d04962a178..414d4d91ad60 100644 --- a/speech/google/cloud/speech_v1/types.py +++ b/speech/google/cloud/speech_v1/types.py @@ -15,10 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.speech_v1.proto import cloud_speech_pb2 from google.longrunning import operations_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 @@ -27,18 +24,33 @@ from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.speech_v1.proto import cloud_speech_pb2 + + +_shared_modules = [ + http_pb2, + operations_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + cloud_speech_pb2, +] + names = [] -for module in ( - http_pb2, - cloud_speech_pb2, - operations_pb2, - any_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - timestamp_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.speech_v1.types' setattr(sys.modules[__name__], name, message) diff --git a/speech/google/cloud/speech_v1p1beta1/types.py b/speech/google/cloud/speech_v1p1beta1/types.py index e13bc3ba138c..c4101981fba1 100644 --- a/speech/google/cloud/speech_v1p1beta1/types.py +++ b/speech/google/cloud/speech_v1p1beta1/types.py @@ -15,10 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.speech_v1p1beta1.proto import cloud_speech_pb2 from google.longrunning import operations_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 @@ -27,18 +24,33 @@ from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.speech_v1p1beta1.proto import cloud_speech_pb2 + + +_shared_modules = [ + http_pb2, + operations_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + cloud_speech_pb2, +] + names = [] -for module in ( - http_pb2, - cloud_speech_pb2, - operations_pb2, - any_pb2, - descriptor_pb2, - duration_pb2, - 
empty_pb2, - timestamp_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.speech_v1p1beta1.types' setattr(sys.modules[__name__], name, message) diff --git a/tasks/google/cloud/tasks_v2beta2/types.py b/tasks/google/cloud/tasks_v2beta2/types.py index e9367f78cce5..1a399d3e1149 100644 --- a/tasks/google/cloud/tasks_v2beta2/types.py +++ b/tasks/google/cloud/tasks_v2beta2/types.py @@ -15,13 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2 -from google.cloud.tasks_v2beta2.proto import queue_pb2 -from google.cloud.tasks_v2beta2.proto import target_pb2 -from google.cloud.tasks_v2beta2.proto import task_pb2 from google.iam.v1 import iam_policy_pb2 from google.iam.v1 import policy_pb2 from google.protobuf import any_pb2 @@ -32,23 +26,41 @@ from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.tasks_v2beta2.proto import cloudtasks_pb2 +from google.cloud.tasks_v2beta2.proto import queue_pb2 +from google.cloud.tasks_v2beta2.proto import target_pb2 +from google.cloud.tasks_v2beta2.proto import task_pb2 + + +_shared_modules = [ + http_pb2, + iam_policy_pb2, + policy_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + field_mask_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + cloudtasks_pb2, + queue_pb2, + target_pb2, + task_pb2, +] + names = [] -for module in ( - http_pb2, - cloudtasks_pb2, - queue_pb2, - target_pb2, - task_pb2, - iam_policy_pb2, - policy_pb2, - any_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.tasks_v2beta2.types' setattr(sys.modules[__name__], name, message) diff --git a/texttospeech/google/cloud/texttospeech_v1beta1/types.py b/texttospeech/google/cloud/texttospeech_v1beta1/types.py index adaab109d542..5402e1338e69 100644 --- a/texttospeech/google/cloud/texttospeech_v1beta1/types.py +++ b/texttospeech/google/cloud/texttospeech_v1beta1/types.py @@ -15,18 +15,30 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.texttospeech_v1beta1.proto import cloud_tts_pb2 from google.protobuf import descriptor_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.texttospeech_v1beta1.proto import cloud_tts_pb2 + + +_shared_modules = [ + http_pb2, + descriptor_pb2, +] + +_local_modules = [ + cloud_tts_pb2, +] + names = [] -for module in ( - http_pb2, - cloud_tts_pb2, - descriptor_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.texttospeech_v1beta1.types' 
setattr(sys.modules[__name__], name, message) diff --git a/trace/google/cloud/trace_v1/types.py b/trace/google/cloud/trace_v1/types.py index 49c585823c5a..0f22cc169f53 100644 --- a/trace/google/cloud/trace_v1/types.py +++ b/trace/google/cloud/trace_v1/types.py @@ -15,22 +15,34 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.trace_v1.proto import trace_pb2 from google.protobuf import descriptor_pb2 from google.protobuf import empty_pb2 from google.protobuf import timestamp_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.trace_v1.proto import trace_pb2 + + +_shared_modules = [ + http_pb2, + descriptor_pb2, + empty_pb2, + timestamp_pb2, +] + +_local_modules = [ + trace_pb2, +] + names = [] -for module in ( - http_pb2, - trace_pb2, - descriptor_pb2, - empty_pb2, - timestamp_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.trace_v1.types' setattr(sys.modules[__name__], name, message) diff --git a/trace/google/cloud/trace_v2/types.py b/trace/google/cloud/trace_v2/types.py index d7ae84583c7c..f74c6218e1df 100644 --- a/trace/google/cloud/trace_v2/types.py +++ b/trace/google/cloud/trace_v2/types.py @@ -15,11 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.trace_v2.proto import trace_pb2 -from google.cloud.trace_v2.proto import tracing_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 from google.protobuf import empty_pb2 @@ -27,18 +23,34 @@ from google.protobuf import wrappers_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.trace_v2.proto import trace_pb2 +from google.cloud.trace_v2.proto import tracing_pb2 + + +_shared_modules = [ + http_pb2, + any_pb2, + descriptor_pb2, + empty_pb2, + timestamp_pb2, + wrappers_pb2, + status_pb2, +] + +_local_modules = [ + trace_pb2, + tracing_pb2, +] + names = [] -for module in ( - http_pb2, - trace_pb2, - tracing_pb2, - any_pb2, - descriptor_pb2, - empty_pb2, - timestamp_pb2, - wrappers_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.trace_v2.types' setattr(sys.modules[__name__], name, message) diff --git a/videointelligence/google/cloud/videointelligence_v1/types.py b/videointelligence/google/cloud/videointelligence_v1/types.py index 4d34475d179a..1d59ad8f3b76 100644 --- a/videointelligence/google/cloud/videointelligence_v1/types.py +++ b/videointelligence/google/cloud/videointelligence_v1/types.py @@ -15,10 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.videointelligence_v1.proto import video_intelligence_pb2 from google.longrunning import operations_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 @@ -27,18 +24,33 @@ from google.protobuf import timestamp_pb2 
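All of these ``types.py`` diffs apply one and the same refactoring: the flat tuple of protobuf modules is split into ``_shared_modules`` (messages that belong to upstream packages such as ``google.api``, ``google.protobuf``, and ``google.rpc``) and ``_local_modules`` (messages generated from the API's own protos). Only the local messages get their ``__module__`` rewritten, so shared types keep pointing at their canonical home while API-specific types are documented under the client's ``types`` module. A minimal, self-contained sketch of the shape every file converges on, assuming a hypothetical local ``example_pb2`` module standing in for any of the per-API modules:

.. code:: py

    import sys

    from google.protobuf import timestamp_pb2  # shared, upstream package

    from google.api_core.protobuf_helpers import get_messages
    from example.proto import example_pb2  # hypothetical local module

    _shared_modules = [timestamp_pb2]
    _local_modules = [example_pb2]

    names = []

    # Shared messages are re-exported as-is; their __module__ still points
    # at the upstream protobuf package, so docs link to the canonical home.
    for module in _shared_modules:
        for name, message in get_messages(module).items():
            setattr(sys.modules[__name__], name, message)
            names.append(name)

    # Local messages are additionally re-homed onto this module so that
    # Sphinx documents them under the client library's ``types`` namespace.
    for module in _local_modules:
        for name, message in get_messages(module).items():
            message.__module__ = 'example.types'
            setattr(sys.modules[__name__], name, message)
            names.append(name)

    __all__ = tuple(sorted(names))

Keeping the two lists separate also makes review easier: it is immediately visible that no upstream message is accidentally re-homed.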
from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.videointelligence_v1.proto import video_intelligence_pb2 + + +_shared_modules = [ + http_pb2, + operations_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + video_intelligence_pb2, +] + names = [] -for module in ( - http_pb2, - video_intelligence_pb2, - operations_pb2, - any_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - timestamp_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.videointelligence_v1.types' setattr(sys.modules[__name__], name, message) diff --git a/videointelligence/google/cloud/videointelligence_v1beta1/types.py b/videointelligence/google/cloud/videointelligence_v1beta1/types.py index 49c0459d2fea..35dd7b2abd3e 100644 --- a/videointelligence/google/cloud/videointelligence_v1beta1/types.py +++ b/videointelligence/google/cloud/videointelligence_v1beta1/types.py @@ -15,10 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.videointelligence_v1beta1.proto import video_intelligence_pb2 from google.longrunning import operations_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 @@ -26,17 +23,32 @@ from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.videointelligence_v1beta1.proto import video_intelligence_pb2 + + +_shared_modules = [ + http_pb2, + operations_pb2, + any_pb2, + descriptor_pb2, + empty_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + video_intelligence_pb2, +] + names = [] -for module in ( - http_pb2, - video_intelligence_pb2, - operations_pb2, - any_pb2, - descriptor_pb2, - empty_pb2, - timestamp_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.videointelligence_v1beta1.types' setattr(sys.modules[__name__], name, message) diff --git a/videointelligence/google/cloud/videointelligence_v1beta2/types.py b/videointelligence/google/cloud/videointelligence_v1beta2/types.py index da516eb8b06d..4772f678a773 100644 --- a/videointelligence/google/cloud/videointelligence_v1beta2/types.py +++ b/videointelligence/google/cloud/videointelligence_v1beta2/types.py @@ -15,10 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.videointelligence_v1beta2.proto import video_intelligence_pb2 from google.longrunning import operations_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 @@ -27,18 +24,33 @@ from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.videointelligence_v1beta2.proto import video_intelligence_pb2 + + +_shared_modules = [ + http_pb2, + operations_pb2, + any_pb2, + descriptor_pb2, + 
duration_pb2, + empty_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + video_intelligence_pb2, +] + names = [] -for module in ( - http_pb2, - video_intelligence_pb2, - operations_pb2, - any_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - timestamp_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.videointelligence_v1beta2.types' setattr(sys.modules[__name__], name, message) diff --git a/videointelligence/google/cloud/videointelligence_v1p1beta1/types.py b/videointelligence/google/cloud/videointelligence_v1p1beta1/types.py index fabf956b94c6..90a88ae5f49c 100644 --- a/videointelligence/google/cloud/videointelligence_v1p1beta1/types.py +++ b/videointelligence/google/cloud/videointelligence_v1p1beta1/types.py @@ -15,10 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.videointelligence_v1p1beta1.proto import video_intelligence_pb2 from google.longrunning import operations_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 @@ -27,18 +24,34 @@ from google.protobuf import timestamp_pb2 from google.rpc import status_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.videointelligence_v1p1beta1.proto import ( + video_intelligence_pb2) + + +_shared_modules = [ + http_pb2, + operations_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + timestamp_pb2, + status_pb2, +] + +_local_modules = [ + video_intelligence_pb2, +] + names = [] -for module in ( - http_pb2, - video_intelligence_pb2, - operations_pb2, - any_pb2, - descriptor_pb2, - duration_pb2, - empty_pb2, - timestamp_pb2, - status_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.videointelligence_v1p1beta1.types' setattr(sys.modules[__name__], name, message) diff --git a/vision/google/cloud/vision_v1/types.py b/vision/google/cloud/vision_v1/types.py index 95a90422c1a6..d6eeecd212fd 100644 --- a/vision/google/cloud/vision_v1/types.py +++ b/vision/google/cloud/vision_v1/types.py @@ -15,13 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.vision_v1.proto import geometry_pb2 -from google.cloud.vision_v1.proto import image_annotator_pb2 -from google.cloud.vision_v1.proto import text_annotation_pb2 -from google.cloud.vision_v1.proto import web_detection_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 from google.protobuf import wrappers_pb2 @@ -29,20 +23,38 @@ from google.type import color_pb2 from google.type import latlng_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.vision_v1.proto import geometry_pb2 +from google.cloud.vision_v1.proto import image_annotator_pb2 +from google.cloud.vision_v1.proto import text_annotation_pb2 +from google.cloud.vision_v1.proto import web_detection_pb2 + + +_shared_modules = [ + http_pb2, + any_pb2, + descriptor_pb2, + wrappers_pb2, + 
status_pb2, + color_pb2, + latlng_pb2, +] + +_local_modules = [ + geometry_pb2, + image_annotator_pb2, + text_annotation_pb2, + web_detection_pb2, +] + names = [] -for module in ( - http_pb2, - geometry_pb2, - image_annotator_pb2, - text_annotation_pb2, - web_detection_pb2, - any_pb2, - descriptor_pb2, - wrappers_pb2, - status_pb2, - color_pb2, - latlng_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.vision_v1.types' setattr(sys.modules[__name__], name, message) diff --git a/vision/google/cloud/vision_v1p1beta1/types.py b/vision/google/cloud/vision_v1p1beta1/types.py index 6d60d9512a08..f31430e48f47 100644 --- a/vision/google/cloud/vision_v1p1beta1/types.py +++ b/vision/google/cloud/vision_v1p1beta1/types.py @@ -15,13 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.vision_v1p1beta1.proto import geometry_pb2 -from google.cloud.vision_v1p1beta1.proto import image_annotator_pb2 -from google.cloud.vision_v1p1beta1.proto import text_annotation_pb2 -from google.cloud.vision_v1p1beta1.proto import web_detection_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 from google.protobuf import wrappers_pb2 @@ -29,20 +23,38 @@ from google.type import color_pb2 from google.type import latlng_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.vision_v1p1beta1.proto import geometry_pb2 +from google.cloud.vision_v1p1beta1.proto import image_annotator_pb2 +from google.cloud.vision_v1p1beta1.proto import text_annotation_pb2 +from google.cloud.vision_v1p1beta1.proto import web_detection_pb2 + + +_shared_modules = [ + http_pb2, + any_pb2, + descriptor_pb2, + wrappers_pb2, + status_pb2, + color_pb2, + latlng_pb2, +] + +_local_modules = [ + geometry_pb2, + image_annotator_pb2, + text_annotation_pb2, + web_detection_pb2, +] + names = [] -for module in ( - http_pb2, - geometry_pb2, - image_annotator_pb2, - text_annotation_pb2, - web_detection_pb2, - any_pb2, - descriptor_pb2, - wrappers_pb2, - status_pb2, - color_pb2, - latlng_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.vision_v1p1beta1.types' setattr(sys.modules[__name__], name, message) diff --git a/vision/google/cloud/vision_v1p2beta1/types.py b/vision/google/cloud/vision_v1p2beta1/types.py index ee4a83eb5179..4c10ca08cc7e 100644 --- a/vision/google/cloud/vision_v1p2beta1/types.py +++ b/vision/google/cloud/vision_v1p2beta1/types.py @@ -15,13 +15,7 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 -from google.cloud.vision_v1p2beta1.proto import geometry_pb2 -from google.cloud.vision_v1p2beta1.proto import image_annotator_pb2 -from google.cloud.vision_v1p2beta1.proto import text_annotation_pb2 -from google.cloud.vision_v1p2beta1.proto import web_detection_pb2 from google.longrunning import operations_pb2 from google.protobuf import any_pb2 from google.protobuf import descriptor_pb2 @@ -32,23 +26,41 @@ from google.type 
import color_pb2 from google.type import latlng_pb2 +from google.api_core.protobuf_helpers import get_messages +from google.cloud.vision_v1p2beta1.proto import geometry_pb2 +from google.cloud.vision_v1p2beta1.proto import image_annotator_pb2 +from google.cloud.vision_v1p2beta1.proto import text_annotation_pb2 +from google.cloud.vision_v1p2beta1.proto import web_detection_pb2 + + +_shared_modules = [ + http_pb2, + operations_pb2, + any_pb2, + descriptor_pb2, + empty_pb2, + timestamp_pb2, + wrappers_pb2, + status_pb2, + color_pb2, + latlng_pb2, +] + +_local_modules = [ + geometry_pb2, + image_annotator_pb2, + text_annotation_pb2, + web_detection_pb2, +] + names = [] -for module in ( - http_pb2, - geometry_pb2, - image_annotator_pb2, - text_annotation_pb2, - web_detection_pb2, - operations_pb2, - any_pb2, - descriptor_pb2, - empty_pb2, - timestamp_pb2, - wrappers_pb2, - status_pb2, - color_pb2, - latlng_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + names.append(name) + +for module in _local_modules: for name, message in get_messages(module).items(): message.__module__ = 'google.cloud.vision_v1p2beta1.types' setattr(sys.modules[__name__], name, message) diff --git a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/types.py b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/types.py index 18bb85a0d5fd..dd70ee0d8861 100644 --- a/websecurityscanner/google/cloud/websecurityscanner_v1alpha/types.py +++ b/websecurityscanner/google/cloud/websecurityscanner_v1alpha/types.py @@ -15,38 +15,50 @@ from __future__ import absolute_import import sys -from google.api_core.protobuf_helpers import get_messages - from google.api import http_pb2 +from google.protobuf import descriptor_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 +from google.protobuf import timestamp_pb2 + +from google.api_core.protobuf_helpers import get_messages from google.cloud.websecurityscanner_v1alpha.proto import crawled_url_pb2 from google.cloud.websecurityscanner_v1alpha.proto import finding_addon_pb2 from google.cloud.websecurityscanner_v1alpha.proto import finding_pb2 -from google.cloud.websecurityscanner_v1alpha.proto import \ - finding_type_stats_pb2 +from google.cloud.websecurityscanner_v1alpha.proto import ( + finding_type_stats_pb2 ) from google.cloud.websecurityscanner_v1alpha.proto import scan_config_pb2 from google.cloud.websecurityscanner_v1alpha.proto import scan_run_pb2 -from google.cloud.websecurityscanner_v1alpha.proto import \ - web_security_scanner_pb2 -from google.protobuf import descriptor_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 +from google.cloud.websecurityscanner_v1alpha.proto import ( + web_security_scanner_pb2 ) + + +_shared_modules = [ + http_pb2, + descriptor_pb2, + empty_pb2, + field_mask_pb2, + timestamp_pb2, +] + +_local_modules = [ + crawled_url_pb2, + finding_addon_pb2, + finding_pb2, + finding_type_stats_pb2, + scan_config_pb2, + scan_run_pb2, + web_security_scanner_pb2, +] names = [] -for module in ( - http_pb2, - crawled_url_pb2, - finding_addon_pb2, - finding_pb2, - finding_type_stats_pb2, - scan_config_pb2, - scan_run_pb2, - web_security_scanner_pb2, - descriptor_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, -): + +for module in _shared_modules: + for name, message in get_messages(module).items(): + setattr(sys.modules[__name__], name, message) + 
names.append(name)
+
+for module in _local_modules:
     for name, message in get_messages(module).items():
         message.__module__ = 'google.cloud.websecurityscanner_v1alpha.types'
         setattr(sys.modules[__name__], name, message)

From 04d1649c97c701fb7963197d70dad366d52558bd Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Tue, 22 May 2018 16:05:16 -0400
Subject: [PATCH 25/75] Prune systests identified as redundant to snippets. (#5365)

See @tswast's analysis:
https://github.com/GoogleCloudPlatform/google-cloud-python/issues/5003#issuecomment-385049005

Toward #5003.
---
 bigquery/tests/system.py | 98 ----------------------------------------
 1 file changed, 98 deletions(-)

diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py
index 4ab6c8b8c9b4..03465ff8d594 100644
--- a/bigquery/tests/system.py
+++ b/bigquery/tests/system.py
@@ -475,47 +475,6 @@ def test_insert_rows_then_dump_table(self):
         self.assertEqual(sorted(row_tuples, key=by_age),
                          sorted(ROWS, key=by_age))

-    def test_load_table_from_local_file_then_dump_table(self):
-        from google.cloud._testing import _NamedTemporaryFile
-        from google.cloud.bigquery.job import CreateDisposition
-        from google.cloud.bigquery.job import SourceFormat
-        from google.cloud.bigquery.job import WriteDisposition
-
-        TABLE_NAME = 'test_table'
-
-        dataset = self.temp_dataset(_make_dataset_id('load_local_then_dump'))
-        table_ref = dataset.table(TABLE_NAME)
-        table_arg = Table(table_ref, schema=SCHEMA)
-        table = retry_403(Config.CLIENT.create_table)(table_arg)
-        self.to_delete.insert(0, table)
-
-        with _NamedTemporaryFile() as temp:
-            with open(temp.name, 'w') as csv_write:
-                writer = csv.writer(csv_write)
-                writer.writerow(HEADER_ROW)
-                writer.writerows(ROWS)
-
-            with open(temp.name, 'rb') as csv_read:
-                config = bigquery.LoadJobConfig()
-                config.source_format = SourceFormat.CSV
-                config.skip_leading_rows = 1
-                config.create_disposition = CreateDisposition.CREATE_NEVER
-                config.write_disposition = WriteDisposition.WRITE_EMPTY
-                config.schema = table.schema
-                job = Config.CLIENT.load_table_from_file(
-                    csv_read, table_ref, job_config=config)
-
-            # Retry until done.
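The systest pruned here duplicated coverage that already exists as documented snippets. For orientation, a snippet-level equivalent of the CSV load flow exercised by this test might look like the sketch below; ``'my_dataset'``, ``'my_table'``, and ``people.csv`` are placeholder names, not values taken from the test:

.. code:: py

    from google.cloud import bigquery
    from google.cloud.bigquery.job import SourceFormat

    client = bigquery.Client()
    table_ref = client.dataset('my_dataset').table('my_table')

    config = bigquery.LoadJobConfig()
    config.source_format = SourceFormat.CSV
    config.skip_leading_rows = 1  # skip the CSV header row

    with open('people.csv', 'rb') as source_file:
        job = client.load_table_from_file(
            source_file, table_ref, job_config=config)

    job.result()  # blocks until the load job completes
    print('Loaded {} rows.'.format(job.output_rows))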
- job.result(timeout=JOB_TIMEOUT) - - self.assertEqual(job.output_rows, len(ROWS)) - - rows = self._fetch_single_page(table) - row_tuples = [r.values() for r in rows] - by_age = operator.itemgetter(1) - self.assertEqual(sorted(row_tuples, key=by_age), - sorted(ROWS, key=by_age)) - def test_load_table_from_local_avro_file_then_dump_table(self): from google.cloud.bigquery.job import SourceFormat from google.cloud.bigquery.job import WriteDisposition @@ -817,37 +776,6 @@ def test_extract_table(self): got = destination.download_as_string().decode('utf-8') self.assertIn('Bharney Rhubble', got) - def test_extract_table_w_job_config(self): - from google.cloud.storage import Client as StorageClient - from google.cloud.bigquery.job import DestinationFormat - - storage_client = StorageClient() - local_id = unique_resource_id() - bucket_name = 'bq_extract_test' + local_id - blob_name = 'person_ages.csv' - dataset_id = _make_dataset_id('load_gcs_then_extract') - table_id = 'test_table' - table_ref = Config.CLIENT.dataset(dataset_id).table(table_id) - table = Table(table_ref) - self.to_delete.insert(0, table) - self._load_table_for_extract_table( - storage_client, ROWS, bucket_name, blob_name, table_ref) - bucket = storage_client.bucket(bucket_name) - destination_blob_name = 'person_ages_out.csv' - destination = bucket.blob(destination_blob_name) - destination_uri = 'gs://{}/person_ages_out.csv'.format(bucket_name) - - config = bigquery.ExtractJobConfig() - config.destination_format = DestinationFormat.NEWLINE_DELIMITED_JSON - job = Config.CLIENT.extract_table( - table, destination_uri, job_config=config) - job.result() - - self.to_delete.insert(0, destination) - got = destination.download_as_string().decode('utf-8') - self.assertIn('"Bharney Rhubble"', got) - self.assertEqual(job.destination_uri_file_counts, [1]) - def test_copy_table(self): # If we create a new table to copy from, the test won't work # because the new rows will be stored in the streaming buffer, @@ -1538,32 +1466,6 @@ def test_dbapi_w_query_parameters(self): row = Config.CURSOR.fetchone() self.assertIsNone(row, msg=msg) - def test_dump_table_w_public_data(self): - PUBLIC = 'bigquery-public-data' - DATASET_ID = 'samples' - TABLE_NAME = 'natality' - - table_ref = DatasetReference(PUBLIC, DATASET_ID).table(TABLE_NAME) - table = Config.CLIENT.get_table(table_ref) - self._fetch_single_page(table) - - def test_dump_table_w_public_data_selected_fields(self): - PUBLIC = 'bigquery-public-data' - DATASET_ID = 'samples' - TABLE_NAME = 'natality' - selected_fields = [ - bigquery.SchemaField('year', 'INTEGER', mode='NULLABLE'), - bigquery.SchemaField('month', 'INTEGER', mode='NULLABLE'), - bigquery.SchemaField('day', 'INTEGER', mode='NULLABLE'), - ] - table_ref = DatasetReference(PUBLIC, DATASET_ID).table(TABLE_NAME) - - rows = self._fetch_single_page( - table_ref, selected_fields=selected_fields) - - self.assertGreater(len(rows), 0) - self.assertEqual(len(rows[0]), 3) - def test_large_query_w_public_data(self): PUBLIC = 'bigquery-public-data' DATASET_ID = 'samples' From 6b9b58d6e065634bf03df792e8953c2f1c76eb98 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 22 May 2018 13:17:03 -0700 Subject: [PATCH 26/75] Fixes #5353. 
Move manifest and include setup-readme in dist (#5366) --- MANIFEST.in | 2 -- legacy/google-cloud/MANIFEST.in | 2 ++ legacy/google-cloud/setup.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) delete mode 100644 MANIFEST.in create mode 100644 legacy/google-cloud/MANIFEST.in diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index e835c5e94a5a..000000000000 --- a/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -include README.rst -global-exclude *.pyc diff --git a/legacy/google-cloud/MANIFEST.in b/legacy/google-cloud/MANIFEST.in new file mode 100644 index 000000000000..e73a89048601 --- /dev/null +++ b/legacy/google-cloud/MANIFEST.in @@ -0,0 +1,2 @@ +include setup-README.rst +global-exclude *.pyc diff --git a/legacy/google-cloud/setup.py b/legacy/google-cloud/setup.py index 7528d557594c..374382197d1a 100644 --- a/legacy/google-cloud/setup.py +++ b/legacy/google-cloud/setup.py @@ -76,7 +76,7 @@ setup( name='google-cloud', - version='0.33.0', + version='0.33.1', description='API Client library for Google Cloud', long_description=README, install_requires=REQUIREMENTS, From df731c31c881cf796dcca6f37f4565b07d67cadd Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 22 May 2018 15:42:18 -0700 Subject: [PATCH 27/75] Add v1 Endpoint for IoT (#5355) Generate IoT v1 --- iot/LICENSE | 201 +++ iot/MANIFEST.in | 5 + iot/README.rst | 102 ++ iot/docs/conf.py | 310 ++++ iot/docs/gapic/v1/api.rst | 6 + iot/docs/gapic/v1/types.rst | 5 + iot/docs/index.rst | 110 ++ iot/google/__init__.py | 20 + iot/google/cloud/__init__.py | 20 + iot/google/cloud/iot.py | 25 + iot/google/cloud/iot_v1/__init__.py | 31 + iot/google/cloud/iot_v1/gapic/__init__.py | 0 .../iot_v1/gapic/device_manager_client.py | 1372 +++++++++++++++++ .../gapic/device_manager_client_config.py | 114 ++ iot/google/cloud/iot_v1/gapic/enums.py | 92 ++ iot/google/cloud/iot_v1/proto/__init__.py | 0 .../cloud/iot_v1/proto/device_manager_pb2.py | 1265 +++++++++++++++ .../iot_v1/proto/device_manager_pb2_grpc.py | 316 ++++ .../cloud/iot_v1/proto/resources_pb2.py | 1299 ++++++++++++++++ .../cloud/iot_v1/proto/resources_pb2_grpc.py | 3 + iot/google/cloud/iot_v1/types.py | 55 + iot/nox.py | 76 + iot/setup.cfg | 2 + iot/setup.py | 74 + .../gapic/v1/test_system_device_manager_v1.py | 28 + .../gapic/v1/test_device_manager_client_v1.py | 654 ++++++++ 26 files changed, 6185 insertions(+) create mode 100644 iot/LICENSE create mode 100644 iot/MANIFEST.in create mode 100644 iot/README.rst create mode 100644 iot/docs/conf.py create mode 100644 iot/docs/gapic/v1/api.rst create mode 100644 iot/docs/gapic/v1/types.rst create mode 100644 iot/docs/index.rst create mode 100644 iot/google/__init__.py create mode 100644 iot/google/cloud/__init__.py create mode 100644 iot/google/cloud/iot.py create mode 100644 iot/google/cloud/iot_v1/__init__.py create mode 100644 iot/google/cloud/iot_v1/gapic/__init__.py create mode 100644 iot/google/cloud/iot_v1/gapic/device_manager_client.py create mode 100644 iot/google/cloud/iot_v1/gapic/device_manager_client_config.py create mode 100644 iot/google/cloud/iot_v1/gapic/enums.py create mode 100644 iot/google/cloud/iot_v1/proto/__init__.py create mode 100644 iot/google/cloud/iot_v1/proto/device_manager_pb2.py create mode 100644 iot/google/cloud/iot_v1/proto/device_manager_pb2_grpc.py create mode 100644 iot/google/cloud/iot_v1/proto/resources_pb2.py create mode 100644 iot/google/cloud/iot_v1/proto/resources_pb2_grpc.py create mode 100644 iot/google/cloud/iot_v1/types.py create mode 100644 iot/nox.py create mode 100644 
iot/setup.cfg create mode 100644 iot/setup.py create mode 100644 iot/tests/system/gapic/v1/test_system_device_manager_v1.py create mode 100644 iot/tests/unit/gapic/v1/test_device_manager_client_v1.py diff --git a/iot/LICENSE b/iot/LICENSE new file mode 100644 index 000000000000..a8ee855de2aa --- /dev/null +++ b/iot/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       https://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/iot/MANIFEST.in b/iot/MANIFEST.in
new file mode 100644
index 000000000000..9cbf175afe6b
--- /dev/null
+++ b/iot/MANIFEST.in
@@ -0,0 +1,5 @@
+include README.rst LICENSE
+recursive-include google *.json *.proto
+recursive-include tests *
+global-exclude *.py[co]
+global-exclude __pycache__
diff --git a/iot/README.rst b/iot/README.rst
new file mode 100644
index 000000000000..7146d4545ebc
--- /dev/null
+++ b/iot/README.rst
@@ -0,0 +1,102 @@
+Python Client for Cloud IoT API (`Alpha`_)
+==========================================
+
+`Cloud IoT API`_: Registers and manages IoT (Internet of Things) devices that connect to the
+Google Cloud Platform.
+
+- `Client Library Documentation`_
+- `Product Documentation`_
+
+.. _Alpha: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst
+.. _Cloud IoT API: https://cloud.google.com/iot
+.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/iot/usage.html
+.. _Product Documentation: https://cloud.google.com/iot
+
+Quick Start
+-----------
+
+In order to use this library, you first need to go through the following steps:
+
+1. `Select or create a Cloud Platform project.`_
+2. `Enable billing for your project.`_
+3. `Enable the Cloud IoT API.`_
+4. `Setup Authentication.`_
+
+.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project
+.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project
+.. _Enable the Cloud IoT API.: https://cloud.google.com/iot
+.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/stable/core/auth.html
+
+Installation
+~~~~~~~~~~~~
+
+Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to
+create isolated Python environments. The basic problem it addresses is one of
+dependencies and versions, and indirectly permissions.
+
+With `virtualenv`_, it's possible to install this library without needing system
+install permissions, and without clashing with the installed system
+dependencies.
+
+.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/
+
+
+Mac/Linux
+^^^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    source <your-env>/bin/activate
+    <your-env>/bin/pip install google-cloud-iot
+
+
+Windows
+^^^^^^^
+
+.. code-block:: console
+
+    pip install virtualenv
+    virtualenv <your-env>
+    <your-env>\Scripts\activate
+    <your-env>\Scripts\pip.exe install google-cloud-iot
+
+Preview
+~~~~~~~
+
+DeviceManagerClient
+^^^^^^^^^^^^^^^^^^^
+
+.. 
code:: py + + from google.cloud import iot_v1 + + client = iot_v1.DeviceManagerClient() + + parent = client.location_path('[PROJECT]', '[LOCATION]') + + + # Iterate over all results + for element in client.list_device_registries(parent): + # process element + pass + + # Or iterate over results one page at a time + for page in client.list_device_registries(parent, options=CallOptions(page_token=INITIAL_PAGE)): + for element in page: + # process element + pass + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Cloud IoT API + API to see other available methods on the client. +- Read the `Cloud IoT API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `repository’s main README`_ to see the full list of Cloud + APIs that we cover. + +.. _Cloud IoT API Product documentation: https://cloud.google.com/iot +.. _repository’s main README: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst \ No newline at end of file diff --git a/iot/docs/conf.py b/iot/docs/conf.py new file mode 100644 index 000000000000..66735065c495 --- /dev/null +++ b/iot/docs/conf.py @@ -0,0 +1,310 @@ +# -*- coding: utf-8 -*- +# +# google-cloud-iot documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath('..')) + +__version__ = '0.1.0' + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.intersphinx', + 'sphinx.ext.coverage', + 'sphinx.ext.napoleon', + 'sphinx.ext.viewcode', +] + +# autodoc/autosummary flags +autoclass_content = 'both' +autodoc_default_flags = ['members'] +autosummary_generate = True + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'google-cloud-iot' +copyright = u'2017, Google' +author = u'Google APIs' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = '.'.join(release.split('.')[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. 
+# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'sphinx_rtd_theme' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = [] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. 
+#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'google-cloud-iot-doc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + #'preamble': '', + + # Latex figure (float) alignment + #'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'google-cloud-iot.tex', u'google-cloud-iot Documentation', + author, 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [(master_doc, 'google-cloud-iot', + u'google-cloud-iot Documentation', [author], 1)] + +# If true, show URL addresses after external links. +#man_show_urls = False + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'google-cloud-iot', u'google-cloud-iot Documentation', author, + 'google-cloud-iot', + 'GAPIC library for the {metadata.shortName} v1 service', 'APIs'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + 'python': ('http://python.readthedocs.org/en/latest/', None), + 'gax': ('https://gax-python.readthedocs.org/en/latest/', None), +} + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/iot/docs/gapic/v1/api.rst b/iot/docs/gapic/v1/api.rst new file mode 100644 index 000000000000..dc91697f1d89 --- /dev/null +++ b/iot/docs/gapic/v1/api.rst @@ -0,0 +1,6 @@ +Client for Cloud IoT API +======================== + +.. automodule:: google.cloud.iot_v1 + :members: + :inherited-members: \ No newline at end of file diff --git a/iot/docs/gapic/v1/types.rst b/iot/docs/gapic/v1/types.rst new file mode 100644 index 000000000000..fa7bab9df0d9 --- /dev/null +++ b/iot/docs/gapic/v1/types.rst @@ -0,0 +1,5 @@ +Types for Cloud IoT API Client +============================== + +.. automodule:: google.cloud.iot_v1.types + :members: \ No newline at end of file diff --git a/iot/docs/index.rst b/iot/docs/index.rst new file mode 100644 index 000000000000..366ebac9f2dc --- /dev/null +++ b/iot/docs/index.rst @@ -0,0 +1,110 @@ +Python Client for Cloud IoT API (`Alpha`_) +========================================== + +`Cloud IoT API`_: Registers and manages IoT (Internet of Things) devices that connect to the +Google Cloud Platform. + +- `Client Library Documentation`_ +- `Product Documentation`_ + +.. _Alpha: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst +.. _Cloud IoT API: https://cloud.google.com/iot +.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/iot/usage.html +.. _Product Documentation: https://cloud.google.com/iot + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. `Enable the Cloud IoT API.`_ +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Enable the Cloud IoT API.: https://cloud.google.com/iot +.. _Setup Authentication.: https://googlecloudplatform.github.io/google-cloud-python/stable/core/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. 
The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + source /bin/activate + /bin/pip install google-cloud-iot + + +Windows +^^^^^^^ + +.. code-block:: console + + pip install virtualenv + virtualenv + \Scripts\activate + \Scripts\pip.exe install google-cloud-iot + +Preview +~~~~~~~ + +DeviceManagerClient +^^^^^^^^^^^^^^^^^^^ + +.. code:: py + + from google.cloud import iot_v1 + + client = iot_v1.DeviceManagerClient() + + parent = client.location_path('[PROJECT]', '[LOCATION]') + + + # Iterate over all results + for element in client.list_device_registries(parent): + # process element + pass + + # Or iterate over results one page at a time + for page in client.list_device_registries(parent, options=CallOptions(page_token=INITIAL_PAGE)): + for element in page: + # process element + pass + +Next Steps +~~~~~~~~~~ + +- Read the `Client Library Documentation`_ for Cloud IoT API + API to see other available methods on the client. +- Read the `Cloud IoT API Product documentation`_ to learn + more about the product and see How-to Guides. +- View this `repository’s main README`_ to see the full list of Cloud + APIs that we cover. + +.. _Cloud IoT API Product documentation: https://cloud.google.com/iot +.. _repository’s main README: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst + +Api Reference +------------- +.. toctree:: + :maxdepth: 2 + + gapic/v1/api + gapic/v1/types \ No newline at end of file diff --git a/iot/google/__init__.py b/iot/google/__init__.py new file mode 100644 index 000000000000..855a707300e1 --- /dev/null +++ b/iot/google/__init__.py @@ -0,0 +1,20 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +try: + import pkg_resources + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) \ No newline at end of file diff --git a/iot/google/cloud/__init__.py b/iot/google/cloud/__init__.py new file mode 100644 index 000000000000..855a707300e1 --- /dev/null +++ b/iot/google/cloud/__init__.py @@ -0,0 +1,20 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
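+
+# Declare ``google.cloud`` as a namespace package so that separately
+# installed distributions (for example google-cloud-iot and
+# google-cloud-redis) can share this import path; ``pkgutil`` is the
+# fallback when setuptools' ``pkg_resources`` is unavailable.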
+ +try: + import pkg_resources + pkg_resources.declare_namespace(__name__) +except ImportError: + import pkgutil + __path__ = pkgutil.extend_path(__path__, __name__) \ No newline at end of file diff --git a/iot/google/cloud/iot.py b/iot/google/cloud/iot.py new file mode 100644 index 000000000000..0c77f4c7bf08 --- /dev/null +++ b/iot/google/cloud/iot.py @@ -0,0 +1,25 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.iot_v1 import DeviceManagerClient +from google.cloud.iot_v1 import enums +from google.cloud.iot_v1 import types + +__all__ = ( + 'enums', + 'types', + 'DeviceManagerClient', +) diff --git a/iot/google/cloud/iot_v1/__init__.py b/iot/google/cloud/iot_v1/__init__.py new file mode 100644 index 000000000000..d283a951cb38 --- /dev/null +++ b/iot/google/cloud/iot_v1/__init__.py @@ -0,0 +1,31 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +from google.cloud.iot_v1 import types +from google.cloud.iot_v1.gapic import device_manager_client +from google.cloud.iot_v1.gapic import enums + + +class DeviceManagerClient(device_manager_client.DeviceManagerClient): + __doc__ = device_manager_client.DeviceManagerClient.__doc__ + enums = enums + + +__all__ = ( + 'enums', + 'types', + 'DeviceManagerClient', +) diff --git a/iot/google/cloud/iot_v1/gapic/__init__.py b/iot/google/cloud/iot_v1/gapic/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/iot/google/cloud/iot_v1/gapic/device_manager_client.py b/iot/google/cloud/iot_v1/gapic/device_manager_client.py new file mode 100644 index 000000000000..63fc038264c8 --- /dev/null +++ b/iot/google/cloud/iot_v1/gapic/device_manager_client.py @@ -0,0 +1,1372 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Accesses the google.cloud.iot.v1 DeviceManager API.""" + +import functools +import pkg_resources + +import google.api_core.gapic_v1.client_info +import google.api_core.gapic_v1.config +import google.api_core.gapic_v1.method +import google.api_core.gapic_v1.routing_header +import google.api_core.grpc_helpers +import google.api_core.page_iterator +import google.api_core.path_template +import grpc + +from google.cloud.iot_v1.gapic import device_manager_client_config +from google.cloud.iot_v1.gapic import enums +from google.cloud.iot_v1.proto import device_manager_pb2 +from google.cloud.iot_v1.proto import device_manager_pb2_grpc +from google.cloud.iot_v1.proto import resources_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 + +_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( + 'google-cloud-iot', ).version + + +class DeviceManagerClient(object): + """ + Internet of things (IoT) service. Allows to manipulate device registry + instances and the registration of devices (Things) to the cloud. + """ + + SERVICE_ADDRESS = 'cloudiot.googleapis.com:443' + """The default address of the service.""" + + # The scopes needed to make gRPC calls to all of the methods defined in + # this service + _DEFAULT_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + 'https://www.googleapis.com/auth/cloudiot', + ) + + # The name of the interface for this client. This is the key used to find + # method configuration in the client_config dictionary. + _INTERFACE_NAME = 'google.cloud.iot.v1.DeviceManager' + + @classmethod + def location_path(cls, project, location): + """Return a fully-qualified location string.""" + return google.api_core.path_template.expand( + 'projects/{project}/locations/{location}', + project=project, + location=location, + ) + + @classmethod + def registry_path(cls, project, location, registry): + """Return a fully-qualified registry string.""" + return google.api_core.path_template.expand( + 'projects/{project}/locations/{location}/registries/{registry}', + project=project, + location=location, + registry=registry, + ) + + @classmethod + def device_path(cls, project, location, registry, device): + """Return a fully-qualified device string.""" + return google.api_core.path_template.expand( + 'projects/{project}/locations/{location}/registries/{registry}/devices/{device}', + project=project, + location=location, + registry=registry, + device=device, + ) + + def __init__(self, + channel=None, + credentials=None, + client_config=device_manager_client_config.config, + client_info=None): + """Constructor. + + Args: + channel (grpc.Channel): A ``Channel`` instance through + which to make calls. This argument is mutually exclusive + with ``credentials``; providing both will raise an exception. + credentials (google.auth.credentials.Credentials): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_config (dict): A dictionary of call options for each + method. If not specified, the default configuration is used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ """ + # If both `channel` and `credentials` are specified, raise an + # exception (channels come with credentials baked in already). + if channel is not None and credentials is not None: + raise ValueError( + 'The `channel` and `credentials` arguments to {} are mutually ' + 'exclusive.'.format(self.__class__.__name__), ) + + # Create the channel. + self.channel = channel + if self.channel is None: + self.channel = google.api_core.grpc_helpers.create_channel( + self.SERVICE_ADDRESS, + credentials=credentials, + scopes=self._DEFAULT_SCOPES, + ) + + # Create the gRPC stubs. + self._device_manager_stub = (device_manager_pb2_grpc.DeviceManagerStub( + self.channel)) + + if client_info is None: + client_info = ( + google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO) + client_info.gapic_version = _GAPIC_LIBRARY_VERSION + self._client_info = client_info + + # Parse out the default settings for retry and timeout for each RPC + # from the client configuration. + # (Ordinarily, these are the defaults specified in the `*_config.py` + # file next to this one.) + self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( + client_config['interfaces'][self._INTERFACE_NAME], ) + + self._inner_api_calls = {} + + def _intercept_channel(self, *interceptors): + """ Experimental. Bind gRPC interceptors to the gRPC channel. + + Args: + interceptors (*Union[grpc.UnaryUnaryClientInterceptor, grpc.UnaryStreamingClientInterceptor, grpc.StreamingUnaryClientInterceptor, grpc.StreamingStreamingClientInterceptor]): + Zero or more gRPC interceptors. Interceptors are given control in the order + they are listed. + Raises: + TypeError: If interceptor does not derive from any of + UnaryUnaryClientInterceptor, + UnaryStreamClientInterceptor, + StreamUnaryClientInterceptor, or + StreamStreamClientInterceptor. + """ + self.channel = grpc.intercept_channel(self.channel, *interceptors) + self._device_manager_stub = (device_manager_pb2_grpc.DeviceManagerStub( + self.channel)) + self._inner_api_calls.clear() + + # Service calls + def create_device_registry(self, + parent, + device_registry, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Creates a device registry that contains devices. + + Example: + >>> from google.cloud import iot_v1 + >>> + >>> client = iot_v1.DeviceManagerClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> # TODO: Initialize ``device_registry``: + >>> device_registry = {} + >>> + >>> response = client.create_device_registry(parent, device_registry) + + Args: + parent (str): The project and cloud region where this device registry must be created. + For example, ``projects/example-project/locations/us-central1``. + device_registry (Union[dict, ~google.cloud.iot_v1.types.DeviceRegistry]): The device registry. The field ``name`` must be empty. The server will + generate that field from the device registry ``id`` provided and the + ``parent`` field. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.iot_v1.types.DeviceRegistry` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. 
+ metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.iot_v1.types.DeviceRegistry` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + if 'create_device_registry' not in self._inner_api_calls: + self._inner_api_calls[ + 'create_device_registry'] = google.api_core.gapic_v1.method.wrap_method( + self._device_manager_stub.CreateDeviceRegistry, + default_retry=self._method_configs['CreateDeviceRegistry'] + .retry, + default_timeout=self._method_configs[ + 'CreateDeviceRegistry'].timeout, + client_info=self._client_info, + ) + + request = device_manager_pb2.CreateDeviceRegistryRequest( + parent=parent, + device_registry=device_registry, + ) + + routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + [('parent', parent)], ) + metadata.append(routing_header) + + return self._inner_api_calls['create_device_registry']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def get_device_registry(self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Gets a device registry configuration. + + Example: + >>> from google.cloud import iot_v1 + >>> + >>> client = iot_v1.DeviceManagerClient() + >>> + >>> name = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]') + >>> + >>> response = client.get_device_registry(name) + + Args: + name (str): The name of the device registry. For example, + ``projects/example-project/locations/us-central1/registries/my-registry``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.iot_v1.types.DeviceRegistry` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
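+
+        Note:
+            A sketch of overriding the default retry policy; the
+            ``deadline`` value is illustrative:
+
+            >>> from google.api_core import retry
+            >>> response = client.get_device_registry(
+            ...     name, retry=retry.Retry(deadline=60))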
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + if 'get_device_registry' not in self._inner_api_calls: + self._inner_api_calls[ + 'get_device_registry'] = google.api_core.gapic_v1.method.wrap_method( + self._device_manager_stub.GetDeviceRegistry, + default_retry=self._method_configs[ + 'GetDeviceRegistry'].retry, + default_timeout=self._method_configs['GetDeviceRegistry'] + .timeout, + client_info=self._client_info, + ) + + request = device_manager_pb2.GetDeviceRegistryRequest(name=name, ) + + routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + [('name', name)], ) + metadata.append(routing_header) + + return self._inner_api_calls['get_device_registry']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def update_device_registry(self, + device_registry, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Updates a device registry configuration. + + Example: + >>> from google.cloud import iot_v1 + >>> + >>> client = iot_v1.DeviceManagerClient() + >>> + >>> # TODO: Initialize ``device_registry``: + >>> device_registry = {} + >>> + >>> # TODO: Initialize ``update_mask``: + >>> update_mask = {} + >>> + >>> response = client.update_device_registry(device_registry, update_mask) + + Args: + device_registry (Union[dict, ~google.cloud.iot_v1.types.DeviceRegistry]): The new values for the device registry. The ``id`` field must be empty, and + the ``name`` field must indicate the path of the resource. For example, + ``projects/example-project/locations/us-central1/registries/my-registry``. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.iot_v1.types.DeviceRegistry` + update_mask (Union[dict, ~google.cloud.iot_v1.types.FieldMask]): Only updates the ``device_registry`` fields indicated by this mask. + The field mask must not be empty, and it must not contain fields that + are immutable or only set by the server. + Mutable top-level fields: ``event_notification_config``, ``http_config``, + ``mqtt_config``, and ``state_notification_config``. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.iot_v1.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.iot_v1.types.DeviceRegistry` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
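+
+        Note:
+            A minimal sketch that enables MQTT only; ``1`` stands for
+            ``MqttState.MQTT_ENABLED`` and the mask lists ``FieldMask``
+            paths (assumes ``name`` is a full registry path):
+
+            >>> device_registry = {'name': name, 'mqtt_config': {'mqtt_enabled_state': 1}}
+            >>> update_mask = {'paths': ['mqtt_config']}
+            >>> response = client.update_device_registry(device_registry, update_mask)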
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + if 'update_device_registry' not in self._inner_api_calls: + self._inner_api_calls[ + 'update_device_registry'] = google.api_core.gapic_v1.method.wrap_method( + self._device_manager_stub.UpdateDeviceRegistry, + default_retry=self._method_configs['UpdateDeviceRegistry'] + .retry, + default_timeout=self._method_configs[ + 'UpdateDeviceRegistry'].timeout, + client_info=self._client_info, + ) + + request = device_manager_pb2.UpdateDeviceRegistryRequest( + device_registry=device_registry, + update_mask=update_mask, + ) + + if hasattr(device_registry, 'name'): + routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + [('device_registry.name', device_registry.name)], ) + metadata.append(routing_header) + + return self._inner_api_calls['update_device_registry']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def delete_device_registry(self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Deletes a device registry configuration. + + Example: + >>> from google.cloud import iot_v1 + >>> + >>> client = iot_v1.DeviceManagerClient() + >>> + >>> name = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]') + >>> + >>> client.delete_device_registry(name) + + Args: + name (str): The name of the device registry. For example, + ``projects/example-project/locations/us-central1/registries/my-registry``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + if 'delete_device_registry' not in self._inner_api_calls: + self._inner_api_calls[ + 'delete_device_registry'] = google.api_core.gapic_v1.method.wrap_method( + self._device_manager_stub.DeleteDeviceRegistry, + default_retry=self._method_configs['DeleteDeviceRegistry'] + .retry, + default_timeout=self._method_configs[ + 'DeleteDeviceRegistry'].timeout, + client_info=self._client_info, + ) + + request = device_manager_pb2.DeleteDeviceRegistryRequest(name=name, ) + + routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + [('name', name)], ) + metadata.append(routing_header) + + self._inner_api_calls['delete_device_registry']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def list_device_registries(self, + parent, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists device registries. + + Example: + >>> from google.cloud import iot_v1 + >>> + >>> client = iot_v1.DeviceManagerClient() + >>> + >>> parent = client.location_path('[PROJECT]', '[LOCATION]') + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_device_registries(parent): + ... # process element + ... 
pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_device_registries(parent, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): The project and cloud region path. For example, + ``projects/example-project/locations/us-central1``. + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.iot_v1.types.DeviceRegistry` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + if 'list_device_registries' not in self._inner_api_calls: + self._inner_api_calls[ + 'list_device_registries'] = google.api_core.gapic_v1.method.wrap_method( + self._device_manager_stub.ListDeviceRegistries, + default_retry=self._method_configs['ListDeviceRegistries'] + .retry, + default_timeout=self._method_configs[ + 'ListDeviceRegistries'].timeout, + client_info=self._client_info, + ) + + request = device_manager_pb2.ListDeviceRegistriesRequest( + parent=parent, + page_size=page_size, + ) + + routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + [('parent', parent)], ) + metadata.append(routing_header) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls['list_device_registries'], + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='device_registries', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator + + def create_device(self, + parent, + device, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Creates a device in a device registry. + + Example: + >>> from google.cloud import iot_v1 + >>> + >>> client = iot_v1.DeviceManagerClient() + >>> + >>> parent = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]') + >>> + >>> # TODO: Initialize ``device``: + >>> device = {} + >>> + >>> response = client.create_device(parent, device) + + Args: + parent (str): The name of the device registry where this device should be created. + For example, + ``projects/example-project/locations/us-central1/registries/my-registry``. + device (Union[dict, ~google.cloud.iot_v1.types.Device]): The device registration details. The field ``name`` must be empty. 
The server + will generate that field from the device registry ``id`` provided and the + ``parent`` field. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.iot_v1.types.Device` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.iot_v1.types.Device` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + if 'create_device' not in self._inner_api_calls: + self._inner_api_calls[ + 'create_device'] = google.api_core.gapic_v1.method.wrap_method( + self._device_manager_stub.CreateDevice, + default_retry=self._method_configs['CreateDevice'].retry, + default_timeout=self._method_configs['CreateDevice'] + .timeout, + client_info=self._client_info, + ) + + request = device_manager_pb2.CreateDeviceRequest( + parent=parent, + device=device, + ) + + routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + [('parent', parent)], ) + metadata.append(routing_header) + + return self._inner_api_calls['create_device']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def get_device(self, + name, + field_mask=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Gets details about a device. + + Example: + >>> from google.cloud import iot_v1 + >>> + >>> client = iot_v1.DeviceManagerClient() + >>> + >>> name = client.device_path('[PROJECT]', '[LOCATION]', '[REGISTRY]', '[DEVICE]') + >>> + >>> response = client.get_device(name) + + Args: + name (str): The name of the device. For example, + ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or + ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + field_mask (Union[dict, ~google.cloud.iot_v1.types.FieldMask]): The fields of the ``Device`` resource to be returned in the response. If the + field mask is unset or empty, all fields are returned. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.iot_v1.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.iot_v1.types.Device` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
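+
+        Note:
+            A sketch that narrows the response to a few fields; ``paths``
+            follows the ``FieldMask`` proto:
+
+            >>> field_mask = {'paths': ['id', 'name', 'num_id']}
+            >>> response = client.get_device(name, field_mask=field_mask)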
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + if 'get_device' not in self._inner_api_calls: + self._inner_api_calls[ + 'get_device'] = google.api_core.gapic_v1.method.wrap_method( + self._device_manager_stub.GetDevice, + default_retry=self._method_configs['GetDevice'].retry, + default_timeout=self._method_configs['GetDevice'].timeout, + client_info=self._client_info, + ) + + request = device_manager_pb2.GetDeviceRequest( + name=name, + field_mask=field_mask, + ) + + routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + [('name', name)], ) + metadata.append(routing_header) + + return self._inner_api_calls['get_device']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def update_device(self, + device, + update_mask, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Updates a device. + + Example: + >>> from google.cloud import iot_v1 + >>> + >>> client = iot_v1.DeviceManagerClient() + >>> + >>> # TODO: Initialize ``device``: + >>> device = {} + >>> + >>> # TODO: Initialize ``update_mask``: + >>> update_mask = {} + >>> + >>> response = client.update_device(device, update_mask) + + Args: + device (Union[dict, ~google.cloud.iot_v1.types.Device]): The new values for the device registry. The ``id`` and ``num_id`` fields must + be empty, and the field ``name`` must specify the name path. For example, + ``projects/p0/locations/us-central1/registries/registry0/devices/device0``or + ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.iot_v1.types.Device` + update_mask (Union[dict, ~google.cloud.iot_v1.types.FieldMask]): Only updates the ``device`` fields indicated by this mask. + The field mask must not be empty, and it must not contain fields that + are immutable or only set by the server. + Mutable top-level fields: ``credentials``, ``blocked``, and ``metadata`` + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.iot_v1.types.FieldMask` + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.iot_v1.types.Device` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
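+
+        Note:
+            A minimal sketch that blocks a device, touching only the
+            mutable ``blocked`` field (assumes ``name`` is a full device
+            path):
+
+            >>> device = {'name': name, 'blocked': True}
+            >>> update_mask = {'paths': ['blocked']}
+            >>> response = client.update_device(device, update_mask)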
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + if 'update_device' not in self._inner_api_calls: + self._inner_api_calls[ + 'update_device'] = google.api_core.gapic_v1.method.wrap_method( + self._device_manager_stub.UpdateDevice, + default_retry=self._method_configs['UpdateDevice'].retry, + default_timeout=self._method_configs['UpdateDevice'] + .timeout, + client_info=self._client_info, + ) + + request = device_manager_pb2.UpdateDeviceRequest( + device=device, + update_mask=update_mask, + ) + + if hasattr(device, 'name'): + routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + [('device.name', device.name)], ) + metadata.append(routing_header) + + return self._inner_api_calls['update_device']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def delete_device(self, + name, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Deletes a device. + + Example: + >>> from google.cloud import iot_v1 + >>> + >>> client = iot_v1.DeviceManagerClient() + >>> + >>> name = client.device_path('[PROJECT]', '[LOCATION]', '[REGISTRY]', '[DEVICE]') + >>> + >>> client.delete_device(name) + + Args: + name (str): The name of the device. For example, + ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or + ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + if 'delete_device' not in self._inner_api_calls: + self._inner_api_calls[ + 'delete_device'] = google.api_core.gapic_v1.method.wrap_method( + self._device_manager_stub.DeleteDevice, + default_retry=self._method_configs['DeleteDevice'].retry, + default_timeout=self._method_configs['DeleteDevice'] + .timeout, + client_info=self._client_info, + ) + + request = device_manager_pb2.DeleteDeviceRequest(name=name, ) + + routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + [('name', name)], ) + metadata.append(routing_header) + + self._inner_api_calls['delete_device']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def list_devices(self, + parent, + device_num_ids=None, + device_ids=None, + field_mask=None, + page_size=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + List devices in a device registry. + + Example: + >>> from google.cloud import iot_v1 + >>> + >>> client = iot_v1.DeviceManagerClient() + >>> + >>> parent = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]') + >>> + >>> + >>> # Iterate over all results + >>> for element in client.list_devices(parent): + ... # process element + ... 
pass + >>> + >>> # Or iterate over results one page at a time + >>> for page in client.list_devices(parent, options=CallOptions(page_token=INITIAL_PAGE)): + ... for element in page: + ... # process element + ... pass + + Args: + parent (str): The device registry path. Required. For example, + ``projects/my-project/locations/us-central1/registries/my-registry``. + device_num_ids (list[long]): A list of device numerical ids. If empty, it will ignore this field. This + field cannot hold more than 10,000 entries. + device_ids (list[str]): A list of device string identifiers. If empty, it will ignore this field. + For example, ``['device0', 'device12']``. This field cannot hold more than + 10,000 entries. + field_mask (Union[dict, ~google.cloud.iot_v1.types.FieldMask]): The fields of the ``Device`` resource to be returned in the response. The + fields ``id``, and ``num_id`` are always returned by default, along with any + other fields specified. + If a dict is provided, it must be of the same form as the protobuf + message :class:`~google.cloud.iot_v1.types.FieldMask` + page_size (int): The maximum number of resources contained in the + underlying API response. If page streaming is performed per- + resource, this parameter does not affect the return value. If page + streaming is performed per-page, this determines the maximum number + of resources in a page. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.gax.PageIterator` instance. By default, this + is an iterable of :class:`~google.cloud.iot_v1.types.Device` instances. + This object can also be configured to iterate over the pages + of the response through the `options` parameter. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
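+
+        Note:
+            A sketch that restricts the listing to specific device ids
+            (the ids are illustrative):
+
+            >>> for element in client.list_devices(parent, device_ids=['device0', 'device12']):
+            ...     pass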
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + if 'list_devices' not in self._inner_api_calls: + self._inner_api_calls[ + 'list_devices'] = google.api_core.gapic_v1.method.wrap_method( + self._device_manager_stub.ListDevices, + default_retry=self._method_configs['ListDevices'].retry, + default_timeout=self._method_configs['ListDevices'] + .timeout, + client_info=self._client_info, + ) + + request = device_manager_pb2.ListDevicesRequest( + parent=parent, + device_num_ids=device_num_ids, + device_ids=device_ids, + field_mask=field_mask, + page_size=page_size, + ) + + routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + [('parent', parent)], ) + metadata.append(routing_header) + + iterator = google.api_core.page_iterator.GRPCIterator( + client=None, + method=functools.partial( + self._inner_api_calls['list_devices'], + retry=retry, + timeout=timeout, + metadata=metadata), + request=request, + items_field='devices', + request_token_field='page_token', + response_token_field='next_page_token', + ) + return iterator + + def modify_cloud_to_device_config( + self, + name, + binary_data, + version_to_update=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Modifies the configuration for the device, which is eventually sent from + the Cloud IoT Core servers. Returns the modified configuration version and + its metadata. + + Example: + >>> from google.cloud import iot_v1 + >>> + >>> client = iot_v1.DeviceManagerClient() + >>> + >>> name = client.device_path('[PROJECT]', '[LOCATION]', '[REGISTRY]', '[DEVICE]') + >>> + >>> # TODO: Initialize ``binary_data``: + >>> binary_data = b'' + >>> + >>> response = client.modify_cloud_to_device_config(name, binary_data) + + Args: + name (str): The name of the device. For example, + ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or + ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + binary_data (bytes): The configuration data for the device. + version_to_update (long): The version number to update. If this value is zero, it will not check the + version number of the server and will always update the current version; + otherwise, this update will fail if the version number found on the server + does not match this version number. This is used to support multiple + simultaneous updates without losing data. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.iot_v1.types.DeviceConfig` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
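+
+        Note:
+            A sketch that pins the update to a known config version, so the
+            call fails if the server has already moved past version 2 (the
+            payload is illustrative):
+
+            >>> response = client.modify_cloud_to_device_config(
+            ...     name, b'{"fan": "on"}', version_to_update=2)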
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + if 'modify_cloud_to_device_config' not in self._inner_api_calls: + self._inner_api_calls[ + 'modify_cloud_to_device_config'] = google.api_core.gapic_v1.method.wrap_method( + self._device_manager_stub.ModifyCloudToDeviceConfig, + default_retry=self._method_configs[ + 'ModifyCloudToDeviceConfig'].retry, + default_timeout=self._method_configs[ + 'ModifyCloudToDeviceConfig'].timeout, + client_info=self._client_info, + ) + + request = device_manager_pb2.ModifyCloudToDeviceConfigRequest( + name=name, + binary_data=binary_data, + version_to_update=version_to_update, + ) + + routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + [('name', name)], ) + metadata.append(routing_header) + + return self._inner_api_calls['modify_cloud_to_device_config']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def list_device_config_versions( + self, + name, + num_versions=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists the last few versions of the device configuration in descending + order (i.e.: newest first). + + Example: + >>> from google.cloud import iot_v1 + >>> + >>> client = iot_v1.DeviceManagerClient() + >>> + >>> name = client.device_path('[PROJECT]', '[LOCATION]', '[REGISTRY]', '[DEVICE]') + >>> + >>> response = client.list_device_config_versions(name) + + Args: + name (str): The name of the device. For example, + ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or + ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + num_versions (int): The number of versions to list. Versions are listed in decreasing order of + the version number. The maximum number of versions retained is 10. If this + value is zero, it will return all the versions available. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.iot_v1.types.ListDeviceConfigVersionsResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
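+
+        Note:
+            A sketch that fetches only the two newest versions and walks
+            the ``device_configs`` of the response:
+
+            >>> response = client.list_device_config_versions(name, num_versions=2)
+            >>> for device_config in response.device_configs:
+            ...     pass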
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + if 'list_device_config_versions' not in self._inner_api_calls: + self._inner_api_calls[ + 'list_device_config_versions'] = google.api_core.gapic_v1.method.wrap_method( + self._device_manager_stub.ListDeviceConfigVersions, + default_retry=self._method_configs[ + 'ListDeviceConfigVersions'].retry, + default_timeout=self._method_configs[ + 'ListDeviceConfigVersions'].timeout, + client_info=self._client_info, + ) + + request = device_manager_pb2.ListDeviceConfigVersionsRequest( + name=name, + num_versions=num_versions, + ) + + routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + [('name', name)], ) + metadata.append(routing_header) + + return self._inner_api_calls['list_device_config_versions']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def list_device_states(self, + name, + num_states=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Lists the last few versions of the device state in descending order (i.e.: + newest first). + + Example: + >>> from google.cloud import iot_v1 + >>> + >>> client = iot_v1.DeviceManagerClient() + >>> + >>> name = client.device_path('[PROJECT]', '[LOCATION]', '[REGISTRY]', '[DEVICE]') + >>> + >>> response = client.list_device_states(name) + + Args: + name (str): The name of the device. For example, + ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or + ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + num_states (int): The number of states to list. States are listed in descending order of + update time. The maximum number of states retained is 10. If this + value is zero, it will return all the states available. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.iot_v1.types.ListDeviceStatesResponse` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. 
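+
+        A short sketch of consuming the response, assuming each returned
+        ``DeviceState`` exposes ``update_time`` and ``binary_data`` fields:
+
+            >>> response = client.list_device_states(name, num_states=3)
+            >>> for state in response.device_states:
+            ...     print(state.update_time, state.binary_data)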
+        """
+        if metadata is None:
+            metadata = []
+        metadata = list(metadata)
+        if 'list_device_states' not in self._inner_api_calls:
+            self._inner_api_calls[
+                'list_device_states'] = google.api_core.gapic_v1.method.wrap_method(
+                    self._device_manager_stub.ListDeviceStates,
+                    default_retry=self._method_configs[
+                        'ListDeviceStates'].retry,
+                    default_timeout=self._method_configs['ListDeviceStates']
+                    .timeout,
+                    client_info=self._client_info,
+                )
+
+        request = device_manager_pb2.ListDeviceStatesRequest(
+            name=name,
+            num_states=num_states,
+        )
+
+        routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+            [('name', name)], )
+        metadata.append(routing_header)
+
+        return self._inner_api_calls['list_device_states'](
+            request, retry=retry, timeout=timeout, metadata=metadata)
+
+    def set_iam_policy(self,
+                       resource,
+                       policy,
+                       retry=google.api_core.gapic_v1.method.DEFAULT,
+                       timeout=google.api_core.gapic_v1.method.DEFAULT,
+                       metadata=None):
+        """
+        Sets the access control policy on the specified resource. Replaces any
+        existing policy.
+
+        Example:
+            >>> from google.cloud import iot_v1
+            >>>
+            >>> client = iot_v1.DeviceManagerClient()
+            >>>
+            >>> resource = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]')
+            >>>
+            >>> # TODO: Initialize ``policy``:
+            >>> policy = {}
+            >>>
+            >>> response = client.set_iam_policy(resource, policy)
+
+        Args:
+            resource (str): REQUIRED: The resource for which the policy is being specified.
+                ``resource`` is usually specified as a path. For example, a Project
+                resource is specified as ``projects/{project}``.
+            policy (Union[dict, ~google.cloud.iot_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The size of
+                the policy is limited to a few 10s of KB. An empty policy is a
+                valid policy, but certain Cloud Platform services (such as Projects)
+                might reject it.
+                If a dict is provided, it must be of the same form as the protobuf
+                message :class:`~google.cloud.iot_v1.types.Policy`.
+            retry (Optional[google.api_core.retry.Retry]): A retry object used
+                to retry requests. If ``None`` is specified, requests will not
+                be retried.
+            timeout (Optional[float]): The amount of time, in seconds, to wait
+                for the request to complete. Note that if ``retry`` is
+                specified, the timeout applies to each individual attempt.
+            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+                that is provided to the method.
+
+        Returns:
+            A :class:`~google.cloud.iot_v1.types.Policy` instance.
+
+        Raises:
+            google.api_core.exceptions.GoogleAPICallError: If the request
+                failed for any reason.
+            google.api_core.exceptions.RetryError: If the request failed due
+                to a retryable error and retry attempts failed.
+            ValueError: If the parameters are invalid.
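+
+        A sketch of passing ``policy`` as a dict shaped like
+        :class:`~google.cloud.iot_v1.types.Policy`; the role and member
+        below are placeholder values, not required ones:
+
+            >>> policy = {
+            ...     'bindings': [{
+            ...         'role': 'roles/viewer',
+            ...         'members': ['user:alice@example.com'],
+            ...     }],
+            ... }
+            >>> response = client.set_iam_policy(resource, policy)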
+ """ + if metadata is None: + metadata = [] + metadata = list(metadata) + if 'set_iam_policy' not in self._inner_api_calls: + self._inner_api_calls[ + 'set_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( + self._device_manager_stub.SetIamPolicy, + default_retry=self._method_configs['SetIamPolicy'].retry, + default_timeout=self._method_configs['SetIamPolicy'] + .timeout, + client_info=self._client_info, + ) + + request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, + policy=policy, + ) + + routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + [('resource', resource)], ) + metadata.append(routing_header) + + return self._inner_api_calls['set_iam_policy']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def get_iam_policy(self, + resource, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Gets the access control policy for a resource. + Returns an empty policy if the resource exists and does not have a policy + set. + + Example: + >>> from google.cloud import iot_v1 + >>> + >>> client = iot_v1.DeviceManagerClient() + >>> + >>> resource = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]') + >>> + >>> response = client.get_iam_policy(resource) + + Args: + resource (str): REQUIRED: The resource for which the policy is being requested. + ``resource`` is usually specified as a path. For example, a Project + resource is specified as ``projects/{project}``. + retry (Optional[google.api_core.retry.Retry]): A retry object used + to retry requests. If ``None`` is specified, requests will not + be retried. + timeout (Optional[float]): The amount of time, in seconds, to wait + for the request to complete. Note that if ``retry`` is + specified, the timeout applies to each individual attempt. + metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata + that is provided to the method. + + Returns: + A :class:`~google.cloud.iot_v1.types.Policy` instance. + + Raises: + google.api_core.exceptions.GoogleAPICallError: If the request + failed for any reason. + google.api_core.exceptions.RetryError: If the request failed due + to a retryable error and retry attempts failed. + ValueError: If the parameters are invalid. + """ + if metadata is None: + metadata = [] + metadata = list(metadata) + if 'get_iam_policy' not in self._inner_api_calls: + self._inner_api_calls[ + 'get_iam_policy'] = google.api_core.gapic_v1.method.wrap_method( + self._device_manager_stub.GetIamPolicy, + default_retry=self._method_configs['GetIamPolicy'].retry, + default_timeout=self._method_configs['GetIamPolicy'] + .timeout, + client_info=self._client_info, + ) + + request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, ) + + routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata( + [('resource', resource)], ) + metadata.append(routing_header) + + return self._inner_api_calls['get_iam_policy']( + request, retry=retry, timeout=timeout, metadata=metadata) + + def test_iam_permissions(self, + resource, + permissions, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None): + """ + Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. 
+
+        Example:
+            >>> from google.cloud import iot_v1
+            >>>
+            >>> client = iot_v1.DeviceManagerClient()
+            >>>
+            >>> resource = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]')
+            >>>
+            >>> # TODO: Initialize ``permissions``:
+            >>> permissions = []
+            >>>
+            >>> response = client.test_iam_permissions(resource, permissions)
+
+        Args:
+            resource (str): REQUIRED: The resource for which the policy detail is being requested.
+                ``resource`` is usually specified as a path. For example, a Project
+                resource is specified as ``projects/{project}``.
+            permissions (list[str]): The set of permissions to check for the ``resource``. Permissions with
+                wildcards (such as '*' or 'storage.*') are not allowed. For more
+                information see
+                `IAM Overview <https://cloud.google.com/iam/docs/overview#permissions>`_.
+            retry (Optional[google.api_core.retry.Retry]): A retry object used
+                to retry requests. If ``None`` is specified, requests will not
+                be retried.
+            timeout (Optional[float]): The amount of time, in seconds, to wait
+                for the request to complete. Note that if ``retry`` is
+                specified, the timeout applies to each individual attempt.
+            metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
+                that is provided to the method.
+
+        Returns:
+            A :class:`~google.cloud.iot_v1.types.TestIamPermissionsResponse` instance.
+
+        Raises:
+            google.api_core.exceptions.GoogleAPICallError: If the request
+                failed for any reason.
+            google.api_core.exceptions.RetryError: If the request failed due
+                to a retryable error and retry attempts failed.
+            ValueError: If the parameters are invalid.
+        """
+        if metadata is None:
+            metadata = []
+        metadata = list(metadata)
+        if 'test_iam_permissions' not in self._inner_api_calls:
+            self._inner_api_calls[
+                'test_iam_permissions'] = google.api_core.gapic_v1.method.wrap_method(
+                    self._device_manager_stub.TestIamPermissions,
+                    default_retry=self._method_configs[
+                        'TestIamPermissions'].retry,
+                    default_timeout=self._method_configs['TestIamPermissions']
+                    .timeout,
+                    client_info=self._client_info,
+                )
+
+        request = iam_policy_pb2.TestIamPermissionsRequest(
+            resource=resource,
+            permissions=permissions,
+        )
+
+        routing_header = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
+            [('resource', resource)], )
+        metadata.append(routing_header)
+
+        return self._inner_api_calls['test_iam_permissions'](
+            request, retry=retry, timeout=timeout, metadata=metadata)
diff --git a/iot/google/cloud/iot_v1/gapic/device_manager_client_config.py b/iot/google/cloud/iot_v1/gapic/device_manager_client_config.py
new file mode 100644
index 000000000000..5a1c12b80b5d
--- /dev/null
+++ b/iot/google/cloud/iot_v1/gapic/device_manager_client_config.py
@@ -0,0 +1,114 @@
+config = {
+    "interfaces": {
+        "google.cloud.iot.v1.DeviceManager": {
+            "retry_codes": {
+                "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
+                "non_idempotent": [],
+                "rate_limited_aware":
+                ["DEADLINE_EXCEEDED", "RESOURCE_EXHAUSTED", "UNAVAILABLE"]
+            },
+            "retry_params": {
+                "default": {
+                    "initial_retry_delay_millis": 100,
+                    "retry_delay_multiplier": 1.3,
+                    "max_retry_delay_millis": 60000,
+                    "initial_rpc_timeout_millis": 20000,
+                    "rpc_timeout_multiplier": 1.0,
+                    "max_rpc_timeout_millis": 20000,
+                    "total_timeout_millis": 120000
+                },
+                "rate_limited_aware": {
+                    "initial_retry_delay_millis": 1000,
+                    "retry_delay_multiplier": 1.3,
+                    "max_retry_delay_millis": 60000,
+                    "initial_rpc_timeout_millis": 20000,
+                    "rpc_timeout_multiplier": 1.0,
+                    "max_rpc_timeout_millis": 20000,
+                    "total_timeout_millis": 120000
+                }
+            },
+            "methods": {
+                "CreateDeviceRegistry": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "non_idempotent",
+                    "retry_params_name": "default"
+                },
+                "GetDeviceRegistry": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "idempotent",
+                    "retry_params_name": "default"
+                },
+                "UpdateDeviceRegistry": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "non_idempotent",
+                    "retry_params_name": "default"
+                },
+                "DeleteDeviceRegistry": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "idempotent",
+                    "retry_params_name": "default"
+                },
+                "ListDeviceRegistries": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "idempotent",
+                    "retry_params_name": "default"
+                },
+                "CreateDevice": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "non_idempotent",
+                    "retry_params_name": "default"
+                },
+                "GetDevice": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "idempotent",
+                    "retry_params_name": "default"
+                },
+                "UpdateDevice": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "non_idempotent",
+                    "retry_params_name": "default"
+                },
+                "DeleteDevice": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "idempotent",
+                    "retry_params_name": "default"
+                },
+                "ListDevices": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "idempotent",
+                    "retry_params_name": "default"
+                },
+                "ModifyCloudToDeviceConfig": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "rate_limited_aware",
+                    "retry_params_name": "rate_limited_aware"
+                },
+                "ListDeviceConfigVersions": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "idempotent",
+                    "retry_params_name": "default"
+                },
+                "ListDeviceStates": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "idempotent",
+                    "retry_params_name": "default"
+                },
+                "SetIamPolicy": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "non_idempotent",
+                    "retry_params_name": "default"
+                },
+                "GetIamPolicy": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "non_idempotent",
+                    "retry_params_name": "default"
+                },
+                "TestIamPermissions": {
+                    "timeout_millis": 60000,
+                    "retry_codes_name": "non_idempotent",
+                    "retry_params_name": "default"
+                }
+            }
+        }
+    }
+}
diff --git a/iot/google/cloud/iot_v1/gapic/enums.py b/iot/google/cloud/iot_v1/gapic/enums.py
new file mode 100644
index 000000000000..8758593a3c3e
--- /dev/null
+++ b/iot/google/cloud/iot_v1/gapic/enums.py
@@ -0,0 +1,92 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Wrappers for protocol buffer enum types."""
+
+import enum
+
+
+class MqttState(enum.IntEnum):
+    """
+    Indicates whether an MQTT connection is enabled or disabled. See the field
+    description for details.
+
+    Attributes:
+      MQTT_STATE_UNSPECIFIED (int): No MQTT state specified. If not specified, MQTT will be enabled by default.
+      MQTT_ENABLED (int): Enables an MQTT connection.
+      MQTT_DISABLED (int): Disables an MQTT connection.
+    """
+    MQTT_STATE_UNSPECIFIED = 0
+    MQTT_ENABLED = 1
+    MQTT_DISABLED = 2
+
+
+class HttpState(enum.IntEnum):
+    """
+    Indicates whether DeviceService (HTTP) is enabled or disabled for the
+    registry. See the field description for details.
+
+    Attributes:
+      HTTP_STATE_UNSPECIFIED (int): No HTTP state specified. If not specified, DeviceService will be
+      enabled by default.
+      HTTP_ENABLED (int): Enables DeviceService (HTTP) service for the registry.
+      HTTP_DISABLED (int): Disables DeviceService (HTTP) service for the registry.
+    """
+    HTTP_STATE_UNSPECIFIED = 0
+    HTTP_ENABLED = 1
+    HTTP_DISABLED = 2
+
+
+class PublicKeyCertificateFormat(enum.IntEnum):
+    """
+    The supported formats for the public key.
+
+    Attributes:
+      UNSPECIFIED_PUBLIC_KEY_CERTIFICATE_FORMAT (int): The format has not been specified. This is an invalid default value and
+      must not be used.
+      X509_CERTIFICATE_PEM (int): An X.509v3 certificate (`RFC5280 <https://www.ietf.org/rfc/rfc5280.txt>`_),
+      encoded in base64, and wrapped by ``-----BEGIN CERTIFICATE-----`` and
+      ``-----END CERTIFICATE-----``.
+    """
+    UNSPECIFIED_PUBLIC_KEY_CERTIFICATE_FORMAT = 0
+    X509_CERTIFICATE_PEM = 1
+
+
+class PublicKeyFormat(enum.IntEnum):
+    """
+    The supported formats for the public key.
+
+    Attributes:
+      UNSPECIFIED_PUBLIC_KEY_FORMAT (int): The format has not been specified. This is an invalid default value and
+      must not be used.
+      RSA_PEM (int): An RSA public key encoded in base64, and wrapped by
+      ``-----BEGIN PUBLIC KEY-----`` and ``-----END PUBLIC KEY-----``. This can be
+      used to verify ``RS256`` signatures in JWT tokens ([RFC7518](
+      https://www.ietf.org/rfc/rfc7518.txt)).
+      RSA_X509_PEM (int): As RSA_PEM, but wrapped in an X.509v3 certificate ([RFC5280](
+      https://www.ietf.org/rfc/rfc5280.txt)), encoded in base64, and wrapped by
+      ``-----BEGIN CERTIFICATE-----`` and ``-----END CERTIFICATE-----``.
+      ES256_PEM (int): Public key for the ECDSA algorithm using P-256 and SHA-256, encoded in
+      base64, and wrapped by ``-----BEGIN PUBLIC KEY-----`` and ``-----END
+      PUBLIC KEY-----``. This can be used to verify JWT tokens with the ``ES256``
+      algorithm (`RFC7518 <https://www.ietf.org/rfc/rfc7518.txt>`_). This curve is
+      defined in `OpenSSL <https://www.openssl.org>`_ as the ``prime256v1`` curve.
+      ES256_X509_PEM (int): As ES256_PEM, but wrapped in an X.509v3 certificate ([RFC5280](
+      https://www.ietf.org/rfc/rfc5280.txt)), encoded in base64, and wrapped by
+      ``-----BEGIN CERTIFICATE-----`` and ``-----END CERTIFICATE-----``.
+    """
+    UNSPECIFIED_PUBLIC_KEY_FORMAT = 0
+    RSA_PEM = 3
+    RSA_X509_PEM = 1
+    ES256_PEM = 2
+    ES256_X509_PEM = 4
diff --git a/iot/google/cloud/iot_v1/proto/__init__.py b/iot/google/cloud/iot_v1/proto/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/iot/google/cloud/iot_v1/proto/device_manager_pb2.py b/iot/google/cloud/iot_v1/proto/device_manager_pb2.py
new file mode 100644
index 000000000000..55495c5a7b1e
--- /dev/null
+++ b/iot/google/cloud/iot_v1/proto/device_manager_pb2.py
@@ -0,0 +1,1265 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: google/cloud/iot_v1/proto/device_manager.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.cloud.iot_v1.proto import resources_pb2 as google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2 +from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2 +from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/iot_v1/proto/device_manager.proto', + package='google.cloud.iot.v1', + syntax='proto3', + serialized_pb=_b('\n.google/cloud/iot_v1/proto/device_manager.proto\x12\x13google.cloud.iot.v1\x1a\x1cgoogle/api/annotations.proto\x1a)google/cloud/iot_v1/proto/resources.proto\x1a\x1egoogle/iam/v1/iam_policy.proto\x1a\x1agoogle/iam/v1/policy.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"k\n\x1b\x43reateDeviceRegistryRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12<\n\x0f\x64\x65vice_registry\x18\x02 \x01(\x0b\x32#.google.cloud.iot.v1.DeviceRegistry\"(\n\x18GetDeviceRegistryRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"+\n\x1b\x44\x65leteDeviceRegistryRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x8c\x01\n\x1bUpdateDeviceRegistryRequest\x12<\n\x0f\x64\x65vice_registry\x18\x01 \x01(\x0b\x32#.google.cloud.iot.v1.DeviceRegistry\x12/\n\x0bupdate_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"T\n\x1bListDeviceRegistriesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\"w\n\x1cListDeviceRegistriesResponse\x12>\n\x11\x64\x65vice_registries\x18\x01 \x03(\x0b\x32#.google.cloud.iot.v1.DeviceRegistry\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t\"R\n\x13\x43reateDeviceRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12+\n\x06\x64\x65vice\x18\x02 \x01(\x0b\x32\x1b.google.cloud.iot.v1.Device\"P\n\x10GetDeviceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\nfield_mask\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"s\n\x13UpdateDeviceRequest\x12+\n\x06\x64\x65vice\x18\x02 \x01(\x0b\x32\x1b.google.cloud.iot.v1.Device\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\"#\n\x13\x44\x65leteDeviceRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\xa7\x01\n\x12ListDevicesRequest\x12\x0e\n\x06parent\x18\x01 \x01(\t\x12\x16\n\x0e\x64\x65vice_num_ids\x18\x02 \x03(\x04\x12\x12\n\ndevice_ids\x18\x03 \x03(\t\x12.\n\nfield_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x11\n\tpage_size\x18\x64 \x01(\x05\x12\x12\n\npage_token\x18\x65 \x01(\t\"\\\n\x13ListDevicesResponse\x12,\n\x07\x64\x65vices\x18\x01 \x03(\x0b\x32\x1b.google.cloud.iot.v1.Device\x12\x17\n\x0fnext_page_token\x18\x02 
\x01(\t\"`\n ModifyCloudToDeviceConfigRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x19\n\x11version_to_update\x18\x02 \x01(\x03\x12\x13\n\x0b\x62inary_data\x18\x03 \x01(\x0c\"E\n\x1fListDeviceConfigVersionsRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0cnum_versions\x18\x02 \x01(\x05\"]\n ListDeviceConfigVersionsResponse\x12\x39\n\x0e\x64\x65vice_configs\x18\x01 \x03(\x0b\x32!.google.cloud.iot.v1.DeviceConfig\";\n\x17ListDeviceStatesRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\nnum_states\x18\x02 \x01(\x05\"S\n\x18ListDeviceStatesResponse\x12\x37\n\rdevice_states\x18\x01 \x03(\x0b\x32 .google.cloud.iot.v1.DeviceState2\x93\x1c\n\rDeviceManager\x12\xb6\x01\n\x14\x43reateDeviceRegistry\x12\x30.google.cloud.iot.v1.CreateDeviceRegistryRequest\x1a#.google.cloud.iot.v1.DeviceRegistry\"G\x82\xd3\xe4\x93\x02\x41\"./v1/{parent=projects/*/locations/*}/registries:\x0f\x64\x65vice_registry\x12\x9f\x01\n\x11GetDeviceRegistry\x12-.google.cloud.iot.v1.GetDeviceRegistryRequest\x1a#.google.cloud.iot.v1.DeviceRegistry\"6\x82\xd3\xe4\x93\x02\x30\x12./v1/{name=projects/*/locations/*/registries/*}\x12\xc6\x01\n\x14UpdateDeviceRegistry\x12\x30.google.cloud.iot.v1.UpdateDeviceRegistryRequest\x1a#.google.cloud.iot.v1.DeviceRegistry\"W\x82\xd3\xe4\x93\x02Q2>/v1/{device_registry.name=projects/*/locations/*/registries/*}:\x0f\x64\x65vice_registry\x12\x98\x01\n\x14\x44\x65leteDeviceRegistry\x12\x30.google.cloud.iot.v1.DeleteDeviceRegistryRequest\x1a\x16.google.protobuf.Empty\"6\x82\xd3\xe4\x93\x02\x30*./v1/{name=projects/*/locations/*/registries/*}\x12\xb3\x01\n\x14ListDeviceRegistries\x12\x30.google.cloud.iot.v1.ListDeviceRegistriesRequest\x1a\x31.google.cloud.iot.v1.ListDeviceRegistriesResponse\"6\x82\xd3\xe4\x93\x02\x30\x12./v1/{parent=projects/*/locations/*}/registries\x12\x9f\x01\n\x0c\x43reateDevice\x12(.google.cloud.iot.v1.CreateDeviceRequest\x1a\x1b.google.cloud.iot.v1.Device\"H\x82\xd3\xe4\x93\x02\x42\"8/v1/{parent=projects/*/locations/*/registries/*}/devices:\x06\x64\x65vice\x12\xd7\x01\n\tGetDevice\x12%.google.cloud.iot.v1.GetDeviceRequest\x1a\x1b.google.cloud.iot.v1.Device\"\x85\x01\x82\xd3\xe4\x93\x02\x7f\x12\x38/v1/{name=projects/*/locations/*/registries/*/devices/*}ZC\x12\x41/v1/{name=projects/*/locations/*/registries/*/groups/*/devices/*}\x12\xfc\x01\n\x0cUpdateDevice\x12(.google.cloud.iot.v1.UpdateDeviceRequest\x1a\x1b.google.cloud.iot.v1.Device\"\xa4\x01\x82\xd3\xe4\x93\x02\x9d\x01\x32?/v1/{device.name=projects/*/locations/*/registries/*/devices/*}:\x06\x64\x65viceZR2H/v1/{device.name=projects/*/locations/*/registries/*/groups/*/devices/*}:\x06\x64\x65vice\x12\xd8\x01\n\x0c\x44\x65leteDevice\x12(.google.cloud.iot.v1.DeleteDeviceRequest\x1a\x16.google.protobuf.Empty\"\x85\x01\x82\xd3\xe4\x93\x02\x7f*8/v1/{name=projects/*/locations/*/registries/*/devices/*}ZC*A/v1/{name=projects/*/locations/*/registries/*/groups/*/devices/*}\x12\xda\x01\n\x0bListDevices\x12\'.google.cloud.iot.v1.ListDevicesRequest\x1a(.google.cloud.iot.v1.ListDevicesResponse\"x\x82\xd3\xe4\x93\x02r\x12\x38/v1/{parent=projects/*/locations/*/registries/*}/devicesZ6\x12\x34/v1/{parent=projects/*/locations/*/groups/*}/devices\x12\xb8\x02\n\x19ModifyCloudToDeviceConfig\x12\x35.google.cloud.iot.v1.ModifyCloudToDeviceConfigRequest\x1a!.google.cloud.iot.v1.DeviceConfig\"\xc0\x01\x82\xd3\xe4\x93\x02\xb9\x01\"R/v1/{name=projects/*/locations/*/registries/*/devices/*}:modifyCloudToDeviceConfig:\x01*Z`\"[/v1/{name=projects/*/locations/*/registries/*/groups/*/devices/*}:modifyCloudToDeviceConfig:\x01*\x12\xae\x02\n\x18ListDeviceConfigV
ersions\x12\x34.google.cloud.iot.v1.ListDeviceConfigVersionsRequest\x1a\x35.google.cloud.iot.v1.ListDeviceConfigVersionsResponse\"\xa4\x01\x82\xd3\xe4\x93\x02\x9d\x01\x12G/v1/{name=projects/*/locations/*/registries/*/devices/*}/configVersionsZR\x12P/v1/{name=projects/*/locations/*/registries/*/groups/*/devices/*}/configVersions\x12\x86\x02\n\x10ListDeviceStates\x12,.google.cloud.iot.v1.ListDeviceStatesRequest\x1a-.google.cloud.iot.v1.ListDeviceStatesResponse\"\x94\x01\x82\xd3\xe4\x93\x02\x8d\x01\x12?/v1/{name=projects/*/locations/*/registries/*/devices/*}/statesZJ\x12H/v1/{name=projects/*/locations/*/registries/*/groups/*/devices/*}/states\x12\xe6\x01\n\x0cSetIamPolicy\x12\".google.iam.v1.SetIamPolicyRequest\x1a\x15.google.iam.v1.Policy\"\x9a\x01\x82\xd3\xe4\x93\x02\x93\x01\"?/v1/{resource=projects/*/locations/*/registries/*}:setIamPolicy:\x01*ZM\"H/v1/{resource=projects/*/locations/*/registries/*/groups/*}:setIamPolicy:\x01*\x12\xe6\x01\n\x0cGetIamPolicy\x12\".google.iam.v1.GetIamPolicyRequest\x1a\x15.google.iam.v1.Policy\"\x9a\x01\x82\xd3\xe4\x93\x02\x93\x01\"?/v1/{resource=projects/*/locations/*/registries/*}:getIamPolicy:\x01*ZM\"H/v1/{resource=projects/*/locations/*/registries/*/groups/*}:getIamPolicy:\x01*\x12\x92\x02\n\x12TestIamPermissions\x12(.google.iam.v1.TestIamPermissionsRequest\x1a).google.iam.v1.TestIamPermissionsResponse\"\xa6\x01\x82\xd3\xe4\x93\x02\x9f\x01\"E/v1/{resource=projects/*/locations/*/registries/*}:testIamPermissions:\x01*ZS\"N/v1/{resource=projects/*/locations/*/registries/*/groups/*}:testIamPermissions:\x01*Bj\n\x17\x63om.google.cloud.iot.v1B\x12\x44\x65viceManagerProtoP\x01Z6google.golang.org/genproto/googleapis/cloud/iot/v1;iot\xf8\x01\x01\x62\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2.DESCRIPTOR,google_dot_iam_dot_v1_dot_iam__policy__pb2.DESCRIPTOR,google_dot_iam_dot_v1_dot_policy__pb2.DESCRIPTOR,google_dot_protobuf_dot_duration__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,]) + + + + +_CREATEDEVICEREGISTRYREQUEST = _descriptor.Descriptor( + name='CreateDeviceRegistryRequest', + full_name='google.cloud.iot.v1.CreateDeviceRegistryRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.cloud.iot.v1.CreateDeviceRegistryRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='device_registry', full_name='google.cloud.iot.v1.CreateDeviceRegistryRequest.device_registry', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=332, + serialized_end=439, +) + + +_GETDEVICEREGISTRYREQUEST = _descriptor.Descriptor( + name='GetDeviceRegistryRequest', + full_name='google.cloud.iot.v1.GetDeviceRegistryRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + 
fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.iot.v1.GetDeviceRegistryRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=441, + serialized_end=481, +) + + +_DELETEDEVICEREGISTRYREQUEST = _descriptor.Descriptor( + name='DeleteDeviceRegistryRequest', + full_name='google.cloud.iot.v1.DeleteDeviceRegistryRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.iot.v1.DeleteDeviceRegistryRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=483, + serialized_end=526, +) + + +_UPDATEDEVICEREGISTRYREQUEST = _descriptor.Descriptor( + name='UpdateDeviceRegistryRequest', + full_name='google.cloud.iot.v1.UpdateDeviceRegistryRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='device_registry', full_name='google.cloud.iot.v1.UpdateDeviceRegistryRequest.device_registry', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.cloud.iot.v1.UpdateDeviceRegistryRequest.update_mask', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=529, + serialized_end=669, +) + + +_LISTDEVICEREGISTRIESREQUEST = _descriptor.Descriptor( + name='ListDeviceRegistriesRequest', + full_name='google.cloud.iot.v1.ListDeviceRegistriesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.cloud.iot.v1.ListDeviceRegistriesRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.cloud.iot.v1.ListDeviceRegistriesRequest.page_size', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + 
name='page_token', full_name='google.cloud.iot.v1.ListDeviceRegistriesRequest.page_token', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=671, + serialized_end=755, +) + + +_LISTDEVICEREGISTRIESRESPONSE = _descriptor.Descriptor( + name='ListDeviceRegistriesResponse', + full_name='google.cloud.iot.v1.ListDeviceRegistriesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='device_registries', full_name='google.cloud.iot.v1.ListDeviceRegistriesResponse.device_registries', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.cloud.iot.v1.ListDeviceRegistriesResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=757, + serialized_end=876, +) + + +_CREATEDEVICEREQUEST = _descriptor.Descriptor( + name='CreateDeviceRequest', + full_name='google.cloud.iot.v1.CreateDeviceRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.cloud.iot.v1.CreateDeviceRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='device', full_name='google.cloud.iot.v1.CreateDeviceRequest.device', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=878, + serialized_end=960, +) + + +_GETDEVICEREQUEST = _descriptor.Descriptor( + name='GetDeviceRequest', + full_name='google.cloud.iot.v1.GetDeviceRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.iot.v1.GetDeviceRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='field_mask', full_name='google.cloud.iot.v1.GetDeviceRequest.field_mask', index=1, 
+ number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=962, + serialized_end=1042, +) + + +_UPDATEDEVICEREQUEST = _descriptor.Descriptor( + name='UpdateDeviceRequest', + full_name='google.cloud.iot.v1.UpdateDeviceRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='device', full_name='google.cloud.iot.v1.UpdateDeviceRequest.device', index=0, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='update_mask', full_name='google.cloud.iot.v1.UpdateDeviceRequest.update_mask', index=1, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1044, + serialized_end=1159, +) + + +_DELETEDEVICEREQUEST = _descriptor.Descriptor( + name='DeleteDeviceRequest', + full_name='google.cloud.iot.v1.DeleteDeviceRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.iot.v1.DeleteDeviceRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1161, + serialized_end=1196, +) + + +_LISTDEVICESREQUEST = _descriptor.Descriptor( + name='ListDevicesRequest', + full_name='google.cloud.iot.v1.ListDevicesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='parent', full_name='google.cloud.iot.v1.ListDevicesRequest.parent', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='device_num_ids', full_name='google.cloud.iot.v1.ListDevicesRequest.device_num_ids', index=1, + number=2, type=4, cpp_type=4, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='device_ids', full_name='google.cloud.iot.v1.ListDevicesRequest.device_ids', index=2, + number=3, type=9, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='field_mask', full_name='google.cloud.iot.v1.ListDevicesRequest.field_mask', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='page_size', full_name='google.cloud.iot.v1.ListDevicesRequest.page_size', index=4, + number=100, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='page_token', full_name='google.cloud.iot.v1.ListDevicesRequest.page_token', index=5, + number=101, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1199, + serialized_end=1366, +) + + +_LISTDEVICESRESPONSE = _descriptor.Descriptor( + name='ListDevicesResponse', + full_name='google.cloud.iot.v1.ListDevicesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='devices', full_name='google.cloud.iot.v1.ListDevicesResponse.devices', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='next_page_token', full_name='google.cloud.iot.v1.ListDevicesResponse.next_page_token', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1368, + serialized_end=1460, +) + + +_MODIFYCLOUDTODEVICECONFIGREQUEST = _descriptor.Descriptor( + name='ModifyCloudToDeviceConfigRequest', + full_name='google.cloud.iot.v1.ModifyCloudToDeviceConfigRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.iot.v1.ModifyCloudToDeviceConfigRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='version_to_update', full_name='google.cloud.iot.v1.ModifyCloudToDeviceConfigRequest.version_to_update', index=1, + number=2, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='binary_data', 
full_name='google.cloud.iot.v1.ModifyCloudToDeviceConfigRequest.binary_data', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1462, + serialized_end=1558, +) + + +_LISTDEVICECONFIGVERSIONSREQUEST = _descriptor.Descriptor( + name='ListDeviceConfigVersionsRequest', + full_name='google.cloud.iot.v1.ListDeviceConfigVersionsRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.iot.v1.ListDeviceConfigVersionsRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_versions', full_name='google.cloud.iot.v1.ListDeviceConfigVersionsRequest.num_versions', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1560, + serialized_end=1629, +) + + +_LISTDEVICECONFIGVERSIONSRESPONSE = _descriptor.Descriptor( + name='ListDeviceConfigVersionsResponse', + full_name='google.cloud.iot.v1.ListDeviceConfigVersionsResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='device_configs', full_name='google.cloud.iot.v1.ListDeviceConfigVersionsResponse.device_configs', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1631, + serialized_end=1724, +) + + +_LISTDEVICESTATESREQUEST = _descriptor.Descriptor( + name='ListDeviceStatesRequest', + full_name='google.cloud.iot.v1.ListDeviceStatesRequest', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.iot.v1.ListDeviceStatesRequest.name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_states', full_name='google.cloud.iot.v1.ListDeviceStatesRequest.num_states', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, 
+ is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1726, + serialized_end=1785, +) + + +_LISTDEVICESTATESRESPONSE = _descriptor.Descriptor( + name='ListDeviceStatesResponse', + full_name='google.cloud.iot.v1.ListDeviceStatesResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='device_states', full_name='google.cloud.iot.v1.ListDeviceStatesResponse.device_states', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1787, + serialized_end=1870, +) + +_CREATEDEVICEREGISTRYREQUEST.fields_by_name['device_registry'].message_type = google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2._DEVICEREGISTRY +_UPDATEDEVICEREGISTRYREQUEST.fields_by_name['device_registry'].message_type = google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2._DEVICEREGISTRY +_UPDATEDEVICEREGISTRYREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LISTDEVICEREGISTRIESRESPONSE.fields_by_name['device_registries'].message_type = google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2._DEVICEREGISTRY +_CREATEDEVICEREQUEST.fields_by_name['device'].message_type = google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2._DEVICE +_GETDEVICEREQUEST.fields_by_name['field_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_UPDATEDEVICEREQUEST.fields_by_name['device'].message_type = google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2._DEVICE +_UPDATEDEVICEREQUEST.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LISTDEVICESREQUEST.fields_by_name['field_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK +_LISTDEVICESRESPONSE.fields_by_name['devices'].message_type = google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2._DEVICE +_LISTDEVICECONFIGVERSIONSRESPONSE.fields_by_name['device_configs'].message_type = google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2._DEVICECONFIG +_LISTDEVICESTATESRESPONSE.fields_by_name['device_states'].message_type = google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2._DEVICESTATE +DESCRIPTOR.message_types_by_name['CreateDeviceRegistryRequest'] = _CREATEDEVICEREGISTRYREQUEST +DESCRIPTOR.message_types_by_name['GetDeviceRegistryRequest'] = _GETDEVICEREGISTRYREQUEST +DESCRIPTOR.message_types_by_name['DeleteDeviceRegistryRequest'] = _DELETEDEVICEREGISTRYREQUEST +DESCRIPTOR.message_types_by_name['UpdateDeviceRegistryRequest'] = _UPDATEDEVICEREGISTRYREQUEST +DESCRIPTOR.message_types_by_name['ListDeviceRegistriesRequest'] = _LISTDEVICEREGISTRIESREQUEST +DESCRIPTOR.message_types_by_name['ListDeviceRegistriesResponse'] = _LISTDEVICEREGISTRIESRESPONSE +DESCRIPTOR.message_types_by_name['CreateDeviceRequest'] = _CREATEDEVICEREQUEST +DESCRIPTOR.message_types_by_name['GetDeviceRequest'] = _GETDEVICEREQUEST +DESCRIPTOR.message_types_by_name['UpdateDeviceRequest'] = _UPDATEDEVICEREQUEST +DESCRIPTOR.message_types_by_name['DeleteDeviceRequest'] = _DELETEDEVICEREQUEST +DESCRIPTOR.message_types_by_name['ListDevicesRequest'] = _LISTDEVICESREQUEST 
+DESCRIPTOR.message_types_by_name['ListDevicesResponse'] = _LISTDEVICESRESPONSE +DESCRIPTOR.message_types_by_name['ModifyCloudToDeviceConfigRequest'] = _MODIFYCLOUDTODEVICECONFIGREQUEST +DESCRIPTOR.message_types_by_name['ListDeviceConfigVersionsRequest'] = _LISTDEVICECONFIGVERSIONSREQUEST +DESCRIPTOR.message_types_by_name['ListDeviceConfigVersionsResponse'] = _LISTDEVICECONFIGVERSIONSRESPONSE +DESCRIPTOR.message_types_by_name['ListDeviceStatesRequest'] = _LISTDEVICESTATESREQUEST +DESCRIPTOR.message_types_by_name['ListDeviceStatesResponse'] = _LISTDEVICESTATESRESPONSE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +CreateDeviceRegistryRequest = _reflection.GeneratedProtocolMessageType('CreateDeviceRegistryRequest', (_message.Message,), dict( + DESCRIPTOR = _CREATEDEVICEREGISTRYREQUEST, + __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2' + , + __doc__ = """Request for ``CreateDeviceRegistry``. + + + Attributes: + parent: + The project and cloud region where this device registry must + be created. For example, ``projects/example- + project/locations/us-central1``. + device_registry: + The device registry. The field ``name`` must be empty. The + server will generate that field from the device registry + ``id`` provided and the ``parent`` field. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.CreateDeviceRegistryRequest) + )) +_sym_db.RegisterMessage(CreateDeviceRegistryRequest) + +GetDeviceRegistryRequest = _reflection.GeneratedProtocolMessageType('GetDeviceRegistryRequest', (_message.Message,), dict( + DESCRIPTOR = _GETDEVICEREGISTRYREQUEST, + __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2' + , + __doc__ = """Request for ``GetDeviceRegistry``. + + + Attributes: + name: + The name of the device registry. For example, + ``projects/example-project/locations/us- + central1/registries/my-registry``. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.GetDeviceRegistryRequest) + )) +_sym_db.RegisterMessage(GetDeviceRegistryRequest) + +DeleteDeviceRegistryRequest = _reflection.GeneratedProtocolMessageType('DeleteDeviceRegistryRequest', (_message.Message,), dict( + DESCRIPTOR = _DELETEDEVICEREGISTRYREQUEST, + __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2' + , + __doc__ = """Request for ``DeleteDeviceRegistry``. + + + Attributes: + name: + The name of the device registry. For example, + ``projects/example-project/locations/us- + central1/registries/my-registry``. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.DeleteDeviceRegistryRequest) + )) +_sym_db.RegisterMessage(DeleteDeviceRegistryRequest) + +UpdateDeviceRegistryRequest = _reflection.GeneratedProtocolMessageType('UpdateDeviceRegistryRequest', (_message.Message,), dict( + DESCRIPTOR = _UPDATEDEVICEREGISTRYREQUEST, + __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2' + , + __doc__ = """Request for ``UpdateDeviceRegistry``. + + + Attributes: + device_registry: + The new values for the device registry. The ``id`` field must + be empty, and the ``name`` field must indicate the path of the + resource. For example, ``projects/example- + project/locations/us-central1/registries/my-registry``. + update_mask: + Only updates the ``device_registry`` fields indicated by this + mask. The field mask must not be empty, and it must not + contain fields that are immutable or only set by the server. + Mutable top-level fields: ``event_notification_config``, + ``http_config``, ``mqtt_config``, and + ``state_notification_config``. 
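+
+  A sketch of building such a mask with the standard
+  ``google.protobuf.field_mask_pb2`` helper, using one of the mutable
+  fields listed above::
+
+      from google.protobuf import field_mask_pb2
+      # Update only the registry's MQTT configuration.
+      mask = field_mask_pb2.FieldMask(paths=['mqtt_config'])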
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.UpdateDeviceRegistryRequest) + )) +_sym_db.RegisterMessage(UpdateDeviceRegistryRequest) + +ListDeviceRegistriesRequest = _reflection.GeneratedProtocolMessageType('ListDeviceRegistriesRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTDEVICEREGISTRIESREQUEST, + __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2' + , + __doc__ = """Request for ``ListDeviceRegistries``. + + + Attributes: + parent: + The project and cloud region path. For example, + ``projects/example-project/locations/us-central1``. + page_size: + The maximum number of registries to return in the response. If + this value is zero, the service will select a default size. A + call may return fewer objects than requested, but if there is + a non-empty ``page_token``, it indicates that more entries are + available. + page_token: + The value returned by the last + ``ListDeviceRegistriesResponse``; indicates that this is a + continuation of a prior ``ListDeviceRegistries`` call, and + that the system should return the next page of data. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.ListDeviceRegistriesRequest) + )) +_sym_db.RegisterMessage(ListDeviceRegistriesRequest) + +ListDeviceRegistriesResponse = _reflection.GeneratedProtocolMessageType('ListDeviceRegistriesResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTDEVICEREGISTRIESRESPONSE, + __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2' + , + __doc__ = """Response for ``ListDeviceRegistries``. + + + Attributes: + device_registries: + The registries that matched the query. + next_page_token: + If not empty, indicates that there may be more registries that + match the request; this value should be passed in a new + ``ListDeviceRegistriesRequest``. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.ListDeviceRegistriesResponse) + )) +_sym_db.RegisterMessage(ListDeviceRegistriesResponse) + +CreateDeviceRequest = _reflection.GeneratedProtocolMessageType('CreateDeviceRequest', (_message.Message,), dict( + DESCRIPTOR = _CREATEDEVICEREQUEST, + __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2' + , + __doc__ = """Request for ``CreateDevice``. + + + Attributes: + parent: + The name of the device registry where this device should be + created. For example, ``projects/example-project/locations/us- + central1/registries/my-registry``. + device: + The device registration details. The field ``name`` must be + empty. The server will generate that field from the device + registry ``id`` provided and the ``parent`` field. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.CreateDeviceRequest) + )) +_sym_db.RegisterMessage(CreateDeviceRequest) + +GetDeviceRequest = _reflection.GeneratedProtocolMessageType('GetDeviceRequest', (_message.Message,), dict( + DESCRIPTOR = _GETDEVICEREQUEST, + __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2' + , + __doc__ = """Request for ``GetDevice``. + + + Attributes: + name: + The name of the device. For example, + ``projects/p0/locations/us- + central1/registries/registry0/devices/device0`` or + ``projects/p0/locations/us- + central1/registries/registry0/devices/{num_id}``. + field_mask: + The fields of the ``Device`` resource to be returned in the + response. If the field mask is unset or empty, all fields are + returned. 
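+
+  A sketch of a partial read via ``field_mask``, assuming ``config`` and
+  ``last_heartbeat_time`` are valid ``Device`` field names::
+
+      from google.protobuf import field_mask_pb2
+      # Return only the listed fields of the Device.
+      mask = field_mask_pb2.FieldMask(paths=['config', 'last_heartbeat_time'])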
+      """,
+      # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.GetDeviceRequest)
+      ))
+_sym_db.RegisterMessage(GetDeviceRequest)
+
+UpdateDeviceRequest = _reflection.GeneratedProtocolMessageType('UpdateDeviceRequest', (_message.Message,), dict(
+  DESCRIPTOR = _UPDATEDEVICEREQUEST,
+  __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2'
+  ,
+  __doc__ = """Request for ``UpdateDevice``.
+
+
+  Attributes:
+      device:
+          The new values for the device. The ``id`` and
+          ``num_id`` fields must be empty, and the field ``name`` must
+          specify the name path. For example,
+          ``projects/p0/locations/us-
+          central1/registries/registry0/devices/device0``\ or
+          ``projects/p0/locations/us-
+          central1/registries/registry0/devices/{num_id}``.
+      update_mask:
+          Only updates the ``device`` fields indicated by this mask. The
+          field mask must not be empty, and it must not contain fields
+          that are immutable or only set by the server. Mutable top-
+          level fields: ``credentials``, ``blocked``, and ``metadata``.
+      """,
+      # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.UpdateDeviceRequest)
+      ))
+_sym_db.RegisterMessage(UpdateDeviceRequest)
+
+DeleteDeviceRequest = _reflection.GeneratedProtocolMessageType('DeleteDeviceRequest', (_message.Message,), dict(
+  DESCRIPTOR = _DELETEDEVICEREQUEST,
+  __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2'
+  ,
+  __doc__ = """Request for ``DeleteDevice``.
+
+
+  Attributes:
+      name:
+          The name of the device. For example,
+          ``projects/p0/locations/us-
+          central1/registries/registry0/devices/device0`` or
+          ``projects/p0/locations/us-
+          central1/registries/registry0/devices/{num_id}``.
+      """,
+      # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.DeleteDeviceRequest)
+      ))
+_sym_db.RegisterMessage(DeleteDeviceRequest)
+
+ListDevicesRequest = _reflection.GeneratedProtocolMessageType('ListDevicesRequest', (_message.Message,), dict(
+  DESCRIPTOR = _LISTDEVICESREQUEST,
+  __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2'
+  ,
+  __doc__ = """Request for ``ListDevices``.
+
+
+  Attributes:
+      parent:
+          The device registry path. Required. For example,
+          ``projects/my-project/locations/us-central1/registries/my-
+          registry``.
+      device_num_ids:
+          A list of device numerical ids. If empty, it will ignore this
+          field. This field cannot hold more than 10,000 entries.
+      device_ids:
+          A list of device string identifiers. If empty, it will ignore
+          this field. For example, ``['device0', 'device12']``. This
+          field cannot hold more than 10,000 entries.
+      field_mask:
+          The fields of the ``Device`` resource to be returned in the
+          response. The fields ``id`` and ``num_id`` are always
+          returned by default, along with any other fields specified.
+      page_size:
+          The maximum number of devices to return in the response. If
+          this value is zero, the service will select a default size. A
+          call may return fewer objects than requested, but if there is
+          a non-empty ``page_token``, it indicates that more entries are
+          available.
+      page_token:
+          The value returned by the last ``ListDevicesResponse``;
+          indicates that this is a continuation of a prior
+          ``ListDevices`` call, and that the system should return the
+          next page of data.
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.ListDevicesRequest) + )) +_sym_db.RegisterMessage(ListDevicesRequest) + +ListDevicesResponse = _reflection.GeneratedProtocolMessageType('ListDevicesResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTDEVICESRESPONSE, + __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2' + , + __doc__ = """Response for ``ListDevices``. + + + Attributes: + devices: + The devices that match the request. + next_page_token: + If not empty, indicates that there may be more devices that + match the request; this value should be passed in a new + ``ListDevicesRequest``. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.ListDevicesResponse) + )) +_sym_db.RegisterMessage(ListDevicesResponse) + +ModifyCloudToDeviceConfigRequest = _reflection.GeneratedProtocolMessageType('ModifyCloudToDeviceConfigRequest', (_message.Message,), dict( + DESCRIPTOR = _MODIFYCLOUDTODEVICECONFIGREQUEST, + __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2' + , + __doc__ = """Request for ``ModifyCloudToDeviceConfig``. + + + Attributes: + name: + The name of the device. For example, + ``projects/p0/locations/us- + central1/registries/registry0/devices/device0`` or + ``projects/p0/locations/us- + central1/registries/registry0/devices/{num_id}``. + version_to_update: + The version number to update. If this value is zero, it will + not check the version number of the server and will always + update the current version; otherwise, this update will fail + if the version number found on the server does not match this + version number. This is used to support multiple simultaneous + updates without losing data. + binary_data: + The configuration data for the device. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.ModifyCloudToDeviceConfigRequest) + )) +_sym_db.RegisterMessage(ModifyCloudToDeviceConfigRequest) + +ListDeviceConfigVersionsRequest = _reflection.GeneratedProtocolMessageType('ListDeviceConfigVersionsRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTDEVICECONFIGVERSIONSREQUEST, + __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2' + , + __doc__ = """Request for ``ListDeviceConfigVersions``. + + + Attributes: + name: + The name of the device. For example, + ``projects/p0/locations/us- + central1/registries/registry0/devices/device0`` or + ``projects/p0/locations/us- + central1/registries/registry0/devices/{num_id}``. + num_versions: + The number of versions to list. Versions are listed in + decreasing order of the version number. The maximum number of + versions retained is 10. If this value is zero, it will return + all the versions available. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.ListDeviceConfigVersionsRequest) + )) +_sym_db.RegisterMessage(ListDeviceConfigVersionsRequest) + +ListDeviceConfigVersionsResponse = _reflection.GeneratedProtocolMessageType('ListDeviceConfigVersionsResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTDEVICECONFIGVERSIONSRESPONSE, + __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2' + , + __doc__ = """Response for ``ListDeviceConfigVersions``. + + + Attributes: + device_configs: + The device configuration for the last few versions. Versions + are listed in decreasing order, starting from the most recent + one. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.ListDeviceConfigVersionsResponse) + )) +_sym_db.RegisterMessage(ListDeviceConfigVersionsResponse) + +ListDeviceStatesRequest = _reflection.GeneratedProtocolMessageType('ListDeviceStatesRequest', (_message.Message,), dict( + DESCRIPTOR = _LISTDEVICESTATESREQUEST, + __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2' + , + __doc__ = """Request for ``ListDeviceStates``. + + + Attributes: + name: + The name of the device. For example, + ``projects/p0/locations/us- + central1/registries/registry0/devices/device0`` or + ``projects/p0/locations/us- + central1/registries/registry0/devices/{num_id}``. + num_states: + The number of states to list. States are listed in descending + order of update time. The maximum number of states retained is + 10. If this value is zero, it will return all the states + available. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.ListDeviceStatesRequest) + )) +_sym_db.RegisterMessage(ListDeviceStatesRequest) + +ListDeviceStatesResponse = _reflection.GeneratedProtocolMessageType('ListDeviceStatesResponse', (_message.Message,), dict( + DESCRIPTOR = _LISTDEVICESTATESRESPONSE, + __module__ = 'google.cloud.iot_v1.proto.device_manager_pb2' + , + __doc__ = """Response for ``ListDeviceStates``. + + + Attributes: + device_states: + The last few device states. States are listed in descending + order of server update time, starting from the most recent + one. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.ListDeviceStatesResponse) + )) +_sym_db.RegisterMessage(ListDeviceStatesResponse) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.google.cloud.iot.v1B\022DeviceManagerProtoP\001Z6google.golang.org/genproto/googleapis/cloud/iot/v1;iot\370\001\001')) + +_DEVICEMANAGER = _descriptor.ServiceDescriptor( + name='DeviceManager', + full_name='google.cloud.iot.v1.DeviceManager', + file=DESCRIPTOR, + index=0, + options=None, + serialized_start=1873, + serialized_end=5476, + methods=[ + _descriptor.MethodDescriptor( + name='CreateDeviceRegistry', + full_name='google.cloud.iot.v1.DeviceManager.CreateDeviceRegistry', + index=0, + containing_service=None, + input_type=_CREATEDEVICEREGISTRYREQUEST, + output_type=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2._DEVICEREGISTRY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002A\"./v1/{parent=projects/*/locations/*}/registries:\017device_registry')), + ), + _descriptor.MethodDescriptor( + name='GetDeviceRegistry', + full_name='google.cloud.iot.v1.DeviceManager.GetDeviceRegistry', + index=1, + containing_service=None, + input_type=_GETDEVICEREGISTRYREQUEST, + output_type=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2._DEVICEREGISTRY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0020\022./v1/{name=projects/*/locations/*/registries/*}')), + ), + _descriptor.MethodDescriptor( + name='UpdateDeviceRegistry', + full_name='google.cloud.iot.v1.DeviceManager.UpdateDeviceRegistry', + index=2, + containing_service=None, + input_type=_UPDATEDEVICEREGISTRYREQUEST, + output_type=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2._DEVICEREGISTRY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002Q2>/v1/{device_registry.name=projects/*/locations/*/registries/*}:\017device_registry')), + ), + 
_descriptor.MethodDescriptor( + name='DeleteDeviceRegistry', + full_name='google.cloud.iot.v1.DeviceManager.DeleteDeviceRegistry', + index=3, + containing_service=None, + input_type=_DELETEDEVICEREGISTRYREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0020*./v1/{name=projects/*/locations/*/registries/*}')), + ), + _descriptor.MethodDescriptor( + name='ListDeviceRegistries', + full_name='google.cloud.iot.v1.DeviceManager.ListDeviceRegistries', + index=4, + containing_service=None, + input_type=_LISTDEVICEREGISTRIESREQUEST, + output_type=_LISTDEVICEREGISTRIESRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\0020\022./v1/{parent=projects/*/locations/*}/registries')), + ), + _descriptor.MethodDescriptor( + name='CreateDevice', + full_name='google.cloud.iot.v1.DeviceManager.CreateDevice', + index=5, + containing_service=None, + input_type=_CREATEDEVICEREQUEST, + output_type=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2._DEVICE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002B\"8/v1/{parent=projects/*/locations/*/registries/*}/devices:\006device')), + ), + _descriptor.MethodDescriptor( + name='GetDevice', + full_name='google.cloud.iot.v1.DeviceManager.GetDevice', + index=6, + containing_service=None, + input_type=_GETDEVICEREQUEST, + output_type=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2._DEVICE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\177\0228/v1/{name=projects/*/locations/*/registries/*/devices/*}ZC\022A/v1/{name=projects/*/locations/*/registries/*/groups/*/devices/*}')), + ), + _descriptor.MethodDescriptor( + name='UpdateDevice', + full_name='google.cloud.iot.v1.DeviceManager.UpdateDevice', + index=7, + containing_service=None, + input_type=_UPDATEDEVICEREQUEST, + output_type=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2._DEVICE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\235\0012?/v1/{device.name=projects/*/locations/*/registries/*/devices/*}:\006deviceZR2H/v1/{device.name=projects/*/locations/*/registries/*/groups/*/devices/*}:\006device')), + ), + _descriptor.MethodDescriptor( + name='DeleteDevice', + full_name='google.cloud.iot.v1.DeviceManager.DeleteDevice', + index=8, + containing_service=None, + input_type=_DELETEDEVICEREQUEST, + output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\177*8/v1/{name=projects/*/locations/*/registries/*/devices/*}ZC*A/v1/{name=projects/*/locations/*/registries/*/groups/*/devices/*}')), + ), + _descriptor.MethodDescriptor( + name='ListDevices', + full_name='google.cloud.iot.v1.DeviceManager.ListDevices', + index=9, + containing_service=None, + input_type=_LISTDEVICESREQUEST, + output_type=_LISTDEVICESRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002r\0228/v1/{parent=projects/*/locations/*/registries/*}/devicesZ6\0224/v1/{parent=projects/*/locations/*/groups/*}/devices')), + ), + _descriptor.MethodDescriptor( + name='ModifyCloudToDeviceConfig', + full_name='google.cloud.iot.v1.DeviceManager.ModifyCloudToDeviceConfig', + index=10, + containing_service=None, + input_type=_MODIFYCLOUDTODEVICECONFIGREQUEST, + 
output_type=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2._DEVICECONFIG, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\271\001\"R/v1/{name=projects/*/locations/*/registries/*/devices/*}:modifyCloudToDeviceConfig:\001*Z`\"[/v1/{name=projects/*/locations/*/registries/*/groups/*/devices/*}:modifyCloudToDeviceConfig:\001*')), + ), + _descriptor.MethodDescriptor( + name='ListDeviceConfigVersions', + full_name='google.cloud.iot.v1.DeviceManager.ListDeviceConfigVersions', + index=11, + containing_service=None, + input_type=_LISTDEVICECONFIGVERSIONSREQUEST, + output_type=_LISTDEVICECONFIGVERSIONSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\235\001\022G/v1/{name=projects/*/locations/*/registries/*/devices/*}/configVersionsZR\022P/v1/{name=projects/*/locations/*/registries/*/groups/*/devices/*}/configVersions')), + ), + _descriptor.MethodDescriptor( + name='ListDeviceStates', + full_name='google.cloud.iot.v1.DeviceManager.ListDeviceStates', + index=12, + containing_service=None, + input_type=_LISTDEVICESTATESREQUEST, + output_type=_LISTDEVICESTATESRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\215\001\022?/v1/{name=projects/*/locations/*/registries/*/devices/*}/statesZJ\022H/v1/{name=projects/*/locations/*/registries/*/groups/*/devices/*}/states')), + ), + _descriptor.MethodDescriptor( + name='SetIamPolicy', + full_name='google.cloud.iot.v1.DeviceManager.SetIamPolicy', + index=13, + containing_service=None, + input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._SETIAMPOLICYREQUEST, + output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\223\001\"?/v1/{resource=projects/*/locations/*/registries/*}:setIamPolicy:\001*ZM\"H/v1/{resource=projects/*/locations/*/registries/*/groups/*}:setIamPolicy:\001*')), + ), + _descriptor.MethodDescriptor( + name='GetIamPolicy', + full_name='google.cloud.iot.v1.DeviceManager.GetIamPolicy', + index=14, + containing_service=None, + input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._GETIAMPOLICYREQUEST, + output_type=google_dot_iam_dot_v1_dot_policy__pb2._POLICY, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\223\001\"?/v1/{resource=projects/*/locations/*/registries/*}:getIamPolicy:\001*ZM\"H/v1/{resource=projects/*/locations/*/registries/*/groups/*}:getIamPolicy:\001*')), + ), + _descriptor.MethodDescriptor( + name='TestIamPermissions', + full_name='google.cloud.iot.v1.DeviceManager.TestIamPermissions', + index=15, + containing_service=None, + input_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSREQUEST, + output_type=google_dot_iam_dot_v1_dot_iam__policy__pb2._TESTIAMPERMISSIONSRESPONSE, + options=_descriptor._ParseOptions(descriptor_pb2.MethodOptions(), _b('\202\323\344\223\002\237\001\"E/v1/{resource=projects/*/locations/*/registries/*}:testIamPermissions:\001*ZS\"N/v1/{resource=projects/*/locations/*/registries/*/groups/*}:testIamPermissions:\001*')), + ), +]) +_sym_db.RegisterServiceDescriptor(_DEVICEMANAGER) + +DESCRIPTOR.services_by_name['DeviceManager'] = _DEVICEMANAGER + +# @@protoc_insertion_point(module_scope) diff --git a/iot/google/cloud/iot_v1/proto/device_manager_pb2_grpc.py b/iot/google/cloud/iot_v1/proto/device_manager_pb2_grpc.py new file mode 100644 index 000000000000..7dcf8a4572bf --- /dev/null +++ 
b/iot/google/cloud/iot_v1/proto/device_manager_pb2_grpc.py
@@ -0,0 +1,316 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+import grpc
+
+from google.cloud.iot_v1.proto import device_manager_pb2 as google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2
+from google.cloud.iot_v1.proto import resources_pb2 as google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2
+from google.iam.v1 import iam_policy_pb2 as google_dot_iam_dot_v1_dot_iam__policy__pb2
+from google.iam.v1 import policy_pb2 as google_dot_iam_dot_v1_dot_policy__pb2
+from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
+
+
+class DeviceManagerStub(object):
+  """Internet of things (IoT) service. Allows manipulation of device registry
+  instances and the registration of devices (Things) to the cloud.
+  """
+
+  def __init__(self, channel):
+    """Constructor.
+
+    Args:
+      channel: A grpc.Channel.
+    """
+    self.CreateDeviceRegistry = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/CreateDeviceRegistry',
+        request_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.CreateDeviceRegistryRequest.SerializeToString,
+        response_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2.DeviceRegistry.FromString,
+        )
+    self.GetDeviceRegistry = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/GetDeviceRegistry',
+        request_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.GetDeviceRegistryRequest.SerializeToString,
+        response_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2.DeviceRegistry.FromString,
+        )
+    self.UpdateDeviceRegistry = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/UpdateDeviceRegistry',
+        request_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.UpdateDeviceRegistryRequest.SerializeToString,
+        response_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2.DeviceRegistry.FromString,
+        )
+    self.DeleteDeviceRegistry = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/DeleteDeviceRegistry',
+        request_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.DeleteDeviceRegistryRequest.SerializeToString,
+        response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+        )
+    self.ListDeviceRegistries = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/ListDeviceRegistries',
+        request_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDeviceRegistriesRequest.SerializeToString,
+        response_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDeviceRegistriesResponse.FromString,
+        )
+    self.CreateDevice = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/CreateDevice',
+        request_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.CreateDeviceRequest.SerializeToString,
+        response_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2.Device.FromString,
+        )
+    self.GetDevice = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/GetDevice',
+        request_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.GetDeviceRequest.SerializeToString,
+        response_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2.Device.FromString,
+        )
+    self.UpdateDevice = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/UpdateDevice',
+        request_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.UpdateDeviceRequest.SerializeToString,
+        response_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2.Device.FromString,
+        )
+    self.DeleteDevice = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/DeleteDevice',
+        request_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.DeleteDeviceRequest.SerializeToString,
+        response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
+        )
+    self.ListDevices = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/ListDevices',
+        request_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDevicesRequest.SerializeToString,
+        response_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDevicesResponse.FromString,
+        )
+    self.ModifyCloudToDeviceConfig = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/ModifyCloudToDeviceConfig',
+        request_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ModifyCloudToDeviceConfigRequest.SerializeToString,
+        response_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2.DeviceConfig.FromString,
+        )
+    self.ListDeviceConfigVersions = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/ListDeviceConfigVersions',
+        request_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDeviceConfigVersionsRequest.SerializeToString,
+        response_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDeviceConfigVersionsResponse.FromString,
+        )
+    self.ListDeviceStates = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/ListDeviceStates',
+        request_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDeviceStatesRequest.SerializeToString,
+        response_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDeviceStatesResponse.FromString,
+        )
+    self.SetIamPolicy = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/SetIamPolicy',
+        request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.SerializeToString,
+        response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString,
+        )
+    self.GetIamPolicy = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/GetIamPolicy',
+        request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.SerializeToString,
+        response_deserializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.FromString,
+        )
+    self.TestIamPermissions = channel.unary_unary(
+        '/google.cloud.iot.v1.DeviceManager/TestIamPermissions',
+        request_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.SerializeToString,
+        response_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.FromString,
+        )
+
+
+class DeviceManagerServicer(object):
+  """Internet of things (IoT) service. Allows manipulation of device registry
+  instances and the registration of devices (Things) to the cloud.
+  """
+
+  def CreateDeviceRegistry(self, request, context):
+    """Creates a device registry that contains devices.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def GetDeviceRegistry(self, request, context):
+    """Gets a device registry configuration.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def UpdateDeviceRegistry(self, request, context):
+    """Updates a device registry configuration.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def DeleteDeviceRegistry(self, request, context):
+    """Deletes a device registry configuration.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def ListDeviceRegistries(self, request, context):
+    """Lists device registries.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def CreateDevice(self, request, context):
+    """Creates a device in a device registry.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def GetDevice(self, request, context):
+    """Gets details about a device.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def UpdateDevice(self, request, context):
+    """Updates a device.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def DeleteDevice(self, request, context):
+    """Deletes a device.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def ListDevices(self, request, context):
+    """Lists devices in a device registry.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def ModifyCloudToDeviceConfig(self, request, context):
+    """Modifies the configuration for the device, which is eventually sent from
+    the Cloud IoT Core servers. Returns the modified configuration version and
+    its metadata.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def ListDeviceConfigVersions(self, request, context):
+    """Lists the last few versions of the device configuration in descending
+    order (i.e., newest first).
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def ListDeviceStates(self, request, context):
+    """Lists the last few versions of the device state in descending order
+    (i.e., newest first).
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def SetIamPolicy(self, request, context):
+    """Sets the access control policy on the specified resource. Replaces any
+    existing policy.
+    """
+    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+    context.set_details('Method not implemented!')
+    raise NotImplementedError('Method not implemented!')
+
+  def GetIamPolicy(self, request, context):
+    """Gets the access control policy for a resource.
+ Returns an empty policy if the resource exists and does not have a policy + set. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def TestIamPermissions(self, request, context): + """Returns permissions that a caller has on the specified resource. + If the resource does not exist, this will return an empty set of + permissions, not a NOT_FOUND error. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_DeviceManagerServicer_to_server(servicer, server): + rpc_method_handlers = { + 'CreateDeviceRegistry': grpc.unary_unary_rpc_method_handler( + servicer.CreateDeviceRegistry, + request_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.CreateDeviceRegistryRequest.FromString, + response_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2.DeviceRegistry.SerializeToString, + ), + 'GetDeviceRegistry': grpc.unary_unary_rpc_method_handler( + servicer.GetDeviceRegistry, + request_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.GetDeviceRegistryRequest.FromString, + response_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2.DeviceRegistry.SerializeToString, + ), + 'UpdateDeviceRegistry': grpc.unary_unary_rpc_method_handler( + servicer.UpdateDeviceRegistry, + request_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.UpdateDeviceRegistryRequest.FromString, + response_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2.DeviceRegistry.SerializeToString, + ), + 'DeleteDeviceRegistry': grpc.unary_unary_rpc_method_handler( + servicer.DeleteDeviceRegistry, + request_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.DeleteDeviceRegistryRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListDeviceRegistries': grpc.unary_unary_rpc_method_handler( + servicer.ListDeviceRegistries, + request_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDeviceRegistriesRequest.FromString, + response_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDeviceRegistriesResponse.SerializeToString, + ), + 'CreateDevice': grpc.unary_unary_rpc_method_handler( + servicer.CreateDevice, + request_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.CreateDeviceRequest.FromString, + response_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2.Device.SerializeToString, + ), + 'GetDevice': grpc.unary_unary_rpc_method_handler( + servicer.GetDevice, + request_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.GetDeviceRequest.FromString, + response_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2.Device.SerializeToString, + ), + 'UpdateDevice': grpc.unary_unary_rpc_method_handler( + servicer.UpdateDevice, + request_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.UpdateDeviceRequest.FromString, + response_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2.Device.SerializeToString, + ), + 'DeleteDevice': grpc.unary_unary_rpc_method_handler( + servicer.DeleteDevice, + request_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.DeleteDeviceRequest.FromString, + 
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + 'ListDevices': grpc.unary_unary_rpc_method_handler( + servicer.ListDevices, + request_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDevicesRequest.FromString, + response_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDevicesResponse.SerializeToString, + ), + 'ModifyCloudToDeviceConfig': grpc.unary_unary_rpc_method_handler( + servicer.ModifyCloudToDeviceConfig, + request_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ModifyCloudToDeviceConfigRequest.FromString, + response_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_resources__pb2.DeviceConfig.SerializeToString, + ), + 'ListDeviceConfigVersions': grpc.unary_unary_rpc_method_handler( + servicer.ListDeviceConfigVersions, + request_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDeviceConfigVersionsRequest.FromString, + response_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDeviceConfigVersionsResponse.SerializeToString, + ), + 'ListDeviceStates': grpc.unary_unary_rpc_method_handler( + servicer.ListDeviceStates, + request_deserializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDeviceStatesRequest.FromString, + response_serializer=google_dot_cloud_dot_iot__v1_dot_proto_dot_device__manager__pb2.ListDeviceStatesResponse.SerializeToString, + ), + 'SetIamPolicy': grpc.unary_unary_rpc_method_handler( + servicer.SetIamPolicy, + request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.SetIamPolicyRequest.FromString, + response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, + ), + 'GetIamPolicy': grpc.unary_unary_rpc_method_handler( + servicer.GetIamPolicy, + request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.GetIamPolicyRequest.FromString, + response_serializer=google_dot_iam_dot_v1_dot_policy__pb2.Policy.SerializeToString, + ), + 'TestIamPermissions': grpc.unary_unary_rpc_method_handler( + servicer.TestIamPermissions, + request_deserializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsRequest.FromString, + response_serializer=google_dot_iam_dot_v1_dot_iam__policy__pb2.TestIamPermissionsResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'google.cloud.iot.v1.DeviceManager', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) diff --git a/iot/google/cloud/iot_v1/proto/resources_pb2.py b/iot/google/cloud/iot_v1/proto/resources_pb2.py new file mode 100644 index 000000000000..fa5d8d16a96b --- /dev/null +++ b/iot/google/cloud/iot_v1/proto/resources_pb2.py @@ -0,0 +1,1299 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/cloud/iot_v1/proto/resources.proto + +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='google/cloud/iot_v1/proto/resources.proto', + package='google.cloud.iot.v1', + syntax='proto3', + serialized_pb=_b('\n)google/cloud/iot_v1/proto/resources.proto\x12\x13google.cloud.iot.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\"\xcd\x05\n\x06\x44\x65vice\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06num_id\x18\x03 \x01(\x04\x12:\n\x0b\x63redentials\x18\x0c \x03(\x0b\x32%.google.cloud.iot.v1.DeviceCredential\x12\x37\n\x13last_heartbeat_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x0flast_event_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x0flast_state_time\x18\x14 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x38\n\x14last_config_ack_time\x18\x0e \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x39\n\x15last_config_send_time\x18\x12 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07\x62locked\x18\x13 \x01(\x08\x12\x33\n\x0flast_error_time\x18\n \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12-\n\x11last_error_status\x18\x0b \x01(\x0b\x32\x12.google.rpc.Status\x12\x31\n\x06\x63onfig\x18\r \x01(\x0b\x32!.google.cloud.iot.v1.DeviceConfig\x12/\n\x05state\x18\x10 \x01(\x0b\x32 .google.cloud.iot.v1.DeviceState\x12;\n\x08metadata\x18\x11 \x03(\x0b\x32).google.cloud.iot.v1.Device.MetadataEntry\x1a/\n\rMetadataEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xf7\x02\n\x0e\x44\x65viceRegistry\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12P\n\x1a\x65vent_notification_configs\x18\n \x03(\x0b\x32,.google.cloud.iot.v1.EventNotificationConfig\x12O\n\x19state_notification_config\x18\x07 \x01(\x0b\x32,.google.cloud.iot.v1.StateNotificationConfig\x12\x34\n\x0bmqtt_config\x18\x04 \x01(\x0b\x32\x1f.google.cloud.iot.v1.MqttConfig\x12\x34\n\x0bhttp_config\x18\t \x01(\x0b\x32\x1f.google.cloud.iot.v1.HttpConfig\x12<\n\x0b\x63redentials\x18\x08 \x03(\x0b\x32\'.google.cloud.iot.v1.RegistryCredential\"H\n\nMqttConfig\x12:\n\x12mqtt_enabled_state\x18\x01 \x01(\x0e\x32\x1e.google.cloud.iot.v1.MqttState\"H\n\nHttpConfig\x12:\n\x12http_enabled_state\x18\x01 \x01(\x0e\x32\x1e.google.cloud.iot.v1.HttpState\"O\n\x17\x45ventNotificationConfig\x12\x19\n\x11subfolder_matches\x18\x02 \x01(\t\x12\x19\n\x11pubsub_topic_name\x18\x01 \x01(\t\"4\n\x17StateNotificationConfig\x12\x19\n\x11pubsub_topic_name\x18\x01 \x01(\t\"o\n\x12RegistryCredential\x12K\n\x16public_key_certificate\x18\x01 \x01(\x0b\x32).google.cloud.iot.v1.PublicKeyCertificateH\x00\x42\x0c\n\ncredential\"\xd0\x01\n\x16X509CertificateDetails\x12\x0e\n\x06issuer\x18\x01 \x01(\t\x12\x0f\n\x07subject\x18\x02 \x01(\t\x12.\n\nstart_time\x18\x03 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12/\n\x0b\x65xpiry_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1b\n\x13signature_algorithm\x18\x05 \x01(\t\x12\x17\n\x0fpublic_key_type\x18\x06 \x01(\t\"\xaf\x01\n\x14PublicKeyCertificate\x12?\n\x06\x66ormat\x18\x01 \x01(\x0e\x32/.google.cloud.iot.v1.PublicKeyCertificateFormat\x12\x13\n\x0b\x63\x65rtificate\x18\x02 \x01(\t\x12\x41\n\x0cx509_details\x18\x03 \x01(\x0b\x32+.google.cloud.iot.v1.X509CertificateDetails\"\x95\x01\n\x10\x44\x65viceCredential\x12>\n\npublic_key\x18\x02 \x01(\x0b\x32(.google.cloud.iot.v1.PublicKeyCredentialH\x00\x12\x33\n\x0f\x65xpiration_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x0c\n\ncredential\"X\n\x13PublicKeyCredential\x12\x34\n\x06\x66ormat\x18\x01 \x01(\x0e\x32$.google.cloud.iot.v1.PublicKeyFormat\x12\x0b\n\x03key\x18\x02 \x01(\t\"\xa0\x01\n\x0c\x44\x65viceConfig\x12\x0f\n\x07version\x18\x01 \x01(\x03\x12\x35\n\x11\x63loud_update_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x33\n\x0f\x64\x65vice_ack_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x13\n\x0b\x62inary_data\x18\x04 \x01(\x0c\"S\n\x0b\x44\x65viceState\x12/\n\x0bupdate_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x13\n\x0b\x62inary_data\x18\x02 \x01(\x0c*L\n\tMqttState\x12\x1a\n\x16MQTT_STATE_UNSPECIFIED\x10\x00\x12\x10\n\x0cMQTT_ENABLED\x10\x01\x12\x11\n\rMQTT_DISABLED\x10\x02*L\n\tHttpState\x12\x1a\n\x16HTTP_STATE_UNSPECIFIED\x10\x00\x12\x10\n\x0cHTTP_ENABLED\x10\x01\x12\x11\n\rHTTP_DISABLED\x10\x02*e\n\x1aPublicKeyCertificateFormat\x12-\n)UNSPECIFIED_PUBLIC_KEY_CERTIFICATE_FORMAT\x10\x00\x12\x18\n\x14X509_CERTIFICATE_PEM\x10\x01*v\n\x0fPublicKeyFormat\x12!\n\x1dUNSPECIFIED_PUBLIC_KEY_FORMAT\x10\x00\x12\x0b\n\x07RSA_PEM\x10\x03\x12\x10\n\x0cRSA_X509_PEM\x10\x01\x12\r\n\tES256_PEM\x10\x02\x12\x12\n\x0e\x45S256_X509_PEM\x10\x04\x42\x66\n\x17\x63om.google.cloud.iot.v1B\x0eResourcesProtoP\x01Z6google.golang.org/genproto/googleapis/cloud/iot/v1;iot\xf8\x01\x01\x62\x06proto3') + , + dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,]) + +_MQTTSTATE = _descriptor.EnumDescriptor( + name='MqttState', + full_name='google.cloud.iot.v1.MqttState', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='MQTT_STATE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MQTT_ENABLED', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MQTT_DISABLED', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2527, + serialized_end=2603, +) +_sym_db.RegisterEnumDescriptor(_MQTTSTATE) + +MqttState = enum_type_wrapper.EnumTypeWrapper(_MQTTSTATE) +_HTTPSTATE = _descriptor.EnumDescriptor( + name='HttpState', + full_name='google.cloud.iot.v1.HttpState', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='HTTP_STATE_UNSPECIFIED', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HTTP_ENABLED', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='HTTP_DISABLED', index=2, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2605, + serialized_end=2681, +) +_sym_db.RegisterEnumDescriptor(_HTTPSTATE) + +HttpState = 
enum_type_wrapper.EnumTypeWrapper(_HTTPSTATE) +_PUBLICKEYCERTIFICATEFORMAT = _descriptor.EnumDescriptor( + name='PublicKeyCertificateFormat', + full_name='google.cloud.iot.v1.PublicKeyCertificateFormat', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNSPECIFIED_PUBLIC_KEY_CERTIFICATE_FORMAT', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='X509_CERTIFICATE_PEM', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2683, + serialized_end=2784, +) +_sym_db.RegisterEnumDescriptor(_PUBLICKEYCERTIFICATEFORMAT) + +PublicKeyCertificateFormat = enum_type_wrapper.EnumTypeWrapper(_PUBLICKEYCERTIFICATEFORMAT) +_PUBLICKEYFORMAT = _descriptor.EnumDescriptor( + name='PublicKeyFormat', + full_name='google.cloud.iot.v1.PublicKeyFormat', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='UNSPECIFIED_PUBLIC_KEY_FORMAT', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RSA_PEM', index=1, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='RSA_X509_PEM', index=2, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ES256_PEM', index=3, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='ES256_X509_PEM', index=4, number=4, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=2786, + serialized_end=2904, +) +_sym_db.RegisterEnumDescriptor(_PUBLICKEYFORMAT) + +PublicKeyFormat = enum_type_wrapper.EnumTypeWrapper(_PUBLICKEYFORMAT) +MQTT_STATE_UNSPECIFIED = 0 +MQTT_ENABLED = 1 +MQTT_DISABLED = 2 +HTTP_STATE_UNSPECIFIED = 0 +HTTP_ENABLED = 1 +HTTP_DISABLED = 2 +UNSPECIFIED_PUBLIC_KEY_CERTIFICATE_FORMAT = 0 +X509_CERTIFICATE_PEM = 1 +UNSPECIFIED_PUBLIC_KEY_FORMAT = 0 +RSA_PEM = 3 +RSA_X509_PEM = 1 +ES256_PEM = 2 +ES256_X509_PEM = 4 + + + +_DEVICE_METADATAENTRY = _descriptor.Descriptor( + name='MetadataEntry', + full_name='google.cloud.iot.v1.Device.MetadataEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='google.cloud.iot.v1.Device.MetadataEntry.key', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='value', full_name='google.cloud.iot.v1.Device.MetadataEntry.value', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')), + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=825, + serialized_end=872, +) + +_DEVICE = _descriptor.Descriptor( + name='Device', + full_name='google.cloud.iot.v1.Device', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='google.cloud.iot.v1.Device.id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, 
default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.iot.v1.Device.name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='num_id', full_name='google.cloud.iot.v1.Device.num_id', index=2, + number=3, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='credentials', full_name='google.cloud.iot.v1.Device.credentials', index=3, + number=12, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='last_heartbeat_time', full_name='google.cloud.iot.v1.Device.last_heartbeat_time', index=4, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='last_event_time', full_name='google.cloud.iot.v1.Device.last_event_time', index=5, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='last_state_time', full_name='google.cloud.iot.v1.Device.last_state_time', index=6, + number=20, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='last_config_ack_time', full_name='google.cloud.iot.v1.Device.last_config_ack_time', index=7, + number=14, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='last_config_send_time', full_name='google.cloud.iot.v1.Device.last_config_send_time', index=8, + number=18, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='blocked', full_name='google.cloud.iot.v1.Device.blocked', index=9, + number=19, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='last_error_time', full_name='google.cloud.iot.v1.Device.last_error_time', index=10, + number=10, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='last_error_status', full_name='google.cloud.iot.v1.Device.last_error_status', index=11, + number=11, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='config', full_name='google.cloud.iot.v1.Device.config', index=12, + number=13, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='state', full_name='google.cloud.iot.v1.Device.state', index=13, + number=16, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='metadata', full_name='google.cloud.iot.v1.Device.metadata', index=14, + number=17, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[_DEVICE_METADATAENTRY, ], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=155, + serialized_end=872, +) + + +_DEVICEREGISTRY = _descriptor.Descriptor( + name='DeviceRegistry', + full_name='google.cloud.iot.v1.DeviceRegistry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='id', full_name='google.cloud.iot.v1.DeviceRegistry.id', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='name', full_name='google.cloud.iot.v1.DeviceRegistry.name', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='event_notification_configs', full_name='google.cloud.iot.v1.DeviceRegistry.event_notification_configs', index=2, + number=10, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='state_notification_config', full_name='google.cloud.iot.v1.DeviceRegistry.state_notification_config', index=3, + number=7, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='mqtt_config', full_name='google.cloud.iot.v1.DeviceRegistry.mqtt_config', index=4, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='http_config', full_name='google.cloud.iot.v1.DeviceRegistry.http_config', index=5, + number=9, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='credentials', full_name='google.cloud.iot.v1.DeviceRegistry.credentials', index=6, + number=8, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=875, + serialized_end=1250, +) + + +_MQTTCONFIG = _descriptor.Descriptor( + name='MqttConfig', + full_name='google.cloud.iot.v1.MqttConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='mqtt_enabled_state', full_name='google.cloud.iot.v1.MqttConfig.mqtt_enabled_state', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1252, + serialized_end=1324, +) + + +_HTTPCONFIG = _descriptor.Descriptor( + name='HttpConfig', + full_name='google.cloud.iot.v1.HttpConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='http_enabled_state', full_name='google.cloud.iot.v1.HttpConfig.http_enabled_state', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1326, + serialized_end=1398, +) + + +_EVENTNOTIFICATIONCONFIG = _descriptor.Descriptor( + name='EventNotificationConfig', + full_name='google.cloud.iot.v1.EventNotificationConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='subfolder_matches', full_name='google.cloud.iot.v1.EventNotificationConfig.subfolder_matches', index=0, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='pubsub_topic_name', full_name='google.cloud.iot.v1.EventNotificationConfig.pubsub_topic_name', index=1, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], 
+ nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1400, + serialized_end=1479, +) + + +_STATENOTIFICATIONCONFIG = _descriptor.Descriptor( + name='StateNotificationConfig', + full_name='google.cloud.iot.v1.StateNotificationConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='pubsub_topic_name', full_name='google.cloud.iot.v1.StateNotificationConfig.pubsub_topic_name', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1481, + serialized_end=1533, +) + + +_REGISTRYCREDENTIAL = _descriptor.Descriptor( + name='RegistryCredential', + full_name='google.cloud.iot.v1.RegistryCredential', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='public_key_certificate', full_name='google.cloud.iot.v1.RegistryCredential.public_key_certificate', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='credential', full_name='google.cloud.iot.v1.RegistryCredential.credential', + index=0, containing_type=None, fields=[]), + ], + serialized_start=1535, + serialized_end=1646, +) + + +_X509CERTIFICATEDETAILS = _descriptor.Descriptor( + name='X509CertificateDetails', + full_name='google.cloud.iot.v1.X509CertificateDetails', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='issuer', full_name='google.cloud.iot.v1.X509CertificateDetails.issuer', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='subject', full_name='google.cloud.iot.v1.X509CertificateDetails.subject', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='start_time', full_name='google.cloud.iot.v1.X509CertificateDetails.start_time', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='expiry_time', full_name='google.cloud.iot.v1.X509CertificateDetails.expiry_time', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='signature_algorithm', full_name='google.cloud.iot.v1.X509CertificateDetails.signature_algorithm', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='public_key_type', full_name='google.cloud.iot.v1.X509CertificateDetails.public_key_type', index=5, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1649, + serialized_end=1857, +) + + +_PUBLICKEYCERTIFICATE = _descriptor.Descriptor( + name='PublicKeyCertificate', + full_name='google.cloud.iot.v1.PublicKeyCertificate', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='format', full_name='google.cloud.iot.v1.PublicKeyCertificate.format', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='certificate', full_name='google.cloud.iot.v1.PublicKeyCertificate.certificate', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='x509_details', full_name='google.cloud.iot.v1.PublicKeyCertificate.x509_details', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1860, + serialized_end=2035, +) + + +_DEVICECREDENTIAL = _descriptor.Descriptor( + name='DeviceCredential', + full_name='google.cloud.iot.v1.DeviceCredential', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='public_key', full_name='google.cloud.iot.v1.DeviceCredential.public_key', index=0, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='expiration_time', full_name='google.cloud.iot.v1.DeviceCredential.expiration_time', index=1, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + 
is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + _descriptor.OneofDescriptor( + name='credential', full_name='google.cloud.iot.v1.DeviceCredential.credential', + index=0, containing_type=None, fields=[]), + ], + serialized_start=2038, + serialized_end=2187, +) + + +_PUBLICKEYCREDENTIAL = _descriptor.Descriptor( + name='PublicKeyCredential', + full_name='google.cloud.iot.v1.PublicKeyCredential', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='format', full_name='google.cloud.iot.v1.PublicKeyCredential.format', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='key', full_name='google.cloud.iot.v1.PublicKeyCredential.key', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2189, + serialized_end=2277, +) + + +_DEVICECONFIG = _descriptor.Descriptor( + name='DeviceConfig', + full_name='google.cloud.iot.v1.DeviceConfig', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='version', full_name='google.cloud.iot.v1.DeviceConfig.version', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='cloud_update_time', full_name='google.cloud.iot.v1.DeviceConfig.cloud_update_time', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='device_ack_time', full_name='google.cloud.iot.v1.DeviceConfig.device_ack_time', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='binary_data', full_name='google.cloud.iot.v1.DeviceConfig.binary_data', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2280, + serialized_end=2440, +) + + +_DEVICESTATE = _descriptor.Descriptor( + name='DeviceState', + full_name='google.cloud.iot.v1.DeviceState', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='update_time', full_name='google.cloud.iot.v1.DeviceState.update_time', index=0, + number=1, type=11, cpp_type=10, label=1, + 
has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='binary_data', full_name='google.cloud.iot.v1.DeviceState.binary_data', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=2442, + serialized_end=2525, +) + +_DEVICE_METADATAENTRY.containing_type = _DEVICE +_DEVICE.fields_by_name['credentials'].message_type = _DEVICECREDENTIAL +_DEVICE.fields_by_name['last_heartbeat_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DEVICE.fields_by_name['last_event_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DEVICE.fields_by_name['last_state_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DEVICE.fields_by_name['last_config_ack_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DEVICE.fields_by_name['last_config_send_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DEVICE.fields_by_name['last_error_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DEVICE.fields_by_name['last_error_status'].message_type = google_dot_rpc_dot_status__pb2._STATUS +_DEVICE.fields_by_name['config'].message_type = _DEVICECONFIG +_DEVICE.fields_by_name['state'].message_type = _DEVICESTATE +_DEVICE.fields_by_name['metadata'].message_type = _DEVICE_METADATAENTRY +_DEVICEREGISTRY.fields_by_name['event_notification_configs'].message_type = _EVENTNOTIFICATIONCONFIG +_DEVICEREGISTRY.fields_by_name['state_notification_config'].message_type = _STATENOTIFICATIONCONFIG +_DEVICEREGISTRY.fields_by_name['mqtt_config'].message_type = _MQTTCONFIG +_DEVICEREGISTRY.fields_by_name['http_config'].message_type = _HTTPCONFIG +_DEVICEREGISTRY.fields_by_name['credentials'].message_type = _REGISTRYCREDENTIAL +_MQTTCONFIG.fields_by_name['mqtt_enabled_state'].enum_type = _MQTTSTATE +_HTTPCONFIG.fields_by_name['http_enabled_state'].enum_type = _HTTPSTATE +_REGISTRYCREDENTIAL.fields_by_name['public_key_certificate'].message_type = _PUBLICKEYCERTIFICATE +_REGISTRYCREDENTIAL.oneofs_by_name['credential'].fields.append( + _REGISTRYCREDENTIAL.fields_by_name['public_key_certificate']) +_REGISTRYCREDENTIAL.fields_by_name['public_key_certificate'].containing_oneof = _REGISTRYCREDENTIAL.oneofs_by_name['credential'] +_X509CERTIFICATEDETAILS.fields_by_name['start_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_X509CERTIFICATEDETAILS.fields_by_name['expiry_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_PUBLICKEYCERTIFICATE.fields_by_name['format'].enum_type = _PUBLICKEYCERTIFICATEFORMAT +_PUBLICKEYCERTIFICATE.fields_by_name['x509_details'].message_type = _X509CERTIFICATEDETAILS +_DEVICECREDENTIAL.fields_by_name['public_key'].message_type = _PUBLICKEYCREDENTIAL +_DEVICECREDENTIAL.fields_by_name['expiration_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DEVICECREDENTIAL.oneofs_by_name['credential'].fields.append( + _DEVICECREDENTIAL.fields_by_name['public_key']) 
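+# The assignments in this block resolve references that protoc emits only
+# symbolically: message- and enum-typed fields are bound to their concrete
+# descriptors and oneof fields are attached to their containing oneof, before
+# every message and enum type is registered on the file-level DESCRIPTOR.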
+_DEVICECREDENTIAL.fields_by_name['public_key'].containing_oneof = _DEVICECREDENTIAL.oneofs_by_name['credential'] +_PUBLICKEYCREDENTIAL.fields_by_name['format'].enum_type = _PUBLICKEYFORMAT +_DEVICECONFIG.fields_by_name['cloud_update_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DEVICECONFIG.fields_by_name['device_ack_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_DEVICESTATE.fields_by_name['update_time'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +DESCRIPTOR.message_types_by_name['Device'] = _DEVICE +DESCRIPTOR.message_types_by_name['DeviceRegistry'] = _DEVICEREGISTRY +DESCRIPTOR.message_types_by_name['MqttConfig'] = _MQTTCONFIG +DESCRIPTOR.message_types_by_name['HttpConfig'] = _HTTPCONFIG +DESCRIPTOR.message_types_by_name['EventNotificationConfig'] = _EVENTNOTIFICATIONCONFIG +DESCRIPTOR.message_types_by_name['StateNotificationConfig'] = _STATENOTIFICATIONCONFIG +DESCRIPTOR.message_types_by_name['RegistryCredential'] = _REGISTRYCREDENTIAL +DESCRIPTOR.message_types_by_name['X509CertificateDetails'] = _X509CERTIFICATEDETAILS +DESCRIPTOR.message_types_by_name['PublicKeyCertificate'] = _PUBLICKEYCERTIFICATE +DESCRIPTOR.message_types_by_name['DeviceCredential'] = _DEVICECREDENTIAL +DESCRIPTOR.message_types_by_name['PublicKeyCredential'] = _PUBLICKEYCREDENTIAL +DESCRIPTOR.message_types_by_name['DeviceConfig'] = _DEVICECONFIG +DESCRIPTOR.message_types_by_name['DeviceState'] = _DEVICESTATE +DESCRIPTOR.enum_types_by_name['MqttState'] = _MQTTSTATE +DESCRIPTOR.enum_types_by_name['HttpState'] = _HTTPSTATE +DESCRIPTOR.enum_types_by_name['PublicKeyCertificateFormat'] = _PUBLICKEYCERTIFICATEFORMAT +DESCRIPTOR.enum_types_by_name['PublicKeyFormat'] = _PUBLICKEYFORMAT +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +Device = _reflection.GeneratedProtocolMessageType('Device', (_message.Message,), dict( + + MetadataEntry = _reflection.GeneratedProtocolMessageType('MetadataEntry', (_message.Message,), dict( + DESCRIPTOR = _DEVICE_METADATAENTRY, + __module__ = 'google.cloud.iot_v1.proto.resources_pb2' + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.Device.MetadataEntry) + )) + , + DESCRIPTOR = _DEVICE, + __module__ = 'google.cloud.iot_v1.proto.resources_pb2' + , + __doc__ = """The device resource. + + + Attributes: + id: + The user-defined device identifier. The device ID must be + unique within a device registry. + name: + The resource path name. For example, + ``projects/p1/locations/us- + central1/registries/registry0/devices/dev0`` or + ``projects/p1/locations/us- + central1/registries/registry0/devices/{num_id}``. When + ``name`` is populated as a response from the service, it + always ends in the device numeric ID. + num_id: + [Output only] A server-defined unique numeric ID for the + device. This is a more compact way to identify devices, and it + is globally unique. + credentials: + The credentials used to authenticate this device. To allow + credential rotation without interruption, multiple device + credentials can be bound to this device. No more than 3 + credentials can be bound to a single device at a time. When + new credentials are added to a device, they are verified + against the registry credentials. For details, see the + description of the ``DeviceRegistry.credentials`` field. + last_heartbeat_time: + [Output only] The last time an MQTT ``PINGREQ`` was received. + This field applies only to devices connecting through MQTT. 
+ MQTT clients usually only send ``PINGREQ`` messages if the + connection is idle, and no other messages have been sent. + Timestamps are periodically collected and written to storage; + they may be stale by a few minutes. + last_event_time: + [Output only] The last time a telemetry event was received. + Timestamps are periodically collected and written to storage; + they may be stale by a few minutes. + last_state_time: + [Output only] The last time a state event was received. + Timestamps are periodically collected and written to storage; + they may be stale by a few minutes. + last_config_ack_time: + [Output only] The last time a cloud-to-device config version + acknowledgment was received from the device. This field is + only for configurations sent through MQTT. + last_config_send_time: + [Output only] The last time a cloud-to-device config version + was sent to the device. + blocked: + If a device is blocked, connections or requests from this + device will fail. Can be used to temporarily prevent the + device from connecting if, for example, the sensor is + generating bad data and needs maintenance. + last_error_time: + [Output only] The time the most recent error occurred, such as + a failure to publish to Cloud Pub/Sub. This field is the + timestamp of 'last\_error\_status'. + last_error_status: + [Output only] The error message of the most recent error, such + as a failure to publish to Cloud Pub/Sub. 'last\_error\_time' + is the timestamp of this field. If no errors have occurred, + this field has an empty message and the status code 0 == OK. + Otherwise, this field is expected to have a status code other + than OK. + config: + The most recent device configuration, which is eventually sent + from Cloud IoT Core to the device. If not present on creation, + the configuration will be initialized with an empty payload + and version value of ``1``. To update this field after + creation, use the ``DeviceManager.ModifyCloudToDeviceConfig`` + method. + state: + [Output only] The state most recently received from the + device. If no state has been reported, this field is not + present. + metadata: + The metadata key-value pairs assigned to the device. This + metadata is not interpreted or indexed by Cloud IoT Core. It + can be used to add contextual information for the device. + Keys must conform to the regular expression + [a-zA-Z][a-zA-Z0-9-\_.+~%]+ and be less than 128 bytes in + length. Values are free-form strings. Each value must be less + than or equal to 32 KB in size. The total size of all keys + and values must be less than 256 KB, and the maximum number of + key-value pairs is 500. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.Device) + )) +_sym_db.RegisterMessage(Device) +_sym_db.RegisterMessage(Device.MetadataEntry) + +DeviceRegistry = _reflection.GeneratedProtocolMessageType('DeviceRegistry', (_message.Message,), dict( + DESCRIPTOR = _DEVICEREGISTRY, + __module__ = 'google.cloud.iot_v1.proto.resources_pb2' + , + __doc__ = """A container for a group of devices. + + + Attributes: + id: + The identifier of this device registry. For example, + ``myRegistry``. + name: + The resource path name. For example, ``projects/example- + project/locations/us-central1/registries/my-registry``. + event_notification_configs: + The configuration for notification of telemetry events + received from the device. All telemetry events that were + successfully published by the device and acknowledged by Cloud + IoT Core are guaranteed to be delivered to Cloud Pub/Sub. 
If + multiple configurations match a message, only the first + matching configuration is used. If you try to publish a device + telemetry event using MQTT without specifying a Cloud Pub/Sub + topic for the device's registry, the connection closes + automatically. If you try to do so using an HTTP connection, + an error is returned. Up to 10 configurations may be provided. + state_notification_config: + The configuration for notification of new states received from + the device. State updates are guaranteed to be stored in the + state history, but notifications to Cloud Pub/Sub are not + guaranteed. For example, if permissions are misconfigured or + the specified topic doesn't exist, no notification will be + published but the state will still be stored in Cloud IoT + Core. + mqtt_config: + The MQTT configuration for this device registry. + http_config: + The DeviceService (HTTP) configuration for this device + registry. + credentials: + The credentials used to verify the device credentials. No more + than 10 credentials can be bound to a single registry at a + time. The verification process occurs at the time of device + creation or update. If this field is empty, no verification is + performed. Otherwise, the credentials of a newly created + device or added credentials of an updated device should be + signed with one of these registry credentials. Note, however, + that existing devices will never be affected by modifications + to this list of credentials: after a device has been + successfully created in a registry, it should be able to + connect even if its registry credentials are revoked, deleted, + or modified. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.DeviceRegistry) + )) +_sym_db.RegisterMessage(DeviceRegistry) + +MqttConfig = _reflection.GeneratedProtocolMessageType('MqttConfig', (_message.Message,), dict( + DESCRIPTOR = _MQTTCONFIG, + __module__ = 'google.cloud.iot_v1.proto.resources_pb2' + , + __doc__ = """The configuration of MQTT for a device registry. + + + Attributes: + mqtt_enabled_state: + If enabled, allows connections using the MQTT protocol. + Otherwise, MQTT connections to this registry will fail. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.MqttConfig) + )) +_sym_db.RegisterMessage(MqttConfig) + +HttpConfig = _reflection.GeneratedProtocolMessageType('HttpConfig', (_message.Message,), dict( + DESCRIPTOR = _HTTPCONFIG, + __module__ = 'google.cloud.iot_v1.proto.resources_pb2' + , + __doc__ = """The configuration of the HTTP bridge for a device registry. + + + Attributes: + http_enabled_state: + If enabled, allows devices to use DeviceService via the HTTP + protocol. Otherwise, any requests to DeviceService will fail + for this registry. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.HttpConfig) + )) +_sym_db.RegisterMessage(HttpConfig) + +EventNotificationConfig = _reflection.GeneratedProtocolMessageType('EventNotificationConfig', (_message.Message,), dict( + DESCRIPTOR = _EVENTNOTIFICATIONCONFIG, + __module__ = 'google.cloud.iot_v1.proto.resources_pb2' + , + __doc__ = """The configuration for forwarding telemetry events. + + + Attributes: + subfolder_matches: + If the subfolder name matches this string exactly, this + configuration will be used. The string must not include the + leading '/' character. If empty, all strings are matched. This + field is used only for telemetry events; subfolders are not + supported for state changes. + pubsub_topic_name: + A Cloud Pub/Sub topic name. 
For example, + ``projects/myProject/topics/deviceEvents``. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.EventNotificationConfig) + )) +_sym_db.RegisterMessage(EventNotificationConfig) + +StateNotificationConfig = _reflection.GeneratedProtocolMessageType('StateNotificationConfig', (_message.Message,), dict( + DESCRIPTOR = _STATENOTIFICATIONCONFIG, + __module__ = 'google.cloud.iot_v1.proto.resources_pb2' + , + __doc__ = """The configuration for notification of new states received from the + device. + + + Attributes: + pubsub_topic_name: + A Cloud Pub/Sub topic name. For example, + ``projects/myProject/topics/deviceEvents``. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.StateNotificationConfig) + )) +_sym_db.RegisterMessage(StateNotificationConfig) + +RegistryCredential = _reflection.GeneratedProtocolMessageType('RegistryCredential', (_message.Message,), dict( + DESCRIPTOR = _REGISTRYCREDENTIAL, + __module__ = 'google.cloud.iot_v1.proto.resources_pb2' + , + __doc__ = """A server-stored registry credential used to validate device credentials. + + + Attributes: + credential: + The credential data. Reserved for expansion in the future. + public_key_certificate: + A public key certificate used to verify the device + credentials. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.RegistryCredential) + )) +_sym_db.RegisterMessage(RegistryCredential) + +X509CertificateDetails = _reflection.GeneratedProtocolMessageType('X509CertificateDetails', (_message.Message,), dict( + DESCRIPTOR = _X509CERTIFICATEDETAILS, + __module__ = 'google.cloud.iot_v1.proto.resources_pb2' + , + __doc__ = """Details of an X.509 certificate. For informational purposes only. + + + Attributes: + issuer: + The entity that signed the certificate. + subject: + The entity the certificate and public key belong to. + start_time: + The time the certificate becomes valid. + expiry_time: + The time the certificate becomes invalid. + signature_algorithm: + The algorithm used to sign the certificate. + public_key_type: + The type of public key in the certificate. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.X509CertificateDetails) + )) +_sym_db.RegisterMessage(X509CertificateDetails) + +PublicKeyCertificate = _reflection.GeneratedProtocolMessageType('PublicKeyCertificate', (_message.Message,), dict( + DESCRIPTOR = _PUBLICKEYCERTIFICATE, + __module__ = 'google.cloud.iot_v1.proto.resources_pb2' + , + __doc__ = """A public key certificate format and data. + + + Attributes: + format: + The certificate format. + certificate: + The certificate data. + x509_details: + [Output only] The certificate details. Used only for X.509 + certificates. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.PublicKeyCertificate) + )) +_sym_db.RegisterMessage(PublicKeyCertificate) + +DeviceCredential = _reflection.GeneratedProtocolMessageType('DeviceCredential', (_message.Message,), dict( + DESCRIPTOR = _DEVICECREDENTIAL, + __module__ = 'google.cloud.iot_v1.proto.resources_pb2' + , + __doc__ = """A server-stored device credential used for authentication. + + + Attributes: + credential: + The credential data. Reserved for expansion in the future. + public_key: + A public key used to verify the signature of JSON Web Tokens + (JWTs). When adding a new device credential, either via device + creation or via modifications, this public key credential may + be required to be signed by one of the registry level + certificates. 
More specifically, if the registry contains at + least one certificate, any new device credential must be + signed by one of the registry certificates. As a result, when + the registry contains certificates, only X.509 certificates + are accepted as device credentials. However, if the registry + does not contain a certificate, self-signed certificates and + public keys will be accepted. New device credentials must be + different from every registry-level certificate. + expiration_time: + [Optional] The time at which this credential becomes invalid. + This credential will be ignored for new client authentication + requests after this timestamp; however, it will not be + automatically deleted. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.DeviceCredential) + )) +_sym_db.RegisterMessage(DeviceCredential) + +PublicKeyCredential = _reflection.GeneratedProtocolMessageType('PublicKeyCredential', (_message.Message,), dict( + DESCRIPTOR = _PUBLICKEYCREDENTIAL, + __module__ = 'google.cloud.iot_v1.proto.resources_pb2' + , + __doc__ = """A public key format and data. + + + Attributes: + format: + The format of the key. + key: + The key data. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.PublicKeyCredential) + )) +_sym_db.RegisterMessage(PublicKeyCredential) + +DeviceConfig = _reflection.GeneratedProtocolMessageType('DeviceConfig', (_message.Message,), dict( + DESCRIPTOR = _DEVICECONFIG, + __module__ = 'google.cloud.iot_v1.proto.resources_pb2' + , + __doc__ = """The device configuration. Eventually delivered to devices. + + + Attributes: + version: + [Output only] The version of this update. The version number + is assigned by the server, and is always greater than 0 after + device creation. The version must be 0 on the ``CreateDevice`` + request if a ``config`` is specified; the response of + ``CreateDevice`` will always have a value of 1. + cloud_update_time: + [Output only] The time at which this configuration version was + updated in Cloud IoT Core. This timestamp is set by the + server. + device_ack_time: + [Output only] The time at which Cloud IoT Core received the + acknowledgment from the device, indicating that the device has + received this configuration version. If this field is not + present, the device has not yet acknowledged that it received + this version. Note that when the config was sent to the + device, many config versions may have been available in Cloud + IoT Core while the device was disconnected, and on connection, + only the latest version is sent to the device. Some versions + may never be sent to the device, and therefore are never + acknowledged. This timestamp is set by Cloud IoT Core. + binary_data: + The device configuration data. + """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.DeviceConfig) + )) +_sym_db.RegisterMessage(DeviceConfig) + +DeviceState = _reflection.GeneratedProtocolMessageType('DeviceState', (_message.Message,), dict( + DESCRIPTOR = _DEVICESTATE, + __module__ = 'google.cloud.iot_v1.proto.resources_pb2' + , + __doc__ = """The device state, as reported by the device. + + + Attributes: + update_time: + [Output only] The time at which this state version was updated + in Cloud IoT Core. + binary_data: + The device state data. 
+ """, + # @@protoc_insertion_point(class_scope:google.cloud.iot.v1.DeviceState) + )) +_sym_db.RegisterMessage(DeviceState) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.google.cloud.iot.v1B\016ResourcesProtoP\001Z6google.golang.org/genproto/googleapis/cloud/iot/v1;iot\370\001\001')) +_DEVICE_METADATAENTRY.has_options = True +_DEVICE_METADATAENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')) +# @@protoc_insertion_point(module_scope) diff --git a/iot/google/cloud/iot_v1/proto/resources_pb2_grpc.py b/iot/google/cloud/iot_v1/proto/resources_pb2_grpc.py new file mode 100644 index 000000000000..a89435267cb2 --- /dev/null +++ b/iot/google/cloud/iot_v1/proto/resources_pb2_grpc.py @@ -0,0 +1,3 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +import grpc + diff --git a/iot/google/cloud/iot_v1/types.py b/iot/google/cloud/iot_v1/types.py new file mode 100644 index 000000000000..63611fa64034 --- /dev/null +++ b/iot/google/cloud/iot_v1/types.py @@ -0,0 +1,55 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import +import sys + +from google.api_core.protobuf_helpers import get_messages + +from google.api import http_pb2 +from google.cloud.iot_v1.proto import device_manager_pb2 +from google.cloud.iot_v1.proto import resources_pb2 +from google.iam.v1 import iam_policy_pb2 +from google.iam.v1 import policy_pb2 +from google.iam.v1.logging import audit_data_pb2 +from google.protobuf import any_pb2 +from google.protobuf import descriptor_pb2 +from google.protobuf import duration_pb2 +from google.protobuf import empty_pb2 +from google.protobuf import field_mask_pb2 +from google.protobuf import timestamp_pb2 +from google.rpc import status_pb2 + +names = [] +for module in ( + http_pb2, + device_manager_pb2, + resources_pb2, + iam_policy_pb2, + policy_pb2, + audit_data_pb2, + any_pb2, + descriptor_pb2, + duration_pb2, + empty_pb2, + field_mask_pb2, + timestamp_pb2, + status_pb2, +): + for name, message in get_messages(module).items(): + message.__module__ = 'google.cloud.iot_v1.types' + setattr(sys.modules[__name__], name, message) + names.append(name) + +__all__ = tuple(sorted(names)) diff --git a/iot/nox.py b/iot/nox.py new file mode 100644 index 000000000000..e7eebe585f33 --- /dev/null +++ b/iot/nox.py @@ -0,0 +1,76 @@ +# Copyright 2018 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+from __future__ import absolute_import
+import os
+
+import nox
+
+
+@nox.session
+def default(session):
+    return unit(session, 'default')
+
+
+@nox.session
+@nox.parametrize('py', ['2.7', '3.5', '3.6', '3.7'])
+def unit(session, py):
+    """Run the unit test suite."""
+
+    # Run unit tests against all supported versions of Python.
+    if py != 'default':
+        session.interpreter = 'python{}'.format(py)
+
+    # Set the virtualenv directory name.
+    session.virtualenv_dirname = 'unit-' + py
+
+    # Install all test dependencies, then install this package in-place.
+    session.install('pytest')
+    session.install('-e', '.')
+
+    # Run py.test against the unit tests.
+    session.run('py.test', '--quiet', os.path.join('tests', 'unit'))
+
+
+@nox.session
+@nox.parametrize('py', ['2.7', '3.7'])
+def system(session, py):
+    """Run the system test suite."""
+
+    # Sanity check: Only run system tests if the environment variable is set.
+    if not os.environ.get('GOOGLE_APPLICATION_CREDENTIALS', ''):
+        session.skip('Credentials must be set via environment variable.')
+
+    # Run system tests against the supported versions of Python.
+    session.interpreter = 'python{}'.format(py)
+
+    # Set the virtualenv directory name.
+    session.virtualenv_dirname = 'sys-' + py
+
+    # Install all test dependencies, then install this package in-place.
+    session.install('pytest')
+    session.install('-e', '.')
+
+    # Run py.test against the system tests.
+    session.run('py.test', '--quiet', os.path.join('tests', 'system'),
+                *session.posargs)
+
+
+@nox.session
+def lint_setup_py(session):
+    """Verify that setup.py is valid (including RST check)."""
+    session.interpreter = 'python3.6'
+    session.install('docutils', 'pygments')
+    session.run('python', 'setup.py', 'check', '--restructuredtext',
+                '--strict')
diff --git a/iot/setup.cfg b/iot/setup.cfg
new file mode 100644
index 000000000000..2a9acf13daa9
--- /dev/null
+++ b/iot/setup.cfg
@@ -0,0 +1,2 @@
+[bdist_wheel]
+universal = 1
diff --git a/iot/setup.py b/iot/setup.py
new file mode 100644
index 000000000000..bfe386092720
--- /dev/null
+++ b/iot/setup.py
@@ -0,0 +1,74 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
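+
+# Distribution metadata for google-cloud-iot; runtime dependencies are kept
+# to compatible ranges of google-api-core[grpc] and grpc-google-iam-v1, with
+# the enum34 backport required only on Python < 3.4.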
+
+import io
+import os
+
+import setuptools
+
+name = 'google-cloud-iot'
+description = 'Cloud IoT API client library'
+version = '0.1.0'
+release_status = 'Development Status :: 3 - Alpha'
+dependencies = [
+    'google-api-core[grpc] >= 1.1.0, < 2.0.0dev',
+    'grpc-google-iam-v1 >= 0.11.1, < 0.12dev',
+    'enum34; python_version < "3.4"',
+]
+
+package_root = os.path.abspath(os.path.dirname(__file__))
+
+readme_filename = os.path.join(package_root, 'README.rst')
+with io.open(readme_filename, encoding='utf-8') as readme_file:
+    readme = readme_file.read()
+
+packages = [
+    package for package in setuptools.find_packages()
+    if package.startswith('google')
+]
+
+namespaces = ['google']
+if 'google.cloud' in packages:
+    namespaces.append('google.cloud')
+
+setuptools.setup(
+    name=name,
+    version=version,
+    description=description,
+    long_description=readme,
+    author='Google LLC',
+    author_email='googleapis-packages@google.com',
+    license='Apache 2.0',
+    url='https://github.com/GoogleCloudPlatform/google-cloud-python',
+    classifiers=[
+        release_status,
+        'Intended Audience :: Developers',
+        'License :: OSI Approved :: Apache Software License',
+        'Programming Language :: Python',
+        'Programming Language :: Python :: 2',
+        'Programming Language :: Python :: 2.7',
+        'Programming Language :: Python :: 3',
+        'Programming Language :: Python :: 3.4',
+        'Programming Language :: Python :: 3.5',
+        'Programming Language :: Python :: 3.6',
+        'Operating System :: OS Independent',
+        'Topic :: Internet',
+    ],
+    platforms='Posix; MacOS X; Windows',
+    packages=packages,
+    namespace_packages=namespaces,
+    install_requires=dependencies,
+    include_package_data=True,
+    zip_safe=False,
+)
diff --git a/iot/tests/system/gapic/v1/test_system_device_manager_v1.py b/iot/tests/system/gapic/v1/test_system_device_manager_v1.py
new file mode 100644
index 000000000000..c42e12536473
--- /dev/null
+++ b/iot/tests/system/gapic/v1/test_system_device_manager_v1.py
@@ -0,0 +1,28 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import time
+
+from google.cloud import iot_v1
+from google.cloud.iot_v1.proto import device_manager_pb2
+
+
+class TestSystemDeviceManager(object):
+    def test_list_device_registries(self):
+        project_id = os.environ['PROJECT_ID']
+
+        client = iot_v1.DeviceManagerClient()
+        parent = client.location_path(project_id, "us-central1")
+        response = client.list_device_registries(parent)
diff --git a/iot/tests/unit/gapic/v1/test_device_manager_client_v1.py b/iot/tests/unit/gapic/v1/test_device_manager_client_v1.py
new file mode 100644
index 000000000000..b632e5cec3a3
--- /dev/null
+++ b/iot/tests/unit/gapic/v1/test_device_manager_client_v1.py
@@ -0,0 +1,654 @@
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Unit tests."""
+
+import pytest
+
+from google.cloud import iot_v1
+from google.cloud.iot_v1.proto import device_manager_pb2
+from google.cloud.iot_v1.proto import resources_pb2
+from google.iam.v1 import iam_policy_pb2
+from google.iam.v1 import policy_pb2
+from google.protobuf import empty_pb2
+from google.protobuf import field_mask_pb2
+
+
+class MultiCallableStub(object):
+    """Stub for the grpc.UnaryUnaryMultiCallable interface."""
+
+    def __init__(self, method, channel_stub):
+        self.method = method
+        self.channel_stub = channel_stub
+
+    def __call__(self, request, timeout=None, metadata=None, credentials=None):
+        self.channel_stub.requests.append((self.method, request))
+
+        response = None
+        if self.channel_stub.responses:
+            response = self.channel_stub.responses.pop()
+
+        if isinstance(response, Exception):
+            raise response
+
+        if response:
+            return response
+
+
+class ChannelStub(object):
+    """Stub for the grpc.Channel interface."""
+
+    def __init__(self, responses=None):
+        self.responses = [] if responses is None else responses
+        self.requests = []
+
+    def unary_unary(self,
+                    method,
+                    request_serializer=None,
+                    response_deserializer=None):
+        return MultiCallableStub(method, self)
+
+
+class CustomException(Exception):
+    pass
+
+
+class TestDeviceManagerClient(object):
+    def test_create_device_registry(self):
+        # Setup Expected Response
+        id_ = 'id3355'
+        name = 'name3373707'
+        expected_response = {'id': id_, 'name': name}
+        expected_response = resources_pb2.DeviceRegistry(**expected_response)
+
+        # Mock the API response
+        channel = ChannelStub(responses=[expected_response])
+        client = iot_v1.DeviceManagerClient(channel=channel)
+
+        # Setup Request
+        parent = client.location_path('[PROJECT]', '[LOCATION]')
+        device_registry = {}
+
+        response = client.create_device_registry(parent, device_registry)
+        assert expected_response == response
+
+        assert len(channel.requests) == 1
+        expected_request = device_manager_pb2.CreateDeviceRegistryRequest(
+            parent=parent, device_registry=device_registry)
+        actual_request = channel.requests[0][1]
+        assert expected_request == actual_request
+
+    def test_create_device_registry_exception(self):
+        # Mock the API response
+        channel = ChannelStub(responses=[CustomException()])
+        client = iot_v1.DeviceManagerClient(channel=channel)
+
+        # Setup request
+        parent = client.location_path('[PROJECT]', '[LOCATION]')
+        device_registry = {}
+
+        with pytest.raises(CustomException):
+            client.create_device_registry(parent, device_registry)
+
+    def test_get_device_registry(self):
+        # Setup Expected Response
+        id_ = 'id3355'
+        name_2 = 'name2-1052831874'
+        expected_response = {'id': id_, 'name': name_2}
+        expected_response = resources_pb2.DeviceRegistry(**expected_response)
+
+        # Mock the API response
+        channel = ChannelStub(responses=[expected_response])
+        client = iot_v1.DeviceManagerClient(channel=channel)
+
+        # Setup Request
+        name = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]')
+
+        response = client.get_device_registry(name)
+        assert expected_response == response
+
+        assert len(channel.requests) == 1
+        expected_request = device_manager_pb2.GetDeviceRegistryRequest(
+ name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_device_registry_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup request + name = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]') + + with pytest.raises(CustomException): + client.get_device_registry(name) + + def test_update_device_registry(self): + # Setup Expected Response + id_ = 'id3355' + name = 'name3373707' + expected_response = {'id': id_, 'name': name} + expected_response = resources_pb2.DeviceRegistry(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup Request + device_registry = {} + update_mask = {} + + response = client.update_device_registry(device_registry, update_mask) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = device_manager_pb2.UpdateDeviceRegistryRequest( + device_registry=device_registry, update_mask=update_mask) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_device_registry_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup request + device_registry = {} + update_mask = {} + + with pytest.raises(CustomException): + client.update_device_registry(device_registry, update_mask) + + def test_delete_device_registry(self): + channel = ChannelStub() + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup Request + name = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]') + + client.delete_device_registry(name) + + assert len(channel.requests) == 1 + expected_request = device_manager_pb2.DeleteDeviceRegistryRequest( + name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_device_registry_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup request + name = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]') + + with pytest.raises(CustomException): + client.delete_device_registry(name) + + def test_list_device_registries(self): + # Setup Expected Response + next_page_token = '' + device_registries_element = {} + device_registries = [device_registries_element] + expected_response = { + 'next_page_token': next_page_token, + 'device_registries': device_registries + } + expected_response = device_manager_pb2.ListDeviceRegistriesResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup Request + parent = client.location_path('[PROJECT]', '[LOCATION]') + + paged_list_response = client.list_device_registries(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.device_registries[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = device_manager_pb2.ListDeviceRegistriesRequest( + parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_device_registries_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = 
iot_v1.DeviceManagerClient(channel=channel) + + # Setup request + parent = client.location_path('[PROJECT]', '[LOCATION]') + + paged_list_response = client.list_device_registries(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_create_device(self): + # Setup Expected Response + id_ = 'id3355' + name = 'name3373707' + num_id = 1034366860 + blocked = True + expected_response = { + 'id': id_, + 'name': name, + 'num_id': num_id, + 'blocked': blocked + } + expected_response = resources_pb2.Device(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup Request + parent = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]') + device = {} + + response = client.create_device(parent, device) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = device_manager_pb2.CreateDeviceRequest( + parent=parent, device=device) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_create_device_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup request + parent = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]') + device = {} + + with pytest.raises(CustomException): + client.create_device(parent, device) + + def test_get_device(self): + # Setup Expected Response + id_ = 'id3355' + name_2 = 'name2-1052831874' + num_id = 1034366860 + blocked = True + expected_response = { + 'id': id_, + 'name': name_2, + 'num_id': num_id, + 'blocked': blocked + } + expected_response = resources_pb2.Device(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup Request + name = client.device_path('[PROJECT]', '[LOCATION]', '[REGISTRY]', + '[DEVICE]') + + response = client.get_device(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = device_manager_pb2.GetDeviceRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_device_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup request + name = client.device_path('[PROJECT]', '[LOCATION]', '[REGISTRY]', + '[DEVICE]') + + with pytest.raises(CustomException): + client.get_device(name) + + def test_update_device(self): + # Setup Expected Response + id_ = 'id3355' + name = 'name3373707' + num_id = 1034366860 + blocked = True + expected_response = { + 'id': id_, + 'name': name, + 'num_id': num_id, + 'blocked': blocked + } + expected_response = resources_pb2.Device(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup Request + device = {} + update_mask = {} + + response = client.update_device(device, update_mask) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = device_manager_pb2.UpdateDeviceRequest( + device=device, update_mask=update_mask) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_update_device_exception(self): + # Mock the API response + channel = 
ChannelStub(responses=[CustomException()]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup request + device = {} + update_mask = {} + + with pytest.raises(CustomException): + client.update_device(device, update_mask) + + def test_delete_device(self): + channel = ChannelStub() + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup Request + name = client.device_path('[PROJECT]', '[LOCATION]', '[REGISTRY]', + '[DEVICE]') + + client.delete_device(name) + + assert len(channel.requests) == 1 + expected_request = device_manager_pb2.DeleteDeviceRequest(name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_delete_device_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup request + name = client.device_path('[PROJECT]', '[LOCATION]', '[REGISTRY]', + '[DEVICE]') + + with pytest.raises(CustomException): + client.delete_device(name) + + def test_list_devices(self): + # Setup Expected Response + next_page_token = '' + devices_element = {} + devices = [devices_element] + expected_response = { + 'next_page_token': next_page_token, + 'devices': devices + } + expected_response = device_manager_pb2.ListDevicesResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup Request + parent = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]') + + paged_list_response = client.list_devices(parent) + resources = list(paged_list_response) + assert len(resources) == 1 + + assert expected_response.devices[0] == resources[0] + + assert len(channel.requests) == 1 + expected_request = device_manager_pb2.ListDevicesRequest(parent=parent) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_devices_exception(self): + channel = ChannelStub(responses=[CustomException()]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup request + parent = client.registry_path('[PROJECT]', '[LOCATION]', '[REGISTRY]') + + paged_list_response = client.list_devices(parent) + with pytest.raises(CustomException): + list(paged_list_response) + + def test_modify_cloud_to_device_config(self): + # Setup Expected Response + version = 351608024 + binary_data_2 = b'-37' + expected_response = {'version': version, 'binary_data': binary_data_2} + expected_response = resources_pb2.DeviceConfig(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup Request + name = client.device_path('[PROJECT]', '[LOCATION]', '[REGISTRY]', + '[DEVICE]') + binary_data = b'40' + + response = client.modify_cloud_to_device_config(name, binary_data) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = device_manager_pb2.ModifyCloudToDeviceConfigRequest( + name=name, binary_data=binary_data) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_modify_cloud_to_device_config_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup request + name = client.device_path('[PROJECT]', '[LOCATION]', '[REGISTRY]', + '[DEVICE]') + binary_data = b'40' + + with 
pytest.raises(CustomException): + client.modify_cloud_to_device_config(name, binary_data) + + def test_list_device_config_versions(self): + # Setup Expected Response + expected_response = {} + expected_response = device_manager_pb2.ListDeviceConfigVersionsResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup Request + name = client.device_path('[PROJECT]', '[LOCATION]', '[REGISTRY]', + '[DEVICE]') + + response = client.list_device_config_versions(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = device_manager_pb2.ListDeviceConfigVersionsRequest( + name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_device_config_versions_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup request + name = client.device_path('[PROJECT]', '[LOCATION]', '[REGISTRY]', + '[DEVICE]') + + with pytest.raises(CustomException): + client.list_device_config_versions(name) + + def test_list_device_states(self): + # Setup Expected Response + expected_response = {} + expected_response = device_manager_pb2.ListDeviceStatesResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup Request + name = client.device_path('[PROJECT]', '[LOCATION]', '[REGISTRY]', + '[DEVICE]') + + response = client.list_device_states(name) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = device_manager_pb2.ListDeviceStatesRequest( + name=name) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_list_device_states_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup request + name = client.device_path('[PROJECT]', '[LOCATION]', '[REGISTRY]', + '[DEVICE]') + + with pytest.raises(CustomException): + client.list_device_states(name) + + def test_set_iam_policy(self): + # Setup Expected Response + version = 351608024 + etag = b'21' + expected_response = {'version': version, 'etag': etag} + expected_response = policy_pb2.Policy(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup Request + resource = client.registry_path('[PROJECT]', '[LOCATION]', + '[REGISTRY]') + policy = {} + + response = client.set_iam_policy(resource, policy) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = iam_policy_pb2.SetIamPolicyRequest( + resource=resource, policy=policy) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_set_iam_policy_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup request + resource = client.registry_path('[PROJECT]', '[LOCATION]', + '[REGISTRY]') + policy = {} + + with pytest.raises(CustomException): + client.set_iam_policy(resource, policy) + + def test_get_iam_policy(self): + # Setup Expected Response + version = 351608024 + etag = b'21' + 
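+        # Wrap the primitive fields in a Policy message so the stubbed
+        # channel returns a realistic response.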
expected_response = {'version': version, 'etag': etag} + expected_response = policy_pb2.Policy(**expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup Request + resource = client.registry_path('[PROJECT]', '[LOCATION]', + '[REGISTRY]') + + response = client.get_iam_policy(resource) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = iam_policy_pb2.GetIamPolicyRequest( + resource=resource) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_get_iam_policy_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup request + resource = client.registry_path('[PROJECT]', '[LOCATION]', + '[REGISTRY]') + + with pytest.raises(CustomException): + client.get_iam_policy(resource) + + def test_test_iam_permissions(self): + # Setup Expected Response + expected_response = {} + expected_response = iam_policy_pb2.TestIamPermissionsResponse( + **expected_response) + + # Mock the API response + channel = ChannelStub(responses=[expected_response]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup Request + resource = client.registry_path('[PROJECT]', '[LOCATION]', + '[REGISTRY]') + permissions = [] + + response = client.test_iam_permissions(resource, permissions) + assert expected_response == response + + assert len(channel.requests) == 1 + expected_request = iam_policy_pb2.TestIamPermissionsRequest( + resource=resource, permissions=permissions) + actual_request = channel.requests[0][1] + assert expected_request == actual_request + + def test_test_iam_permissions_exception(self): + # Mock the API response + channel = ChannelStub(responses=[CustomException()]) + client = iot_v1.DeviceManagerClient(channel=channel) + + # Setup request + resource = client.registry_path('[PROJECT]', '[LOCATION]', + '[REGISTRY]') + permissions = [] + + with pytest.raises(CustomException): + client.test_iam_permissions(resource, permissions) From f6bc038f27da83750ad4399f1b863c6ac155a761 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 22 May 2018 15:46:45 -0700 Subject: [PATCH 28/75] Release 0.30.0 (#5356) --- error_reporting/CHANGELOG.md | 10 ++++++++++ error_reporting/setup.py | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/error_reporting/CHANGELOG.md b/error_reporting/CHANGELOG.md index 850fd44b4954..dd7d42e26068 100644 --- a/error_reporting/CHANGELOG.md +++ b/error_reporting/CHANGELOG.md @@ -4,6 +4,16 @@ [1]: https://pypi.org/project/google-cloud-error-reporting/#history +## 0.30.0 + +### Implementation Changes +- Make dependency on logging less restrictive in error_reporting (#5345) + +### Internal / Testing Changes +- Modify system tests to use prerelease versions of grpcio (#5304) +- Add Test runs for Python 3.7 and remove 3.4 (#5295) +- Fix bad trove classifier + ## 0.29.1 ### Dependencies diff --git a/error_reporting/setup.py b/error_reporting/setup.py index 8fc1638fd35f..1fe16f1286d2 100644 --- a/error_reporting/setup.py +++ b/error_reporting/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-error-reporting' description = 'Stackdriver Error Reporting API client library' -version = '0.29.1' +version = '0.30.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 872bff396c2bebfa6355e6ec7a4b0413eac6bed3 Mon Sep 17 00:00:00 
2001 From: Alix Hamilton Date: Wed, 23 May 2018 09:54:29 -0700 Subject: [PATCH 29/75] BigQuery: adds load partitioned table sample and refactors existing partition samples (#5368) --- docs/bigquery/snippets.py | 97 +++++++++++++++++++++++++++------------ 1 file changed, 68 insertions(+), 29 deletions(-) diff --git a/docs/bigquery/snippets.py b/docs/bigquery/snippets.py index d5d22b7d8520..2db75242718e 100644 --- a/docs/bigquery/snippets.py +++ b/docs/bigquery/snippets.py @@ -514,21 +514,15 @@ def test_create_partitioned_table(client, to_delete): table_ref = dataset_ref.table('my_partitioned_table') schema = [ - bigquery.SchemaField('requested_url', 'STRING', mode='REQUIRED'), - bigquery.SchemaField('request_ts', 'TIMESTAMP', mode='REQUIRED'), - bigquery.SchemaField( - 'response_info', 'RECORD', fields=[ - bigquery.SchemaField('backend_server', 'STRING'), - bigquery.SchemaField('response_ms', 'INTEGER'), - ] - ), + bigquery.SchemaField('name', 'STRING'), + bigquery.SchemaField('post_abbr', 'STRING'), + bigquery.SchemaField('date', 'DATE') ] table = bigquery.Table(table_ref, schema=schema) - time_partitioning = bigquery.TimePartitioning( + table.time_partitioning = bigquery.TimePartitioning( type_=bigquery.TimePartitioningType.DAY, - field='request_ts', # name of column to use for partitioning + field='date', # name of column to use for partitioning expiration_ms=7776000000) # 90 days - table.time_partitioning = time_partitioning table = client.create_table(table) @@ -537,32 +531,79 @@ def test_create_partitioned_table(client, to_delete): # [END bigquery_create_table_partitioned] assert table.time_partitioning.type_ == 'DAY' - assert table.time_partitioning.field == 'request_ts' + assert table.time_partitioning.field == 'date' assert table.time_partitioning.expiration_ms == 7776000000 -def test_query_partitioned_table(client, to_delete): +def test_load_and_query_partitioned_table(client, to_delete): + dataset_id = 'load_partitioned_table_dataset_{}'.format(_millis()) + dataset = bigquery.Dataset(client.dataset(dataset_id)) + client.create_dataset(dataset) + to_delete.append(dataset) + + # [START bigquery_load_table_partitioned] + # from google.cloud import bigquery + # client = bigquery.Client() + # dataset_id = 'my_dataset' + table_id = 'us_states_by_date' + + dataset_ref = client.dataset(dataset_id) + job_config = bigquery.LoadJobConfig() + job_config.schema = [ + bigquery.SchemaField('name', 'STRING'), + bigquery.SchemaField('post_abbr', 'STRING'), + bigquery.SchemaField('date', 'DATE') + ] + job_config.skip_leading_rows = 1 + job_config.time_partitioning = bigquery.TimePartitioning( + type_=bigquery.TimePartitioningType.DAY, + field='date', # name of column to use for partitioning + expiration_ms=7776000000) # 90 days + uri = 'gs://cloud-samples-data/bigquery/us-states/us-states-by-date.csv' + + load_job = client.load_table_from_uri( + uri, + dataset_ref.table(table_id), + job_config=job_config) # API request + + assert load_job.job_type == 'load' + + load_job.result() # Waits for table load to complete. 
+ + table = client.get_table(dataset_ref.table(table_id)) + print("Loaded {} rows to table {}".format(table.num_rows, table_id)) + # [END bigquery_load_table_partitioned] + assert table.num_rows == 50 + + project_id = client.project + # [START bigquery_query_partitioned_table] import datetime - import pytz # from google.cloud import bigquery # client = bigquery.Client() + # project_id = 'my-project' + # dataset_id = 'my_dataset' + table_id = 'us_states_by_date' - sql = """ + sql_template = """ SELECT * - FROM `bigquery-partition-samples.samples.stackoverflow_comments` - WHERE creation_date > @mytime + FROM `{}.{}.{}` + WHERE date BETWEEN @start_date AND @end_date """ - query_parameters = [ + sql = sql_template.format(project_id, dataset_id, table_id) + job_config = bigquery.QueryJobConfig() + job_config.query_parameters = [ bigquery.ScalarQueryParameter( - 'mytime', - 'TIMESTAMP', - datetime.datetime(2016, 1, 1, 0, 0, tzinfo=pytz.UTC) + 'start_date', + 'DATE', + datetime.date(1800, 1, 1) + ), + bigquery.ScalarQueryParameter( + 'end_date', + 'DATE', + datetime.date(1899, 12, 31) ) ] - job_config = bigquery.QueryJobConfig() - job_config.query_parameters = query_parameters - job_config.dry_run = True query_job = client.query( sql, @@ -570,12 +611,10 @@ def test_query_partitioned_table(client, to_delete): location='US', job_config=job_config) # API request - # A dry run query completes immediately. - assert query_job.state == 'DONE' - - print("This query will process {} bytes.".format( - query_job.total_bytes_processed)) + rows = list(query_job) + print("{} states were admitted to the US in the 1800s".format(len(rows))) # [END bigquery_query_partitioned_table] + assert len(rows) == 29 def test_get_table_information(client, to_delete): From f79717decde5c1aec3e42e69f8bc1f0512797a73 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 10:30:01 -0700 Subject: [PATCH 30/75] Make re-open failures bubble to callbacks (#5372) --- .../pubsub_v1/subscriber/_protocol/bidi.py | 26 ++++++++-- .../unit/pubsub_v1/subscriber/test_bidi.py | 50 +++++++++++++++++-- 2 files changed, 68 insertions(+), 8 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py index 215e38e9ac68..518c7c91275e 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py @@ -170,8 +170,8 @@ def __init__(self, start_rpc, initial_request=None): self._request_queue = queue.Queue() self._request_generator = None self._is_active = False - self.call = None self._callbacks = [] + self.call = None def add_done_callback(self, callback): """Adds a callback that will be called when the RPC terminates. @@ -311,14 +311,25 @@ def __init__(self, start_rpc, should_recover, initial_request=None): super(ResumableBidiRpc, self).__init__(start_rpc, initial_request) self._should_recover = should_recover self._operational_lock = threading.Lock() + self._finalized = False + self._finalize_lock = threading.Lock() + + def _finalize(self, result): + with self._finalize_lock: + if self._finalized: + return + + for callback in self._callbacks: + callback(result) + + self._finalized = True def _on_call_done(self, future): # Unlike the base class, we only execute the callbacks on a terminal # error, not for errors that we can recover from. Note that grpc's # "future" here is also a grpc.RpcError. 
if not self._should_recover(future): - for callback in self._callbacks: - callback(future) + self._finalize(future) def _reopen(self): with self._operational_lock: @@ -330,7 +341,14 @@ def _reopen(self): # Request generator should exit cleanly since the RPC its bound to # has exited. self.request_generator = None - self.open() + + try: + self.open() + # If re-opening fails, consider this a terminal error and finalize + # the object. + except Exception as exc: + self._finalize(exc) + raise def _recoverable(self, method, *args, **kwargs): """Wraps a method to recover the stream and retry on error. diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py index 4e79ee3e6ce4..b040e7f97887 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py @@ -310,7 +310,7 @@ def test_send_recover(self): grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]) - should_recover = mock.Mock(autospec=['__call__'], return_value=True) + should_recover = mock.Mock(spec=['__call__'], return_value=True) bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) bidi_rpc.open() @@ -331,7 +331,7 @@ def test_send_failure(self): grpc.StreamStreamMultiCallable, instance=True, return_value=call) - should_recover = mock.Mock(autospec=['__call__'], return_value=False) + should_recover = mock.Mock(spec=['__call__'], return_value=False) bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) bidi_rpc.open() @@ -355,7 +355,7 @@ def test_recv_recover(self): grpc.StreamStreamMultiCallable, instance=True, side_effect=[call_1, call_2]) - should_recover = mock.Mock(autospec=['__call__'], return_value=True) + should_recover = mock.Mock(spec=['__call__'], return_value=True) bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) bidi_rpc.open() @@ -412,7 +412,7 @@ def test_recv_failure(self): grpc.StreamStreamMultiCallable, instance=True, return_value=call) - should_recover = mock.Mock(autospec=['__call__'], return_value=False) + should_recover = mock.Mock(spec=['__call__'], return_value=False) bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) bidi_rpc.open() @@ -426,6 +426,48 @@ def test_recv_failure(self): assert bidi_rpc.is_active is False assert call.cancelled is True + def test_reopen_failure_on_rpc_restart(self): + error1 = ValueError('1') + error2 = ValueError('2') + call = CallStub([error1]) + # Invoking start RPC a second time will trigger an error. 
+ start_rpc = mock.create_autospec( + grpc.StreamStreamMultiCallable, + instance=True, + side_effect=[call, error2]) + should_recover = mock.Mock(spec=['__call__'], return_value=True) + callback = mock.Mock(spec=['__call__']) + + bidi_rpc = bidi.ResumableBidiRpc(start_rpc, should_recover) + bidi_rpc.add_done_callback(callback) + + bidi_rpc.open() + + with pytest.raises(ValueError) as exc_info: + bidi_rpc.recv() + + assert exc_info.value == error2 + should_recover.assert_called_once_with(error1) + assert bidi_rpc.call is None + assert bidi_rpc.is_active is False + callback.assert_called_once_with(error2) + + def test_finalize_idempotent(self): + error1 = ValueError('1') + error2 = ValueError('2') + callback = mock.Mock(spec=['__call__']) + should_recover = mock.Mock(spec=['__call__'], return_value=False) + + bidi_rpc = bidi.ResumableBidiRpc( + mock.sentinel.start_rpc, should_recover) + + bidi_rpc.add_done_callback(callback) + + bidi_rpc._on_call_done(error1) + bidi_rpc._on_call_done(error2) + + callback.assert_called_once_with(error1) + class TestBackgroundConsumer(object): def test_consume_once_then_exit(self): From 1edb9009239324415e7ea1f98f09e248ab4260bc Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 11:10:58 -0700 Subject: [PATCH 31/75] Fix example in subscribe's documentation (#5375) --- pubsub/google/cloud/pubsub_v1/subscriber/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/client.py b/pubsub/google/cloud/pubsub_v1/subscriber/client.py index b567ed6cb9f2..c4906bbbeb21 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -135,7 +135,7 @@ def callback(message): print(message) message.ack() - future = subscriber.subscribe_experimental( + future = subscriber.subscribe( subscription, callback) try: From ebe6a9f66310d53c25158c370708361d4f6ae5c7 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 14:00:10 -0700 Subject: [PATCH 32/75] Make leaser exit more quickly (#5373) --- .../pubsub_v1/subscriber/_protocol/leaser.py | 2 +- .../unit/pubsub_v1/subscriber/test_leaser.py | 29 ++++++++----------- 2 files changed, 13 insertions(+), 18 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py index 02e78577ff70..c3ef6565587a 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/leaser.py @@ -159,7 +159,7 @@ def maintain_leases(self): # where there are many clients. snooze = random.uniform(0.0, p99 * 0.9) _LOGGER.debug('Snoozing lease management for %f seconds.', snooze) - time.sleep(snooze) + self._stop_event.wait(timeout=snooze) _LOGGER.info('%s exiting.', _LEASE_WORKER_NAME) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py index 6c16276e8f15..447fa79f5036 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_leaser.py @@ -119,20 +119,20 @@ def test_maintain_leases_stopped(caplog): assert 'exiting' in caplog.text -def make_sleep_mark_manager_as_inactive(sleep, manager): +def make_sleep_mark_manager_as_inactive(leaser): # Make sleep mark the manager as inactive so that maintain_leases # exits at the end of the first run. 
- def trigger_inactive(seconds): - assert 0 < seconds < 10 - manager.is_active = False - sleep.side_effect = trigger_inactive + def trigger_inactive(timeout): + assert 0 < timeout < 10 + leaser._manager.is_active = False + leaser._stop_event.wait = trigger_inactive -@mock.patch('time.sleep', autospec=True) -def test_maintain_leases_ack_ids(sleep): + +def test_maintain_leases_ack_ids(): manager = create_manager() - make_sleep_mark_manager_as_inactive(sleep, manager) leaser_ = leaser.Leaser(manager) + make_sleep_mark_manager_as_inactive(leaser_) leaser_.add([requests.LeaseRequest(ack_id='my ack id', byte_size=50)]) leaser_.maintain_leases() @@ -143,27 +143,23 @@ def test_maintain_leases_ack_ids(sleep): seconds=10, ) ]) - sleep.assert_called() -@mock.patch('time.sleep', autospec=True) -def test_maintain_leases_no_ack_ids(sleep): +def test_maintain_leases_no_ack_ids(): manager = create_manager() - make_sleep_mark_manager_as_inactive(sleep, manager) leaser_ = leaser.Leaser(manager) + make_sleep_mark_manager_as_inactive(leaser_) leaser_.maintain_leases() manager.dispatcher.modify_ack_deadline.assert_not_called() - sleep.assert_called() @mock.patch('time.time', autospec=True) -@mock.patch('time.sleep', autospec=True) -def test_maintain_leases_outdated_items(sleep, time): +def test_maintain_leases_outdated_items(time): manager = create_manager() - make_sleep_mark_manager_as_inactive(sleep, manager) leaser_ = leaser.Leaser(manager) + make_sleep_mark_manager_as_inactive(leaser_) # Add these items at the beginning of the timeline time.return_value = 0 @@ -190,7 +186,6 @@ def test_maintain_leases_outdated_items(sleep, time): manager.dispatcher.drop.assert_called_once_with([ requests.DropRequest(ack_id='ack1', byte_size=50) ]) - sleep.assert_called() @mock.patch('threading.Thread', autospec=True) From ba9be1324c91de7218a9da72ff88488b625f9cd8 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 14:00:26 -0700 Subject: [PATCH 33/75] Initialize references to helper threads before starting them (#5374) --- .../_protocol/streaming_pull_manager.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 2fb93e7cfda7..e5219fc4eecd 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -241,22 +241,26 @@ def open(self, callback): self._callback = functools.partial(_wrap_callback_errors, callback) - # Start the thread to pass the requests. - self._dispatcher = dispatcher.Dispatcher(self, self._scheduler.queue) - self._dispatcher.start() - - # Start consuming messages. + # Create the RPC self._rpc = bidi.ResumableBidiRpc( start_rpc=self._client.api.streaming_pull, initial_request=self._get_initial_request, should_recover=self._should_recover) self._rpc.add_done_callback(self._on_rpc_done) + + # Create references to threads + self._dispatcher = dispatcher.Dispatcher(self, self._scheduler.queue) self._consumer = bidi.BackgroundConsumer( self._rpc, self._on_response) + self._leaser = leaser.Leaser(self) + + # Start the thread to pass the requests. + self._dispatcher.start() + + # Start consuming messages. self._consumer.start() # Start the lease maintainer thread. 
- self._leaser = leaser.Leaser(self) self._leaser.start() def close(self, reason=None): From 0694aab7e4fbe0facb3d64879e5dd92c07a56a1e Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 15:20:43 -0700 Subject: [PATCH 34/75] Send requests during streaming pull over a separate unary RPC (#5377) --- .../subscriber/_protocol/helper_threads.py | 2 +- .../_protocol/streaming_pull_manager.py | 42 ++++++++++++++++++- .../subscriber/test_streaming_pull_manager.py | 35 +++++++++++++++- 3 files changed, 75 insertions(+), 4 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py index ac38101bd96f..edb22d14fea5 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/helper_threads.py @@ -111,6 +111,6 @@ def __call__(self): try: self._callback(items) except Exception as exc: - _LOGGER.error('%s: %s', exc.__class__.__name__, exc) + _LOGGER.exception('Error in queue callback worker: %s', exc) _LOGGER.debug('Exiting the QueueCallbackWorker.') diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index e5219fc4eecd..aa0876798ad9 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -14,13 +14,15 @@ from __future__ import division +import collections import functools import logging import threading -from google.api_core import exceptions import grpc +import six +from google.api_core import exceptions from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber._protocol import bidi from google.cloud.pubsub_v1.subscriber._protocol import dispatcher @@ -84,6 +86,10 @@ class StreamingPullManager(object): scheduler will be used. """ + _UNARY_REQUESTS = True + """If set to True, this class will make requests over a separate unary + RPC instead of over the streaming RPC.""" + def __init__(self, client, subscription, flow_control=types.FlowControl(), scheduler=None): self._client = client @@ -220,9 +226,41 @@ def maybe_resume_consumer(self): else: _LOGGER.debug('Did not resume, current load is %s', self.load) + def _send_unary_request(self, request): + """Send a request using a separate unary request instead of over the + stream. + + Args: + request (types.StreamingPullRequest): The stream request to be + mapped into unary requests. + """ + if request.ack_ids: + self._client.acknowledge( + subscription=self._subscription, + ack_ids=list(request.ack_ids)) + + if request.modify_deadline_ack_ids: + # Send ack_ids with the same deadline seconds together. + deadline_to_ack_ids = collections.defaultdict(list) + + for n, ack_id in enumerate(request.modify_deadline_ack_ids): + deadline = request.modify_deadline_seconds[n] + deadline_to_ack_ids[deadline].append(ack_id) + + for deadline, ack_ids in six.iteritems(deadline_to_ack_ids): + self._client.modify_ack_deadline( + subscription=self._subscription, + ack_ids=ack_ids, + ack_deadline_seconds=deadline) + + _LOGGER.debug('Sent request(s) over unary RPC.') + def send(self, request): """Queue a request to be sent to the RPC.""" - self._rpc.send(request) + if self._UNARY_REQUESTS: + self._send_unary_request(request) + else: + self._rpc.send(request) def open(self, callback): """Begin consuming messages. 
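For illustration, the grouping step inside the new _send_unary_request method above can be isolated into a minimal standalone sketch; the ack IDs and deadline values below are made up, and this snippet is not part of the patch itself:

import collections

# Stand-ins for the fields of a types.StreamingPullRequest.
modify_deadline_ack_ids = ['ack_id3', 'ack_id4', 'ack_id5']
modify_deadline_seconds = [10, 20, 20]

# Group ack IDs that share a deadline so each distinct deadline becomes
# a single unary modify_ack_deadline RPC rather than one call per ack ID.
deadline_to_ack_ids = collections.defaultdict(list)
for n, ack_id in enumerate(modify_deadline_ack_ids):
    deadline = modify_deadline_seconds[n]
    deadline_to_ack_ids[deadline].append(ack_id)

print(dict(deadline_to_ack_ids))
# {10: ['ack_id3'], 20: ['ack_id4', 'ack_id5']} -- two RPCs instead of three

This is the same bookkeeping that test_send_unary in the test diff below asserts on.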
diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index a6527dc4eb3b..61d040a26fc1 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -174,8 +174,41 @@ def test_resume_not_paused(): manager._consumer.resume.assert_not_called() -def test_send(): +def test_send_unary(): manager = make_manager() + manager._UNARY_REQUESTS = True + + manager.send(types.StreamingPullRequest( + ack_ids=['ack_id1', 'ack_id2'], + modify_deadline_ack_ids=['ack_id3', 'ack_id4', 'ack_id5'], + modify_deadline_seconds=[10, 20, 20])) + + manager._client.acknowledge.assert_called_once_with( + subscription=manager._subscription, ack_ids=['ack_id1', 'ack_id2']) + + manager._client.modify_ack_deadline.assert_has_calls([ + mock.call( + subscription=manager._subscription, + ack_ids=['ack_id3'], ack_deadline_seconds=10), + mock.call( + subscription=manager._subscription, + ack_ids=['ack_id4', 'ack_id5'], ack_deadline_seconds=20), + ], any_order=True) + + +def test_send_unary_empty(): + manager = make_manager() + manager._UNARY_REQUESTS = True + + manager.send(types.StreamingPullRequest()) + + manager._client.acknowledge.assert_not_called() + manager._client.modify_ack_deadline.assert_not_called() + + +def test_send_streaming(): + manager = make_manager() + manager._UNARY_REQUESTS = False manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) manager.send(mock.sentinel.request) From 58b6647c90b058027b40678e30a77950d27f5b50 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 15:29:35 -0700 Subject: [PATCH 35/75] Add link to streaming pull behavior documentation (#5378) --- pubsub/google/cloud/pubsub_v1/subscriber/client.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/client.py b/pubsub/google/cloud/pubsub_v1/subscriber/client.py index c4906bbbeb21..ff1a76955a40 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -119,7 +119,13 @@ def subscribe( is encountered (such as loss of network connectivity). Cancelling the future will signal the process to shutdown gracefully and exit. - Example + .. note:: This uses Pub/Sub's *streaming pull* feature. This feature + has properties that may be surprising. Please take a look at + https://cloud.google.com/pubsub/docs/pull#streamingpull for + more details on how streaming pull behaves compared to the + synchronous pull method. + + Example: .. code-block:: python From 416fbee0cad89337120f123cbf680e27d3b32a5c Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 15:32:56 -0700 Subject: [PATCH 36/75] Restore the synchronous pull method (#5379) --- pubsub/google/cloud/pubsub_v1/subscriber/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/client.py b/pubsub/google/cloud/pubsub_v1/subscriber/client.py index ff1a76955a40..4a9cf0d3a32b 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/client.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/client.py @@ -32,7 +32,7 @@ @_gapic.add_methods(subscriber_client.SubscriberClient, - blacklist=('pull', 'streaming_pull')) + blacklist=('streaming_pull',)) class Client(object): """A subscriber client for Google Cloud Pub/Sub. 
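With 'pull' removed from the blacklist, the generated synchronous pull method is attached to the subscriber client again. A minimal usage sketch follows; it is illustrative only and not part of the patch, and the project and subscription names are placeholders:

from google.cloud import pubsub_v1

subscriber = pubsub_v1.SubscriberClient()
subscription = 'projects/my-project/subscriptions/my-subscription'

# One unary pull returns a batch of up to max_messages messages.
response = subscriber.pull(subscription, max_messages=10)
for received in response.received_messages:
    print(received.message.data)

# Acknowledge the processed messages so they are not redelivered.
ack_ids = [received.ack_id for received in response.received_messages]
if ack_ids:
    subscriber.acknowledge(subscription, ack_ids)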
From 4a79dffa7563a5bc5b64c0aafbcb125fbc202f86 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 16:15:08 -0700 Subject: [PATCH 37/75] Don't build PDF --- .readthedocs.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index ecb0acf92626..607de497679a 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,5 +1,3 @@ -formats: -- pdf requirements_file: docs/requirements.txt build: image: latest From 1f8f4853e48069e6282e28437e7d84aecda0469d Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 23 May 2018 16:38:48 -0700 Subject: [PATCH 38/75] Release 0.35.0 (#5380) --- pubsub/CHANGELOG.md | 27 +++++++++++++++++++++++++++ pubsub/setup.py | 2 +- 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/pubsub/CHANGELOG.md b/pubsub/CHANGELOG.md index 8c9f1422f625..82524c20d447 100644 --- a/pubsub/CHANGELOG.md +++ b/pubsub/CHANGELOG.md @@ -4,6 +4,33 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.35.0 + +### Implementation Changes + +- Send requests during streaming pull over a separate unary RPC (#5377) +- Initialize references to helper threads before starting them (#5374) +- Make leaser exit more quickly (#5373) +- Make re-open failures bubble to callbacks (#5372) +- Avoid overwriting '__module__' of messages from shared modules. (#5364) +- Normalize overflow handling for max count and bytes (#5343) + +### New Features + +- Restore the synchronous pull method (#5379) +- Promote subscribe_experimental() to subscribe(), remove old subscriber implementation. (#5274) +- Wire up scheduler argument for subscribe() (#5279) + +### Documentation + +- Add link to streaming pull behavior documentation (#5378) +- Fix example in subscribe's documentation (#5375) + +### Internal / Testing Changes + +- Add Test runs for Python 3.7 and remove 3.4 (#5295) +- Modify system tests to use prerelease versions of grpcio (#5304) + ## 0.34.0 ### Implementation Changes diff --git a/pubsub/setup.py b/pubsub/setup.py index e1237ac8d11e..a7ef56b86deb 100644 --- a/pubsub/setup.py +++ b/pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.34.0' +version = '0.35.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From ccdfbff8fcd5ef1cb8323a595dc9d5de3ee20ce5 Mon Sep 17 00:00:00 2001 From: zehauser Date: Thu, 24 May 2018 09:10:01 -0700 Subject: [PATCH 39/75] Fix filter_params argument in list_projects (#5383) The filter_params parameter of resource_manager.Client.list_projects is a dict (of properties to filter on and the desired value for each property). Prior to this commit, this dict was passed into the HTTPIterator constructor in extra_params['filter']. This didn't work, because extra_params values can't be dicts. They *can* be iterables, but since iterating a dict yields only its keys, this resulted in only the property names being sent along in the 'filter' query param, while the desired values for those properties were discarded. This commit transforms the filter_params dict into a simple list prior to shoving it in extra_params, which fixes the bug. 
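A standalone sketch of the flattening this commit performs (illustrative only, not part of the patch; the filter values are made up):

import six

filter_params = {'name': 'My Project', 'labels.color': 'red'}

# Flatten {property: value} pairs into the 'property:value' strings that
# the Resource Manager API expects in its 'filter' query parameter.
flattened = [
    '{}:{}'.format(key, value)
    for key, value in six.iteritems(filter_params)
]
print(sorted(flattened))  # ['labels.color:red', 'name:My Project']

Before the fix only the keys ('name', 'labels.color') reached the query string; after it, each key is paired with its value, as the updated unit test below verifies.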
--- resource_manager/google/cloud/resource_manager/client.py | 6 +++++- resource_manager/tests/unit/test_client.py | 3 ++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/resource_manager/google/cloud/resource_manager/client.py b/resource_manager/google/cloud/resource_manager/client.py index bc4c66e3a016..4dec6a24ef56 100644 --- a/resource_manager/google/cloud/resource_manager/client.py +++ b/resource_manager/google/cloud/resource_manager/client.py @@ -14,6 +14,7 @@ """A Client for interacting with the Resource Manager API.""" +import six from google.api_core import page_iterator from google.cloud.client import Client as BaseClient @@ -162,7 +163,10 @@ def list_projects(self, filter_params=None, page_size=None): extra_params['pageSize'] = page_size if filter_params is not None: - extra_params['filter'] = filter_params + extra_params['filter'] = [ + '{}:{}'.format(key, value) + for key, value in six.iteritems(filter_params) + ] return page_iterator.HTTPIterator( client=self, diff --git a/resource_manager/tests/unit/test_client.py b/resource_manager/tests/unit/test_client.py index 6eb82832b6b0..4731a24a57fe 100644 --- a/resource_manager/tests/unit/test_client.py +++ b/resource_manager/tests/unit/test_client.py @@ -217,12 +217,13 @@ def test_list_projects_with_filter(self): self.assertEqual(project.status, STATUS) # Check that the filter made it in the request. + FLATTENED_FILTER_PARAMS = ['id:project-id'] request, = client._connection._requested self.assertEqual(request, { 'path': '/projects', 'method': 'GET', 'query_params': { - 'filter': FILTER_PARAMS, + 'filter': FLATTENED_FILTER_PARAMS, }, }) From 2a6cbd20fac296e1e8b2597ecb604a2251c4babd Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 24 May 2018 09:40:09 -0700 Subject: [PATCH 40/75] Release IoT 0.1.0 (#5385) --- iot/CHANGELOG.md | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 iot/CHANGELOG.md diff --git a/iot/CHANGELOG.md b/iot/CHANGELOG.md new file mode 100644 index 000000000000..165e174fae3e --- /dev/null +++ b/iot/CHANGELOG.md @@ -0,0 +1,11 @@ +# Changelog + +[PyPI History][1] + +[1]: https://pypi.org/project/google-cloud-iot/#history + +## 0.1.0 + +### New Features +- Add v1 Endpoint for IoT (#5355) + From 82675154e4799d3e4ef7d68489cd527368eb5f12 Mon Sep 17 00:00:00 2001 From: shollyman Date: Thu, 24 May 2018 10:36:07 -0700 Subject: [PATCH 41/75] BigQuery: add ddl-related query stats. (#5382) * BigQuery: add ddl-related query stats. * Change docstrings from Union->Optional, add integration assertions. * assertTrue -> assertEqual --- bigquery/google/cloud/bigquery/job.py | 23 ++++++++++++++++ bigquery/tests/system.py | 4 +++ bigquery/tests/unit/test_job.py | 39 +++++++++++++++++++++++++++ 3 files changed, 66 insertions(+) diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py index 186beab31ba8..393dcdf5dad7 100644 --- a/bigquery/google/cloud/bigquery/job.py +++ b/bigquery/google/cloud/bigquery/job.py @@ -2266,6 +2266,29 @@ def cache_hit(self): """ return self._job_statistics().get('cacheHit') + @property + def ddl_operation_performed(self): + """Optional[str]: Return the DDL operation performed. + + See: + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.ddlOperationPerformed + + """ + return self._job_statistics().get('ddlOperationPerformed') + + @property + def ddl_target_table(self): + """Optional[TableReference]: Return the DDL target table, present + for CREATE/DROP TABLE/VIEW queries. 
+ + See: + https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#statistics.query.ddlTargetTable + """ + prop = self._job_statistics().get('ddlTargetTable') + if prop is not None: + prop = TableReference.from_api_repr(prop) + return prop + @property def num_dml_affected_rows(self): """Return the number of DML rows affected by the job. diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py index 03465ff8d594..0b4c861eff39 100644 --- a/bigquery/tests/system.py +++ b/bigquery/tests/system.py @@ -409,6 +409,10 @@ def _create_table_many_columns(self, rowcount): """.format(dsname, table_id, colprojections, rowcount) query_job = Config.CLIENT.query(sql) query_job.result() + self.assertEqual(query_job.statement_type, 'CREATE_TABLE_AS_SELECT') + self.assertEqual(query_job.ddl_operation_performed, 'CREATE') + self.assertEqual(query_job.ddl_target_table, table_ref) + return table_ref def test_query_many_columns(self): diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py index d381c31895f3..a76d05f4a077 100644 --- a/bigquery/tests/unit/test_job.py +++ b/bigquery/tests/unit/test_job.py @@ -2454,6 +2454,45 @@ def test_cache_hit(self): query_stats['cacheHit'] = True self.assertTrue(job.cache_hit) + def test_ddl_operation_performed(self): + op = 'SKIP' + client = _make_client(project=self.PROJECT) + job = self._make_one(self.JOB_ID, self.QUERY, client) + self.assertIsNone(job.ddl_operation_performed) + + statistics = job._properties['statistics'] = {} + self.assertIsNone(job.ddl_operation_performed) + + query_stats = statistics['query'] = {} + self.assertIsNone(job.ddl_operation_performed) + + query_stats['ddlOperationPerformed'] = op + self.assertEqual(job.ddl_operation_performed, op) + + def test_ddl_target_table(self): + from google.cloud.bigquery.table import TableReference + + ref_table = { + 'projectId': self.PROJECT, + 'datasetId': 'ddl_ds', + 'tableId': 'targettable', + } + client = _make_client(project=self.PROJECT) + job = self._make_one(self.JOB_ID, self.QUERY, client) + self.assertIsNone(job.ddl_target_table) + + statistics = job._properties['statistics'] = {} + self.assertIsNone(job.ddl_target_table) + + query_stats = statistics['query'] = {} + self.assertIsNone(job.ddl_target_table) + + query_stats['ddlTargetTable'] = ref_table + self.assertIsInstance(job.ddl_target_table, TableReference) + self.assertEqual(job.ddl_target_table.table_id, 'targettable') + self.assertEqual(job.ddl_target_table.dataset_id, 'ddl_ds') + self.assertEqual(job.ddl_target_table.project, self.PROJECT) + def test_num_dml_affected_rows(self): num_rows = 1234 client = _make_client(project=self.PROJECT) From 2f5730dd61fccdb1bcf38d1dfc569f93e401b91c Mon Sep 17 00:00:00 2001 From: Luke Sneeringer Date: Thu, 24 May 2018 11:28:50 -0700 Subject: [PATCH 42/75] Make client_info work without gRPC installed. 
(#5075) --- .../google/api_core/gapic_v1/client_info.py | 20 +++++++++++++------ api_core/tests/unit/gapic/test_client_info.py | 9 +++++---- 2 files changed, 19 insertions(+), 10 deletions(-) diff --git a/api_core/google/api_core/gapic_v1/client_info.py b/api_core/google/api_core/gapic_v1/client_info.py index a76754dde838..7feeaf1eaae1 100644 --- a/api_core/google/api_core/gapic_v1/client_info.py +++ b/api_core/google/api_core/gapic_v1/client_info.py @@ -23,8 +23,13 @@ import pkg_resources _PY_VERSION = platform.python_version() -_GRPC_VERSION = pkg_resources.get_distribution('grpcio').version _API_CORE_VERSION = pkg_resources.get_distribution('google-api-core').version + +try: + _GRPC_VERSION = pkg_resources.get_distribution('grpcio').version +except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GRPC_VERSION = None + METRICS_METADATA_KEY = 'x-goog-api-client' @@ -38,7 +43,7 @@ class ClientInfo(object): Args: python_version (str): The Python interpreter version, for example, ``'2.7.13'``. - grpc_version (str): The gRPC library version. + grpc_version (Optional[str]): The gRPC library version. api_core_version (str): The google-api-core library version. gapic_version (Optional[str]): The version of the gapic-generated client library, if the library was generated by gapic. @@ -66,15 +71,18 @@ def to_user_agent(self): # expects these items to be in specific locations. ua = 'gl-python/{python_version} ' - if self.client_library_version is not None: - ua += 'gccl/{client_library_version} ' + if self.grpc_version is not None: + ua += 'grpc/{grpc_version} ' + + ua += 'gax/{api_core_version} ' if self.gapic_version is not None: ua += 'gapic/{gapic_version} ' - ua += 'gax/{api_core_version} grpc/{grpc_version}' + if self.client_library_version is not None: + ua += 'gccl/{client_library_version} ' - return ua.format(**self.__dict__) + return ua.format(**self.__dict__).strip() def to_grpc_metadata(self): """Returns the gRPC metadata for this client info.""" diff --git a/api_core/tests/unit/gapic/test_client_info.py b/api_core/tests/unit/gapic/test_client_info.py index acbcb6aa9c6f..f83c4d558df6 100644 --- a/api_core/tests/unit/gapic/test_client_info.py +++ b/api_core/tests/unit/gapic/test_client_info.py @@ -44,12 +44,13 @@ def test_constructor_options(): def test_to_user_agent_minimal(): info = client_info.ClientInfo( python_version='1', - grpc_version='2', - api_core_version='3') + api_core_version='2', + grpc_version=None + ) user_agent = info.to_user_agent() - assert user_agent == 'gl-python/1 gax/3 grpc/2' + assert user_agent == 'gl-python/1 gax/2' def test_to_user_agent_full(): @@ -62,7 +63,7 @@ def test_to_user_agent_full(): user_agent = info.to_user_agent() - assert user_agent == 'gl-python/1 gccl/5 gapic/4 gax/3 grpc/2' + assert user_agent == 'gl-python/1 grpc/2 gax/3 gapic/4 gccl/5' def test_to_grpc_metadata(): From 122d884683e655c407fbef4411aa5938dba45457 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 24 May 2018 13:02:47 -0700 Subject: [PATCH 43/75] disable bigtable system tests (#5381) --- bigtable/tests/system.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py index ff87f1bc9444..067bd1b86a2c 100644 --- a/bigtable/tests/system.py +++ b/bigtable/tests/system.py @@ -35,6 +35,7 @@ from test_utils.system import EmulatorCreds from test_utils.system import unique_resource_id +import pytest LOCATION_ID = 'us-central1-c' INSTANCE_ID = 'g-c-p' + unique_resource_id('-') @@ -116,6 +117,8 @@ def 
tearDown(self): for instance in self.instances_to_delete: instance.delete() + @pytest.mark.xfail(reason="https://github.com/GoogleCloudPlatform/" + "google-cloud-python/issues/5362") def test_list_instances(self): instances_response = Config.CLIENT.list_instances() self.assertEqual(instances_response.failed_locations, []) @@ -190,12 +193,16 @@ def tearDown(self): for table in self.tables_to_delete: table.delete() + @pytest.mark.xfail(reason="https://github.com/GoogleCloudPlatform/" + "google-cloud-python/issues/5362") def test_list_tables(self): # Since `Config.INSTANCE` is newly created in `setUpModule`, the table # created in `setUpClass` here will be the only one. tables = Config.INSTANCE.list_tables() self.assertEqual(tables, [self._table]) + @pytest.mark.xfail(reason="https://github.com/GoogleCloudPlatform/" + "google-cloud-python/issues/5362") def test_create_table(self): temp_table_id = 'foo-bar-baz-table' temp_table = Config.INSTANCE.table(temp_table_id) @@ -212,6 +219,8 @@ def test_create_table(self): sorted_tables = sorted(tables, key=name_attr) self.assertEqual(sorted_tables, expected_tables) + @pytest.mark.xfail(reason="https://github.com/GoogleCloudPlatform/" + "google-cloud-python/issues/5362") def test_create_column_family(self): temp_table_id = 'foo-bar-baz-table' temp_table = Config.INSTANCE.table(temp_table_id) @@ -233,6 +242,8 @@ def test_create_column_family(self): column_family.column_family_id) self.assertEqual(retrieved_col_fam.gc_rule, gc_rule) + @pytest.mark.xfail(reason="https://github.com/GoogleCloudPlatform/" + "google-cloud-python/issues/5362") def test_update_column_family(self): temp_table_id = 'foo-bar-baz-table' temp_table = Config.INSTANCE.table(temp_table_id) @@ -256,6 +267,8 @@ def test_update_column_family(self): col_fams = temp_table.list_column_families() self.assertIsNone(col_fams[COLUMN_FAMILY_ID1].gc_rule) + @pytest.mark.xfail(reason="https://github.com/GoogleCloudPlatform/" + "google-cloud-python/issues/5362") def test_delete_column_family(self): temp_table_id = 'foo-bar-baz-table' temp_table = Config.INSTANCE.table(temp_table_id) @@ -339,6 +352,8 @@ def _write_to_row(self, row1=None, row2=None, row3=None, row4=None): cell4 = Cell(CELL_VAL4, timestamp4_micros) return cell1, cell2, cell3, cell4 + @pytest.mark.xfail(reason="https://github.com/GoogleCloudPlatform/" + "google-cloud-python/issues/5362") def test_timestamp_filter_millisecond_granularity(self): from google.cloud.bigtable import row_filters @@ -349,6 +364,8 @@ def test_timestamp_filter_millisecond_granularity(self): row_data = self._table.read_rows(filter_=timefilter) row_data.consume_all() + @pytest.mark.xfail(reason="https://github.com/GoogleCloudPlatform/" + "google-cloud-python/issues/5362") def test_mutate_rows(self): row1 = self._table.row(ROW_KEY) row1.set_cell(COLUMN_FAMILY_ID1, COL_NAME1, CELL_VAL1) @@ -376,6 +393,8 @@ def test_mutate_rows(self): self.assertEqual( row2_data.cells[COLUMN_FAMILY_ID1][COL_NAME1][0].value, CELL_VAL4) + @pytest.mark.xfail(reason="https://github.com/GoogleCloudPlatform/" + "google-cloud-python/issues/5362") def test_read_large_cell_limit(self): row = self._table.row(ROW_KEY) self.rows_to_delete.append(row) @@ -393,6 +412,8 @@ def test_read_large_cell_limit(self): self.assertEqual(len(column), 1) self.assertEqual(column[0].value, data) + @pytest.mark.xfail(reason="https://github.com/GoogleCloudPlatform/" + "google-cloud-python/issues/5362") def test_read_row(self): row = self._table.row(ROW_KEY) self.rows_to_delete.append(row) @@ -417,6 +438,8 @@ def 
test_read_row(self): } self.assertEqual(partial_row_data.cells, expected_row_contents) + @pytest.mark.xfail(reason="https://github.com/GoogleCloudPlatform/" + "google-cloud-python/issues/5362") def test_read_rows(self): row = self._table.row(ROW_KEY) row_alt = self._table.row(ROW_KEY_ALT) @@ -462,6 +485,8 @@ def test_read_rows(self): } self.assertEqual(rows_data.rows, expected_rows) + @pytest.mark.xfail(reason="https://github.com/GoogleCloudPlatform/" + "google-cloud-python/issues/5362") def test_read_with_label_applied(self): self._maybe_emulator_skip('Labels not supported by Bigtable emulator') row = self._table.row(ROW_KEY) From 26f1a97ec06b5c48df7a3c9eec00ae3985657164 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Thu, 24 May 2018 13:59:27 -0700 Subject: [PATCH 44/75] fix trove classifier (#5386) --- iot/setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/iot/setup.py b/iot/setup.py index bfe386092720..d7d6f737184f 100644 --- a/iot/setup.py +++ b/iot/setup.py @@ -20,7 +20,7 @@ name = 'google-cloud-iot' description = 'Cloud IoT API API client library' version = '0.1.0' -release_status = '3 - Alpha' +release_status = 'Development Status :: 3 - Alpha' dependencies = [ 'google-api-core[grpc] >= 1.1.0, < 2.0.0dev', 'grpc-google-iam-v1 >= 0.11.1, < 0.12dev', From 7fec7adcacb5f6855f656baae2cf570162d8b91c Mon Sep 17 00:00:00 2001 From: iddober Date: Tue, 29 May 2018 15:27:21 +0300 Subject: [PATCH 45/75] Fix rtype for 'Client.detect_language' for single values (#5397) --- translate/google/cloud/translate_v2/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/translate/google/cloud/translate_v2/client.py b/translate/google/cloud/translate_v2/client.py index 46705bd7513a..c3cbe58eb967 100644 --- a/translate/google/cloud/translate_v2/client.py +++ b/translate/google/cloud/translate_v2/client.py @@ -103,7 +103,7 @@ def detect_language(self, values): :param values: String or list of strings that will have language detected. - :rtype: str or list + :rtype: dict or list :returns: A list of dictionaries for each queried value. Each dictionary typically contains three keys From 4e98d3be6f90cd2b675676c3433c41f9e15ed7d4 Mon Sep 17 00:00:00 2001 From: Aneep Tandel Date: Tue, 29 May 2018 18:19:12 +0530 Subject: [PATCH 46/75] BigTable: Add data app profile id (#5369) --- bigtable/google/cloud/bigtable/table.py | 43 ++++++++++++++++++------- bigtable/tests/unit/test_table.py | 40 +++++++++++++++++------ 2 files changed, 62 insertions(+), 21 deletions(-) diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index 818806ca41b9..22e889588d32 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -84,11 +84,15 @@ class Table(object): :type instance: :class:`~google.cloud.bigtable.instance.Instance` :param instance: The instance that owns the table. + + :type app_profile_id: str + :param app_profile_id: (Optional) The unique name of the AppProfile. """ - def __init__(self, table_id, instance): + def __init__(self, table_id, instance, app_profile_id=None): self.table_id = table_id self._instance = instance + self._app_profile_id = app_profile_id @property def name(self): @@ -227,8 +231,9 @@ def read_row(self, row_key, filter_=None): :raises: :class:`ValueError <exceptions.ValueError>` if a commit row chunk is never encountered. 
""" - request_pb = _create_row_request(self.name, row_key=row_key, - filter_=filter_) + request_pb = _create_row_request( + self.name, row_key=row_key, filter_=filter_, + app_profile_id=self._app_profile_id) client = self._instance._client rows_data = PartialRowsData(client._table_data_client._read_rows, request_pb) @@ -276,7 +281,8 @@ def read_rows(self, start_key=None, end_key=None, limit=None, """ request_pb = _create_row_request( self.name, start_key=start_key, end_key=end_key, filter_=filter_, - limit=limit, end_inclusive=end_inclusive) + limit=limit, end_inclusive=end_inclusive, + app_profile_id=self._app_profile_id) client = self._instance._client return PartialRowsData(client._table_data_client._read_rows, request_pb) @@ -310,7 +316,7 @@ def yield_rows(self, start_key=None, end_key=None, limit=None, """ request_pb = _create_row_request( self.name, start_key=start_key, end_key=end_key, filter_=filter_, - limit=limit) + limit=limit, app_profile_id=self._app_profile_id) client = self._instance._client generator = YieldRowsData(client._table_data_client._read_rows, request_pb) @@ -347,7 +353,8 @@ def mutate_rows(self, rows, retry=DEFAULT_RETRY): sent. These will be in the same order as the `rows`. """ retryable_mutate_rows = _RetryableMutateRowsWorker( - self._instance._client, self.name, rows) + self._instance._client, self.name, rows, + app_profile_id=self._app_profile_id) return retryable_mutate_rows(retry=retry) def sample_row_keys(self): @@ -383,7 +390,7 @@ def sample_row_keys(self): """ client = self._instance._client response_iterator = client._table_data_client.sample_row_keys( - self.name) + self.name, app_profile_id=self._app_profile_id) return response_iterator @@ -403,10 +410,11 @@ class _RetryableMutateRowsWorker(object): ) # pylint: enable=unsubscriptable-object - def __init__(self, client, table_name, rows): + def __init__(self, client, table_name, rows, app_profile_id=None): self.client = client self.table_name = table_name self.rows = rows + self.app_profile_id = app_profile_id self.responses_statuses = [None] * len(self.rows) def __call__(self, retry=DEFAULT_RETRY): @@ -468,7 +476,8 @@ def _do_mutate_retryable_rows(self): return self.responses_statuses mutate_rows_request = _mutate_rows_request( - self.table_name, retryable_rows) + self.table_name, retryable_rows, + app_profile_id=self.app_profile_id) responses = self.client._table_data_client._mutate_rows( mutate_rows_request, retry=None) @@ -496,7 +505,8 @@ def _do_mutate_retryable_rows(self): def _create_row_request(table_name, row_key=None, start_key=None, end_key=None, - filter_=None, limit=None, end_inclusive=False): + filter_=None, limit=None, end_inclusive=False, + app_profile_id=None): """Creates a request to read rows in a table. :type table_name: str @@ -528,6 +538,9 @@ def _create_row_request(table_name, row_key=None, start_key=None, end_key=None, :param end_inclusive: (Optional) Whether the ``end_key`` should be considered inclusive. The default is False (exclusive). + :type: app_profile_id: str + :param app_profile_id: (Optional) The unique name of the AppProfile. + :rtype: :class:`data_messages_v2_pb2.ReadRowsRequest` :returns: The ``ReadRowsRequest`` protobuf corresponding to the inputs. 
:raises: :class:`ValueError <exceptions.ValueError>` if both @@ -551,6 +564,8 @@ def _create_row_request(table_name, row_key=None, start_key=None, end_key=None, request_kwargs['filter'] = filter_.to_pb() if limit is not None: request_kwargs['rows_limit'] = limit + if app_profile_id is not None: + request_kwargs['app_profile_id'] = app_profile_id message = data_messages_v2_pb2.ReadRowsRequest(**request_kwargs) @@ -563,7 +578,7 @@ def _create_row_request(table_name, row_key=None, start_key=None, end_key=None, return message -def _mutate_rows_request(table_name, rows): +def _mutate_rows_request(table_name, rows, app_profile_id=None): """Creates a request to mutate rows in a table. :type table_name: str @@ -572,12 +587,16 @@ def _mutate_rows_request(table_name, rows): :type rows: list :param rows: List or other iterable of :class:`.DirectRow` instances. + :type app_profile_id: str + :param app_profile_id: (Optional) The unique name of the AppProfile. + :rtype: :class:`data_messages_v2_pb2.MutateRowsRequest` :returns: The ``MutateRowsRequest`` protobuf corresponding to the inputs. :raises: :exc:`~.table.TooManyMutationsError` if the number of mutations is greater than 100,000 """ - request_pb = data_messages_v2_pb2.MutateRowsRequest(table_name=table_name) + request_pb = data_messages_v2_pb2.MutateRowsRequest( + table_name=table_name, app_profile_id=app_profile_id) mutations_count = 0 for row in rows: _check_row_table_name(table_name, row) diff --git a/bigtable/tests/unit/test_table.py b/bigtable/tests/unit/test_table.py index 037a57235391..485d22278033 100644 --- a/bigtable/tests/unit/test_table.py +++ b/bigtable/tests/unit/test_table.py @@ -342,7 +342,7 @@ def _list_column_families_helper(self): def test_list_column_families(self): self._list_column_families_helper() - def _read_row_helper(self, chunks, expected_result): + def _read_row_helper(self, chunks, expected_result, app_profile_id=None): from google.cloud._testing import _Monkey from google.cloud.bigtable import table as MUT from google.cloud.bigtable_v2.gapic import bigtable_client @@ -356,14 +356,16 @@ def _read_row_helper(self, chunks, expected_result): client = self._make_client(project='project-id', credentials=credentials, admin=True) instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) + table = self._make_one(self.TABLE_ID, instance, + app_profile_id=app_profile_id) # Create request_pb request_pb = object() # Returned by our mock. 
mock_created = [] - def mock_create_row_request(table_name, row_key, filter_): - mock_created.append((table_name, row_key, filter_)) + def mock_create_row_request(table_name, row_key, filter_, + app_profile_id=app_profile_id): + mock_created.append((table_name, row_key, filter_, app_profile_id)) return request_pb # Create response_iterator @@ -386,7 +388,8 @@ def mock_create_row_request(table_name, row_key, filter_): self.assertEqual(result, expected_result) self.assertEqual(mock_created, - [(table.name, self.ROW_KEY, filter_obj)]) + [(table.name, self.ROW_KEY, filter_obj, + app_profile_id)]) def test_read_row_miss_no__responses(self): self._read_row_helper(None, None) @@ -399,6 +402,7 @@ def test_read_row_complete(self): from google.cloud.bigtable.row_data import Cell from google.cloud.bigtable.row_data import PartialRowData + app_profile_id = 'app-profile-id' chunk = _ReadRowsResponseCellChunkPB( row_key=self.ROW_KEY, family_name=self.FAMILY_NAME, @@ -412,7 +416,7 @@ def test_read_row_complete(self): family = expected_result._cells.setdefault(self.FAMILY_NAME, {}) column = family.setdefault(self.QUALIFIER, []) column.append(Cell.from_pb(chunk)) - self._read_row_helper(chunks, expected_result) + self._read_row_helper(chunks, expected_result, app_profile_id) def test_read_row_still_partial(self): chunk = _ReadRowsResponseCellChunkPB( @@ -470,7 +474,9 @@ def test_read_rows(self): client._table_data_client = data_api client._table_admin_client = table_api instance = client.instance(instance_id=self.INSTANCE_ID) - table = self._make_one(self.TABLE_ID, instance) + app_profile_id = 'app-profile-id' + table = self._make_one(self.TABLE_ID, instance, + app_profile_id=app_profile_id) # Create request_pb request = object() # Returned by our mock. @@ -502,6 +508,7 @@ def mock_create_row_request(table_name, **kwargs): 'filter_': filter_obj, 'limit': limit, 'end_inclusive': False, + 'app_profile_id': app_profile_id } self.assertEqual(mock_created, [(table.name, created_kwargs)]) @@ -1146,12 +1153,14 @@ def test_do_mutate_retryable_rows_mismatch_num_responses(self): class Test__create_row_request(unittest.TestCase): def _call_fut(self, table_name, row_key=None, start_key=None, end_key=None, - filter_=None, limit=None, end_inclusive=False): + filter_=None, limit=None, end_inclusive=False, + app_profile_id=None): from google.cloud.bigtable.table import _create_row_request return _create_row_request( table_name, row_key=row_key, start_key=start_key, end_key=end_key, - filter_=filter_, limit=limit, end_inclusive=end_inclusive) + filter_=filter_, limit=limit, end_inclusive=end_inclusive, + app_profile_id=app_profile_id) def test_table_name_only(self): table_name = 'table_name' @@ -1234,6 +1243,19 @@ def test_with_limit(self): ) self.assertEqual(result, expected_result) + def test_with_app_profile_id(self): + table_name = 'table_name' + limit = 1337 + app_profile_id = 'app-profile-id' + result = self._call_fut(table_name, limit=limit, + app_profile_id=app_profile_id) + expected_result = _ReadRowsRequestPB( + table_name=table_name, + rows_limit=limit, + app_profile_id=app_profile_id + ) + self.assertEqual(result, expected_result) + def _ReadRowsRequestPB(*args, **kw): from google.cloud.bigtable_v2.proto import ( From fe57c5a0d44ca5f3daea2625dfd32e99f55eaa2a Mon Sep 17 00:00:00 2001 From: Alix Hamilton Date: Tue, 29 May 2018 11:04:05 -0700 Subject: [PATCH 47/75] BigQuery: Adds load_table_from_dataframe() and snippet (#5387) * Adds load_table_from_dataframe() and snippet * Add index to DataFrame in 
bigquery_load_table_dataframe sample --- bigquery/google/cloud/bigquery/client.py | 61 ++++++++++++++++++++- bigquery/nox.py | 4 +- bigquery/setup.py | 1 + bigquery/tests/unit/test_client.py | 70 ++++++++++++++++++++++++ docs/bigquery/snippets.py | 49 ++++++++++++++++- 5 files changed, 180 insertions(+), 5 deletions(-) diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py index 5ac988984617..8e303610d082 100644 --- a/bigquery/google/cloud/bigquery/client.py +++ b/bigquery/google/cloud/bigquery/client.py @@ -773,8 +773,8 @@ def load_table_from_file( job_config=None): """Upload the contents of this table from a file-like object. - Like load_table_from_uri, this creates, starts and returns - a ``LoadJob``. + Similar to :meth:`load_table_from_uri`, this method creates, starts and + returns a :class:`~google.cloud.bigquery.job.LoadJob`. Arguments: file_obj (file): A file handle opened in binary mode for reading. @@ -833,6 +833,63 @@ def load_table_from_file( raise exceptions.from_http_response(exc.response) return self.job_from_resource(response.json()) + def load_table_from_dataframe(self, dataframe, destination, + num_retries=_DEFAULT_NUM_RETRIES, + job_id=None, job_id_prefix=None, + location=None, project=None, + job_config=None): + """Upload the contents of a table from a pandas DataFrame. + + Similar to :meth:`load_table_from_uri`, this method creates, starts and + returns a :class:`~google.cloud.bigquery.job.LoadJob`. + + Arguments: + dataframe (pandas.DataFrame): + A :class:`~pandas.DataFrame` containing the data to load. + destination (google.cloud.bigquery.table.TableReference): + The destination table to use for loading the data. If it is an + existing table, the schema of the :class:`~pandas.DataFrame` + must match the schema of the destination table. If the table + does not yet exist, the schema is inferred from the + :class:`~pandas.DataFrame`. + + Keyword Arguments: + num_retries (int, optional): Number of upload retries. + job_id (str, optional): Name of the job. + job_id_prefix (str, optional): + The user-provided prefix for a randomly generated + job ID. This parameter will be ignored if a ``job_id`` is + also given. + location (str): + Location where to run the job. Must match the location of the + destination table. + project (str, optional): + Project ID of the project of where to run the job. Defaults + to the client's project. + job_config (google.cloud.bigquery.job.LoadJobConfig, optional): + Extra configuration options for the job. + + Returns: + google.cloud.bigquery.job.LoadJob: A new load job. + + Raises: + ImportError: + If a usable parquet engine cannot be found. This method + requires one of :mod:`pyarrow` or :mod:`fastparquet` to be + installed. + """ + buffer = six.BytesIO() + dataframe.to_parquet(buffer) + + if job_config is None: + job_config = job.LoadJobConfig() + job_config.source_format = job.SourceFormat.PARQUET + + return self.load_table_from_file( + buffer, destination, num_retries=num_retries, rewind=True, + job_id=job_id, job_id_prefix=job_id_prefix, location=location, + project=project, job_config=job_config) + def _do_resumable_upload(self, stream, metadata, num_retries): """Perform a resumable upload. 
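The core of the new method, visible in the hunk above, is a parquet round-trip through an in-memory buffer. A standalone sketch of just that step (illustrative only; it assumes pandas plus a parquet engine such as pyarrow is installed):

import pandas
import six

dataframe = pandas.DataFrame([
    {'name': 'Monty', 'age': 100},
    {'name': 'Python', 'age': 60},
])

# Serialize the frame to parquet in memory; load_table_from_dataframe()
# then hands this buffer to load_table_from_file() with
# SourceFormat.PARQUET and rewind=True (which seeks back to the start).
buffer = six.BytesIO()
dataframe.to_parquet(buffer)
buffer.seek(0)
print('{} parquet bytes ready to upload'.format(len(buffer.getvalue())))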
diff --git a/bigquery/nox.py b/bigquery/nox.py index a286b8651462..f0ddeadd8ac7 100644 --- a/bigquery/nox.py +++ b/bigquery/nox.py @@ -41,7 +41,7 @@ def default(session): if session.interpreter == 'python3.4': session.install('-e', '.') else: - session.install('-e', '.[pandas]') + session.install('-e', '.[pandas, pyarrow]') # IPython does not support Python 2 after version 5.x if session.interpreter == 'python2.7': @@ -142,7 +142,7 @@ def snippets(session, py): os.path.join('..', 'storage'), os.path.join('..', 'test_utils'), ) - session.install('-e', '.[pandas]') + session.install('-e', '.[pandas, pyarrow]') # Run py.test against the system tests. session.run( diff --git a/bigquery/setup.py b/bigquery/setup.py index 28c44ab72bcd..75348ce203e3 100644 --- a/bigquery/setup.py +++ b/bigquery/setup.py @@ -35,6 +35,7 @@ ] extras = { 'pandas': 'pandas>=0.17.1', + 'pyarrow': 'pyarrow>=0.4.1', } diff --git a/bigquery/tests/unit/test_client.py b/bigquery/tests/unit/test_client.py index 427f403e6be5..785f98b8e8bf 100644 --- a/bigquery/tests/unit/test_client.py +++ b/bigquery/tests/unit/test_client.py @@ -23,6 +23,14 @@ import six from six.moves import http_client import pytest +try: + import pandas +except (ImportError, AttributeError): # pragma: NO COVER + pandas = None +try: + import pyarrow +except (ImportError, AttributeError): # pragma: NO COVER + pyarrow = None from google.cloud.bigquery.dataset import DatasetReference @@ -3484,6 +3492,68 @@ def test_load_table_from_file_bad_mode(self): with pytest.raises(ValueError): client.load_table_from_file(file_obj, self.TABLE_REF) + @unittest.skipIf(pandas is None, 'Requires `pandas`') + @unittest.skipIf(pyarrow is None, 'Requires `pyarrow`') + def test_load_table_from_dataframe(self): + from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES + from google.cloud.bigquery import job + + client = self._make_client() + records = [ + {'name': 'Monty', 'age': 100}, + {'name': 'Python', 'age': 60}, + ] + dataframe = pandas.DataFrame(records) + + load_patch = mock.patch( + 'google.cloud.bigquery.client.Client.load_table_from_file', + autospec=True) + with load_patch as load_table_from_file: + client.load_table_from_dataframe(dataframe, self.TABLE_REF) + + load_table_from_file.assert_called_once_with( + client, mock.ANY, self.TABLE_REF, num_retries=_DEFAULT_NUM_RETRIES, + rewind=True, job_id=None, job_id_prefix=None, location=None, + project=None, job_config=mock.ANY) + + sent_file = load_table_from_file.mock_calls[0][1][1] + sent_bytes = sent_file.getvalue() + assert isinstance(sent_bytes, bytes) + assert len(sent_bytes) > 0 + + sent_config = load_table_from_file.mock_calls[0][2]['job_config'] + assert sent_config.source_format == job.SourceFormat.PARQUET + + @unittest.skipIf(pandas is None, 'Requires `pandas`') + @unittest.skipIf(pyarrow is None, 'Requires `pyarrow`') + def test_load_table_from_dataframe_w_custom_job_config(self): + from google.cloud.bigquery.client import _DEFAULT_NUM_RETRIES + from google.cloud.bigquery import job + + client = self._make_client() + records = [ + {'name': 'Monty', 'age': 100}, + {'name': 'Python', 'age': 60}, + ] + dataframe = pandas.DataFrame(records) + job_config = job.LoadJobConfig() + + load_patch = mock.patch( + 'google.cloud.bigquery.client.Client.load_table_from_file', + autospec=True) + with load_patch as load_table_from_file: + client.load_table_from_dataframe( + dataframe, self.TABLE_REF, job_config=job_config) + + load_table_from_file.assert_called_once_with( + client, mock.ANY, self.TABLE_REF, 
num_retries=_DEFAULT_NUM_RETRIES, + rewind=True, job_id=None, job_id_prefix=None, location=None, + project=None, job_config=mock.ANY) + + sent_config = load_table_from_file.mock_calls[0][2]['job_config'] + assert sent_config is job_config + assert sent_config.source_format == job.SourceFormat.PARQUET + # Low-level tests @classmethod diff --git a/docs/bigquery/snippets.py b/docs/bigquery/snippets.py index 2db75242718e..33939ff9ec3c 100644 --- a/docs/bigquery/snippets.py +++ b/docs/bigquery/snippets.py @@ -31,8 +31,12 @@ import six try: import pandas -except ImportError: +except (ImportError, AttributeError): pandas = None +try: + import pyarrow +except (ImportError, AttributeError): + pyarrow = None from google.cloud import bigquery @@ -2073,5 +2077,48 @@ def test_list_rows_as_dataframe(client): assert len(df) == table.num_rows # verify the number of rows +@pytest.mark.skipif(pandas is None, reason='Requires `pandas`') +@pytest.mark.skipif(pyarrow is None, reason='Requires `pyarrow`') +def test_load_table_from_dataframe(client, to_delete): + dataset_id = 'load_table_dataframe_dataset_{}'.format(_millis()) + dataset = bigquery.Dataset(client.dataset(dataset_id)) + client.create_dataset(dataset) + to_delete.append(dataset) + + # [START bigquery_load_table_dataframe] + # from google.cloud import bigquery + # client = bigquery.Client() + # dataset_id = 'my_dataset' + + dataset_ref = client.dataset(dataset_id) + table_ref = dataset_ref.table('monty_python') + records = [ + {'title': 'The Meaning of Life', 'release_year': 1983}, + {'title': 'Monty Python and the Holy Grail', 'release_year': 1975}, + {'title': 'Life of Brian', 'release_year': 1979}, + { + 'title': 'And Now for Something Completely Different', + 'release_year': 1971 + }, + ] + # Optionally set explicit indices. + # If indices are not specified, a column will be created for the default + # indices created by pandas. + index = ['Q24980', 'Q25043', 'Q24953', 'Q16403'] + dataframe = pandas.DataFrame( + records, index=pandas.Index(index, name='wikidata_id')) + + job = client.load_table_from_dataframe(dataframe, table_ref, location='US') + + job.result() # Waits for table load to complete. + + assert job.state == 'DONE' + table = client.get_table(table_ref) + assert table.num_rows == 4 + # [END bigquery_load_table_dataframe] + column_names = [field.name for field in table.schema] + assert sorted(column_names) == ['release_year', 'title', 'wikidata_id'] + + if __name__ == '__main__': pytest.main() From fd7938f32f8a34d5f7294ea526886df4b0a21e0e Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 29 May 2018 11:15:35 -0700 Subject: [PATCH 48/75] BigQuery: fix IPython system test for new pytest (#5400) * BigQuery: fix IPython system test for new pytest Pytest changes the way that marks and fixtures work in pytest 3.6.0 that breaks when using unittest-style tests. Use pytest-style test for the IPython system test so that the fixture gets used. 
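Both the unit tests and the snippet guard optional dependencies with a try/except import plus a ``skipif`` marker, and the commit above moves the IPython system test to a module-level pytest-style test so its fixtures keep working under pytest 3.6. The guard pattern in isolation, a sketch whose ``(ImportError, AttributeError)`` clause mirrors the snippets above::

    import pytest

    try:
        import pandas
    except (ImportError, AttributeError):  # pragma: NO COVER
        pandas = None


    @pytest.mark.skipif(pandas is None, reason='Requires `pandas`')
    def test_dataframe_shape():
        # Runs only when pandas imported cleanly; skipped otherwise.
        frame = pandas.DataFrame([{'name': 'Monty', 'age': 100}])
        assert len(frame) == 1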
* flake8 fix --- bigquery/tests/system.py | 61 ++++++++++++++++++++-------------------- 1 file changed, 31 insertions(+), 30 deletions(-) diff --git a/bigquery/tests/system.py b/bigquery/tests/system.py index 0b4c861eff39..2b0afb5fdb0f 100644 --- a/bigquery/tests/system.py +++ b/bigquery/tests/system.py @@ -1797,36 +1797,37 @@ def temp_dataset(self, dataset_id, location=None): self.to_delete.append(dataset) return dataset - @pytest.mark.skipif(pandas is None, reason='Requires `pandas`') - @pytest.mark.skipif(IPython is None, reason='Requires `ipython`') - @pytest.mark.usefixtures('ipython_interactive') - def test_bigquery_magic(self): - ip = IPython.get_ipython() - ip.extension_manager.load_extension('google.cloud.bigquery') - sql = """ - SELECT - CONCAT( - 'https://stackoverflow.com/questions/', - CAST(id as STRING)) as url, - view_count - FROM `bigquery-public-data.stackoverflow.posts_questions` - WHERE tags like '%google-bigquery%' - ORDER BY view_count DESC - LIMIT 10 - """ - with io.capture_output() as captured: - result = ip.run_cell_magic('bigquery', '', sql) - - lines = re.split('\n|\r', captured.stdout) - # Removes blanks & terminal code (result of display clearing) - updates = list(filter(lambda x: bool(x) and x != '\x1b[2K', lines)) - assert re.match("Executing query with job ID: .*", updates[0]) - assert all(re.match("Query executing: .*s", line) - for line in updates[1:-1]) - assert re.match("Query complete after .*s", updates[-1]) - assert isinstance(result, pandas.DataFrame) - assert len(result) == 10 # verify row count - assert list(result) == ['url', 'view_count'] # verify column names + +@pytest.mark.skipif(pandas is None, reason='Requires `pandas`') +@pytest.mark.skipif(IPython is None, reason='Requires `ipython`') +@pytest.mark.usefixtures('ipython_interactive') +def test_bigquery_magic(): + ip = IPython.get_ipython() + ip.extension_manager.load_extension('google.cloud.bigquery') + sql = """ + SELECT + CONCAT( + 'https://stackoverflow.com/questions/', + CAST(id as STRING)) as url, + view_count + FROM `bigquery-public-data.stackoverflow.posts_questions` + WHERE tags like '%google-bigquery%' + ORDER BY view_count DESC + LIMIT 10 + """ + with io.capture_output() as captured: + result = ip.run_cell_magic('bigquery', '', sql) + + lines = re.split('\n|\r', captured.stdout) + # Removes blanks & terminal code (result of display clearing) + updates = list(filter(lambda x: bool(x) and x != '\x1b[2K', lines)) + assert re.match("Executing query with job ID: .*", updates[0]) + assert all(re.match("Query executing: .*s", line) + for line in updates[1:-1]) + assert re.match("Query complete after .*s", updates[-1]) + assert isinstance(result, pandas.DataFrame) + assert len(result) == 10 # verify row count + assert list(result) == ['url', 'view_count'] # verify column names def _job_done(instance): From 381b6271f43e71a7bbe2f6cea75d74e78f1f27c0 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Tue, 29 May 2018 12:27:39 -0700 Subject: [PATCH 49/75] Catch errors when re-retying send() or recv() in addition to open() (#5402) --- .../cloud/pubsub_v1/subscriber/_protocol/bidi.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py index 518c7c91275e..6e361a1e1ff8 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py @@ -342,13 +342,7 @@ def _reopen(self): # has 
exited. self.request_generator = None - try: - self.open() - # If re-opening fails, consider this a terminal error and finalize - # the object. - except Exception as exc: - self._finalize(exc) - raise + self.open() def _recoverable(self, method, *args, **kwargs): """Wraps a method to recover the stream and retry on error. @@ -370,9 +364,14 @@ def _recoverable(self, method, *args, **kwargs): self.close() raise exc + try: self._reopen() - return method(*args, **kwargs) + # If re-opening or re-calling the method fails for any reason, consider + # it a terminal error and finalize the object. + except Exception as exc: + self._finalize(exc) + raise def send(self, request): return self._recoverable( From 8b30a8b8d9ab1245b23461e19ec36e77465cf2f7 Mon Sep 17 00:00:00 2001 From: Alix Hamilton Date: Tue, 29 May 2018 12:52:17 -0700 Subject: [PATCH 50/75] BigQuery: Adds Python 3.7 and removes Python 3.4 (#5401) * BigQuery: Adds Python 3.7 and removes Python 3.4 --- bigquery/nox.py | 6 +++--- bigquery/setup.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/bigquery/nox.py b/bigquery/nox.py index f0ddeadd8ac7..9f9eccd4db0a 100644 --- a/bigquery/nox.py +++ b/bigquery/nox.py @@ -37,8 +37,8 @@ def default(session): # Install all test dependencies, then install this package in-place. session.install('mock', 'pytest', 'pytest-cov', *LOCAL_DEPS) - # Pandas does not support Python 3.4 - if session.interpreter == 'python3.4': + # Pandas does not support Python 3.7 + if session.interpreter == 'python3.7': session.install('-e', '.') else: session.install('-e', '.[pandas, pyarrow]') @@ -65,7 +65,7 @@ def default(session): @nox.session -@nox.parametrize('py', ['2.7', '3.5', '3.6']) +@nox.parametrize('py', ['2.7', '3.5', '3.6', '3.7']) def unit(session, py): """Run the unit test suite.""" diff --git a/bigquery/setup.py b/bigquery/setup.py index 75348ce203e3..c473dbcc2333 100644 --- a/bigquery/setup.py +++ b/bigquery/setup.py @@ -76,9 +76,9 @@ 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', 'Operating System :: OS Independent', 'Topic :: Internet', ], From d9247236cf65bc0c7ceda51d61ecbe6013d11d92 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Tue, 29 May 2018 14:01:52 -0700 Subject: [PATCH 51/75] Release pubsub 0.35.1 (#5404) * Release 0.35.1 --- pubsub/CHANGELOG.md | 5 +++++ pubsub/setup.py | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/pubsub/CHANGELOG.md b/pubsub/CHANGELOG.md index 82524c20d447..3fe9694dae35 100644 --- a/pubsub/CHANGELOG.md +++ b/pubsub/CHANGELOG.md @@ -4,6 +4,11 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.35.1 + +### Implementation Changes +- Catch errors when re-retying send() or recv() in addition to open() (#5402) + ## 0.35.0 ### Implementation Changes diff --git a/pubsub/setup.py b/pubsub/setup.py index a7ef56b86deb..8d2bd4a24661 100644 --- a/pubsub/setup.py +++ b/pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.35.0' +version = '0.35.1' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 225a20eef489d97acac06120afb3af23b2dca3cb Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 29 May 2018 18:13:51 -0400 Subject: [PATCH 52/75] Attempt again to reproduce 
#4264. (#5403) * Add utility to populate / clear entities w/ timestamp (int) keys. * Clean up the logic for deleting kinds with more than 500 entities. --- datastore/tests/system/test_system.py | 19 +++++- .../tests/system/utils/clear_datastore.py | 58 +++++++++---------- .../tests/system/utils/populate_datastore.py | 52 +++++++++++++++-- 3 files changed, 94 insertions(+), 35 deletions(-) diff --git a/datastore/tests/system/test_system.py b/datastore/tests/system/test_system.py index 4532cd3a9737..cadfbc8403f4 100644 --- a/datastore/tests/system/test_system.py +++ b/datastore/tests/system/test_system.py @@ -363,7 +363,7 @@ def test_projection_query(self): sansa_dict = dict(sansa_entity) self.assertEqual(sansa_dict, {'name': 'Sansa', 'family': 'Stark'}) - def test_query_paginate_simple(self): + def test_query_paginate_simple_uuid_keys(self): # See issue #4264 page_query = self.CLIENT.query(kind='uuid_key') @@ -380,6 +380,23 @@ def test_query_paginate_simple(self): self.assertTrue(page_count > 1) + def test_query_paginate_simple_timestamp_keys(self): + + # See issue #4264 + page_query = self.CLIENT.query(kind='timestamp_key') + iterator = page_query.fetch() + + seen = set() + page_count = 0 + for page in iterator.pages: + page_count += 1 + for entity in page: + timestamp = entity.key.id + self.assertNotIn(timestamp, seen, timestamp) + seen.add(timestamp) + + self.assertTrue(page_count > 1) + def test_query_paginate_with_offset(self): page_query = self._base_query() page_query.order = 'appearances' diff --git a/datastore/tests/system/utils/clear_datastore.py b/datastore/tests/system/utils/clear_datastore.py index 3a09bff19898..5820fbf7d2b1 100644 --- a/datastore/tests/system/utils/clear_datastore.py +++ b/datastore/tests/system/utils/clear_datastore.py @@ -17,6 +17,7 @@ from __future__ import print_function import os +import sys import six @@ -31,8 +32,10 @@ 'Person', 'Post', 'uuid_key', + 'timestamp_key', ) TRANSACTION_MAX_GROUPS = 5 +MAX_DEL_ENTITIES = 500 def print_func(message): @@ -40,16 +43,6 @@ def print_func(message): print(message) -def fetch_keys(kind, client, fetch_max=FETCH_MAX, query=None, cursor=None): - if query is None: - query = client.query(kind=kind) - query.keys_only() - - iterator = query.fetch(limit=fetch_max, start_cursor=cursor) - page = six.next(iterator.pages) - return query, list(page), iterator.next_page_token - - def get_ancestors(entities): # NOTE: A key will always have at least one path element. 
key_roots = [entity.key.flat_path[:2] for entity in entities] @@ -57,15 +50,16 @@ def get_ancestors(entities): return list(set(key_roots)) -def remove_kind(kind, client): - results = [] +def delete_chunks(client, results): + while results: + chunk, results = results[:MAX_DEL_ENTITIES], results[MAX_DEL_ENTITIES:] + client.delete_multi([result.key for result in chunk]) - query, curr_results, cursor = fetch_keys(kind, client) - results.extend(curr_results) - while curr_results: - query, curr_results, cursor = fetch_keys( - kind, client, query=query, cursor=cursor) - results.extend(curr_results) + +def remove_kind(kind, client): + query = client.query(kind=kind) + query.keys_only() + results = list(query.fetch()) if not results: return @@ -80,26 +74,32 @@ def remove_kind(kind, client): if len(ancestors) > TRANSACTION_MAX_GROUPS: delete_outside_transaction = True else: - client.delete_multi([result.key for result in results]) + delete_chunks(client, results) if delete_outside_transaction: - client.delete_multi([result.key for result in results]) + delete_chunks(client, results) -def remove_all_entities(client=None): - if client is None: - # Get a client that uses the test dataset. - client = datastore.Client() - for kind in ALL_KINDS: - remove_kind(kind, client) +def main(): + client = datastore.Client() + kinds = sys.argv[1:] + if len(kinds) == 0: + kinds = ALL_KINDS -if __name__ == '__main__': print_func('This command will remove all entities for ' 'the following kinds:') - print_func('\n'.join('- ' + val for val in ALL_KINDS)) + print_func('\n'.join('- ' + val for val in kinds)) response = six.moves.input('Is this OK [y/n]? ') + if response.lower() == 'y': - remove_all_entities() + + for kind in kinds: + remove_kind(kind, client) + else: print_func('Doing nothing.') + + +if __name__ == '__main__': + main() diff --git a/datastore/tests/system/utils/populate_datastore.py b/datastore/tests/system/utils/populate_datastore.py index 636b9a4af0cd..9dcadd00dfb1 100644 --- a/datastore/tests/system/utils/populate_datastore.py +++ b/datastore/tests/system/utils/populate_datastore.py @@ -18,6 +18,8 @@ from __future__ import print_function import os +import sys +import time import uuid import six @@ -104,7 +106,7 @@ def add_characters(client=None): character['family'])) -def add_uid_keys(client): +def add_uid_keys(client=None): if client is None: # Get a client that uses the test dataset. client = datastore.Client() @@ -112,19 +114,59 @@ def add_uid_keys(client): num_batches = 2 batch_size = 500 - keys = [] for batch_num in range(num_batches): with client.batch() as batch: for seq_no in range(batch_size): uid = str(uuid.uuid4()) key = client.key('uuid_key', uid) - keys.append(key) entity = datastore.Entity(key=key) entity['batch_num'] = batch_num entity['seq_no'] = seq_no batch.put(entity) +def add_timestamp_keys(client=None): + if client is None: + # Get a client that uses the test dataset. 
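``delete_chunks`` above is a plain list-slicing loop sized to the 500-entity batch limit used here (``MAX_DEL_ENTITIES``). The idiom is easy to verify in isolation; a pure-Python sketch with illustrative sizes::

    MAX_DEL_ENTITIES = 500

    def chunks(items, size=MAX_DEL_ENTITIES):
        # Successive fixed-size slices, mirroring how delete_chunks splits
        # `results` so each delete_multi call stays within one batch.
        while items:
            chunk, items = items[:size], items[size:]
            yield chunk

    assert [len(c) for c in chunks(list(range(1200)))] == [500, 500, 200]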
+ client = datastore.Client() + + num_batches = 2 + batch_size = 500 + + timestamp_micros = set() + for batch_num in range(num_batches): + with client.batch() as batch: + for seq_no in range(batch_size): + print( + "time_time: batch: {}, sequence: {}".format( + batch_num, seq_no)) + now_micros = int(time.time() * 1e6) + while now_micros in timestamp_micros: + now_micros = int(time.time() * 1e6) + timestamp_micros.add(now_micros) + key = client.key('timestamp_key', now_micros) + entity = datastore.Entity(key=key) + entity['batch_num'] = batch_num + entity['seq_no'] = seq_no + batch.put(entity) + + +def main(): + client = datastore.Client() + flags = sys.argv[1:] + + if len(flags) == 0: + flags = ['--characters', '--uuid', '--timestamps'] + + if '--characters' in flags: + add_characters(client) + + if '--uuid' in flags: + add_uid_keys(client) + + if '--timestamps' in flags: + add_timestamp_keys(client) + + if __name__ == '__main__': - add_characters() - add_uid_keys() + main() From 0f2b1af8d4fac254e1b0ac84d7aed45d2b0b1ddb Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 30 May 2018 10:50:20 -0700 Subject: [PATCH 53/75] Fix retrying of bidirectional RPCs and closing the streaming pull manager (#5412) --- .../pubsub_v1/subscriber/_protocol/bidi.py | 46 ++++++++++++------- .../_protocol/streaming_pull_manager.py | 20 +++++++- .../subscriber/test_streaming_pull_manager.py | 10 ++-- 3 files changed, 54 insertions(+), 22 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py index 6e361a1e1ff8..80824c55022b 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py @@ -342,36 +342,48 @@ def _reopen(self): # has exited. self.request_generator = None - self.open() + # Note: we do not currently do any sort of backoff here. The + # assumption is that re-establishing the stream under normal + # circumstances will happen in intervals greater than 60s. + # However, it is possible in a degenerative case that the server + # closes the stream rapidly which would lead to thrashing here, + # but hopefully in those cases the server would return a non- + # retryable error. + + try: + self.open() + # If re-opening or re-calling the method fails for any reason, + # consider it a terminal error and finalize the stream. + except Exception as exc: + self._finalize(exc) + raise + + _LOGGER.info('Re-established stream') def _recoverable(self, method, *args, **kwargs): """Wraps a method to recover the stream and retry on error. - If a recoverable error occurs, this will retry the RPC and retry the - method. If a second error occurs while retrying the method, it will - bubble up. + If a retryable error occurs while making the call, then the stream will + be re-opened and the method will be retried. This happens indefinitely + so long as the error is a retryable one. If an error occurs while + re-opening the stream, then this method will raise immediately and + trigger finalization of this object. Args: method (Callable[..., Any]): The method to call. args: The args to pass to the method. kwargs: The kwargs to pass to the method. 
""" - try: - return method(*args, **kwargs) + while True: + try: + return method(*args, **kwargs) - except Exception as exc: - if not self._should_recover(exc): - self.close() - raise exc + except Exception as exc: + if not self._should_recover(exc): + self.close() + raise exc - try: self._reopen() - return method(*args, **kwargs) - # If re-opening or re-calling the method fails for any reason, consider - # it a terminal error and finalize the object. - except Exception as exc: - self._finalize(exc) - raise def send(self, request): return self._recoverable( diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index aa0876798ad9..57661729a83f 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -33,6 +33,7 @@ import google.cloud.pubsub_v1.subscriber.scheduler _LOGGER = logging.getLogger(__name__) +_RPC_ERROR_THREAD_NAME = 'Thread-OnRpcTerminated' _RETRYABLE_STREAM_ERRORS = ( exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, @@ -414,11 +415,28 @@ def _should_recover(self, exception): # If this is in the list of idempotent exceptions, then we want to # recover. if isinstance(exception, _RETRYABLE_STREAM_ERRORS): + logging.info('Observed recoverable stream error %s', exception) return True + logging.info('Observed non-recoverable stream error %s', exception) return False def _on_rpc_done(self, future): + """Triggered whenever the underlying RPC terminates without recovery. + + This is typically triggered from one of two threads: the background + consumer thread (when calling ``recv()`` produces a non-recoverable + error) or the grpc management thread (when cancelling the RPC). + + This method is *non-blocking*. It will start another thread to deal + with shutting everything down. This is to prevent blocking in the + background consumer and preventing it from being ``joined()``. 
+ """ _LOGGER.info( 'RPC termination has signaled streaming pull manager shutdown.') future = _maybe_wrap_exception(future) - self.close(reason=future) + thread = threading.Thread( + name=_RPC_ERROR_THREAD_NAME, + target=self.close, + kwargs={'reason': future}) + thread.daemon = True + thread.start() diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 61d040a26fc1..1aa979a504c9 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -420,10 +420,12 @@ def test__should_recover_false(): assert manager._should_recover(exc) is False -def test__on_rpc_done(): +@mock.patch('threading.Thread', autospec=True) +def test__on_rpc_done(thread): manager = make_manager() - with mock.patch.object(manager, 'close') as close: - manager._on_rpc_done(mock.sentinel.error) + manager._on_rpc_done(mock.sentinel.error) - close.assert_called_once_with(reason=mock.sentinel.error) + thread.assert_called_once_with( + name=mock.ANY, target=manager.close, + kwargs={'reason': mock.sentinel.error}) From 7d2138a9a0f86396f53cf62cf77b8e2ae01d8743 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 30 May 2018 12:03:02 -0700 Subject: [PATCH 54/75] Add heartbeating to the streaming pull manager (#5413) --- .../subscriber/_protocol/heartbeater.py | 70 +++++++++++ .../_protocol/streaming_pull_manager.py | 18 +++ .../pubsub_v1/subscriber/test_heartbeater.py | 119 ++++++++++++++++++ .../subscriber/test_streaming_pull_manager.py | 50 ++++++-- 4 files changed, 250 insertions(+), 7 deletions(-) create mode 100644 pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py create mode 100644 pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py new file mode 100644 index 000000000000..38d2ae8dc505 --- /dev/null +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/heartbeater.py @@ -0,0 +1,70 @@ +# Copyright 2018, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import + +import logging +import threading + + +_LOGGER = logging.getLogger(__name__) +_HEARTBEAT_WORKER_NAME = 'Thread-Heartbeater' +# How often to send heartbeats in seconds. Determined as half the period of +# time where the Pub/Sub server will close the stream as inactive, which is +# 60 seconds. 
+_DEFAULT_PERIOD = 30 + + +class Heartbeater(object): + def __init__(self, manager, period=_DEFAULT_PERIOD): + self._thread = None + self._operational_lock = threading.Lock() + self._manager = manager + self._stop_event = threading.Event() + self._period = period + + def heartbeat(self): + """Periodically send heartbeats.""" + while self._manager.is_active and not self._stop_event.is_set(): + self._manager.heartbeat() + _LOGGER.debug('Sent heartbeat.') + self._stop_event.wait(timeout=self._period) + + _LOGGER.info('%s exiting.', _HEARTBEAT_WORKER_NAME) + + def start(self): + with self._operational_lock: + if self._thread is not None: + raise ValueError('Heartbeater is already running.') + + # Create and start the helper thread. + self._stop_event.clear() + thread = threading.Thread( + name=_HEARTBEAT_WORKER_NAME, + target=self.heartbeat) + thread.daemon = True + thread.start() + _LOGGER.debug('Started helper thread %s', thread.name) + self._thread = thread + + def stop(self): + with self._operational_lock: + self._stop_event.set() + + if self._thread is not None: + # The thread should automatically exit when the consumer is + # inactive. + self._thread.join() + + self._thread = None diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 57661729a83f..24d57e1ee90b 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -26,6 +26,7 @@ from google.cloud.pubsub_v1 import types from google.cloud.pubsub_v1.subscriber._protocol import bidi from google.cloud.pubsub_v1.subscriber._protocol import dispatcher +from google.cloud.pubsub_v1.subscriber._protocol import heartbeater from google.cloud.pubsub_v1.subscriber._protocol import histogram from google.cloud.pubsub_v1.subscriber._protocol import leaser from google.cloud.pubsub_v1.subscriber._protocol import requests @@ -115,6 +116,7 @@ def __init__(self, client, subscription, flow_control=types.FlowControl(), self._dispatcher = None self._leaser = None self._consumer = None + self._heartbeater = None @property def is_active(self): @@ -263,6 +265,15 @@ def send(self, request): else: self._rpc.send(request) + def heartbeat(self): + """Sends an empty request over the streaming pull RPC. + + This always sends over the stream, regardless of if + ``self._UNARY_REQUESTS`` is set or not. + """ + if self._rpc is not None and self._rpc.is_active: + self._rpc.send(types.StreamingPullRequest()) + def open(self, callback): """Begin consuming messages. @@ -292,6 +303,7 @@ def open(self, callback): self._consumer = bidi.BackgroundConsumer( self._rpc, self._on_response) self._leaser = leaser.Leaser(self) + self._heartbeater = heartbeater.Heartbeater(self) # Start the thread to pass the requests. self._dispatcher.start() @@ -302,6 +314,9 @@ def open(self, callback): # Start the lease maintainer thread. self._leaser.start() + # Start the stream heartbeater thread. + self._heartbeater.start() + def close(self, reason=None): """Stop consuming messages and shutdown all helper threads. 
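``Heartbeater`` above relies on ``Event.wait`` as an interruptible sleep: ``stop()`` wakes the worker immediately instead of after a full 30-second period. The pattern in miniature, with illustrative callables and a deterministic shutdown::

    import threading

    def heartbeat_loop(send, stop_event, period=30.0):
        # Event.wait doubles as a cancellable sleep: it returns early as
        # soon as stop_event is set.
        while not stop_event.is_set():
            send()
            stop_event.wait(timeout=period)

    beats = []
    stop = threading.Event()

    def send():
        beats.append(1)
        if len(beats) == 3:
            stop.set()

    worker = threading.Thread(target=heartbeat_loop, args=(send, stop, 0.01))
    worker.start()
    worker.join()
    assert len(beats) == 3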
@@ -332,6 +347,9 @@ def close(self, reason=None): _LOGGER.debug('Stopping dispatcher.') self._dispatcher.stop() self._dispatcher = None + _LOGGER.debug('Stopping heartbeater.') + self._heartbeater.stop() + self._heartbeater = None self._rpc = None self._closed = True diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py new file mode 100644 index 000000000000..f9147a4d7e39 --- /dev/null +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_heartbeater.py @@ -0,0 +1,119 @@ +# Copyright 2018, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import threading + +from google.cloud.pubsub_v1.subscriber._protocol import heartbeater +from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager + +import mock +import pytest + + +def test_heartbeat_inactive(caplog): + caplog.set_level(logging.INFO) + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + manager.is_active = False + + heartbeater_ = heartbeater.Heartbeater(manager) + + heartbeater_.heartbeat() + + assert 'exiting' in caplog.text + + +def test_heartbeat_stopped(caplog): + caplog.set_level(logging.INFO) + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + + heartbeater_ = heartbeater.Heartbeater(manager) + heartbeater_.stop() + + heartbeater_.heartbeat() + + assert 'exiting' in caplog.text + + +def make_sleep_mark_manager_as_inactive(heartbeater): + # Make sleep mark the manager as inactive so that heartbeat() + # exits at the end of the first run. 
+ def trigger_inactive(timeout): + assert timeout + heartbeater._manager.is_active = False + + heartbeater._stop_event.wait = trigger_inactive + + +def test_heartbeat_once(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + heartbeater_ = heartbeater.Heartbeater(manager) + make_sleep_mark_manager_as_inactive(heartbeater_) + + heartbeater_.heartbeat() + + manager.heartbeat.assert_called_once() + + +@mock.patch('threading.Thread', autospec=True) +def test_start(thread): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + heartbeater_ = heartbeater.Heartbeater(manager) + + heartbeater_.start() + + thread.assert_called_once_with( + name=heartbeater._HEARTBEAT_WORKER_NAME, + target=heartbeater_.heartbeat) + + thread.return_value.start.assert_called_once() + + assert heartbeater_._thread is not None + + +@mock.patch('threading.Thread', autospec=True) +def test_start_already_started(thread): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + heartbeater_ = heartbeater.Heartbeater(manager) + heartbeater_._thread = mock.sentinel.thread + + with pytest.raises(ValueError): + heartbeater_.start() + + thread.assert_not_called() + + +def test_stop(): + manager = mock.create_autospec( + streaming_pull_manager.StreamingPullManager, instance=True) + heartbeater_ = heartbeater.Heartbeater(manager) + thread = mock.create_autospec(threading.Thread, instance=True) + heartbeater_._thread = thread + + heartbeater_.stop() + + assert heartbeater_._stop_event.is_set() + thread.join.assert_called_once() + assert heartbeater_._thread is None + + +def test_stop_no_join(): + heartbeater_ = heartbeater.Heartbeater(mock.sentinel.manager) + + heartbeater_.stop() diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 1aa979a504c9..53b23dd7049f 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -23,6 +23,7 @@ from google.cloud.pubsub_v1.subscriber import scheduler from google.cloud.pubsub_v1.subscriber._protocol import bidi from google.cloud.pubsub_v1.subscriber._protocol import dispatcher +from google.cloud.pubsub_v1.subscriber._protocol import heartbeater from google.cloud.pubsub_v1.subscriber._protocol import leaser from google.cloud.pubsub_v1.subscriber._protocol import requests from google.cloud.pubsub_v1.subscriber._protocol import streaming_pull_manager @@ -216,6 +217,26 @@ def test_send_streaming(): manager._rpc.send.assert_called_once_with(mock.sentinel.request) +def test_heartbeat(): + manager = make_manager() + manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + manager._rpc.is_active = True + + manager.heartbeat() + + manager._rpc.send.assert_called_once_with(types.StreamingPullRequest()) + + +def test_heartbeat_inactive(): + manager = make_manager() + manager._rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + manager._rpc.is_active = False + + manager.heartbeat() + + manager._rpc.send.assert_not_called() + + @mock.patch( 'google.cloud.pubsub_v1.subscriber._protocol.bidi.ResumableBidiRpc', autospec=True) @@ -228,11 +249,20 @@ def test_send_streaming(): @mock.patch( 'google.cloud.pubsub_v1.subscriber._protocol.dispatcher.Dispatcher', autospec=True) -def test_open(dispatcher, leaser, background_consumer, resumable_bidi_rpc): +@mock.patch( 
+ 'google.cloud.pubsub_v1.subscriber._protocol.heartbeater.Heartbeater', + autospec=True) +def test_open( + heartbeater, dispatcher, leaser, background_consumer, + resumable_bidi_rpc): manager = make_manager() manager.open(mock.sentinel.callback) + heartbeater.assert_called_once_with(manager) + heartbeater.return_value.start.assert_called_once() + assert manager._heartbeater == heartbeater.return_value + dispatcher.assert_called_once_with(manager, manager._scheduler.queue) dispatcher.return_value.start.assert_called_once() assert manager._dispatcher == dispatcher.return_value @@ -285,27 +315,32 @@ def make_running_manager(): dispatcher.Dispatcher, instance=True) manager._leaser = mock.create_autospec( leaser.Leaser, instance=True) + manager._heartbeater = mock.create_autospec( + heartbeater.Heartbeater, instance=True) return ( manager, manager._consumer, manager._dispatcher, manager._leaser, - manager._scheduler) + manager._heartbeater, manager._scheduler) def test_close(): - manager, consumer, dispatcher, leaser, scheduler = make_running_manager() + manager, consumer, dispatcher, leaser, heartbeater, scheduler = ( + make_running_manager()) manager.close() consumer.stop.assert_called_once() leaser.stop.assert_called_once() dispatcher.stop.assert_called_once() + heartbeater.stop.assert_called_once() scheduler.shutdown.assert_called_once() assert manager.is_active is False def test_close_inactive_consumer(): - manager, consumer, dispatcher, leaser, scheduler = make_running_manager() + manager, consumer, dispatcher, leaser, heartbeater, scheduler = ( + make_running_manager()) consumer.is_active = False manager.close() @@ -313,11 +348,12 @@ def test_close_inactive_consumer(): consumer.stop.assert_not_called() leaser.stop.assert_called_once() dispatcher.stop.assert_called_once() + heartbeater.stop.assert_called_once() scheduler.shutdown.assert_called_once() def test_close_idempotent(): - manager, _, _, _, scheduler = make_running_manager() + manager, _, _, _, _, scheduler = make_running_manager() manager.close() manager.close() @@ -326,7 +362,7 @@ def test_close_idempotent(): def test_close_callbacks(): - manager, _, _, _, _ = make_running_manager() + manager, _, _, _, _, _ = make_running_manager() callback = mock.Mock() @@ -352,7 +388,7 @@ def test__get_initial_request(): def test_on_response(): - manager, _, dispatcher, _, scheduler = make_running_manager() + manager, _, dispatcher, _, _, scheduler = make_running_manager() manager._callback = mock.sentinel.callback # Set up the messages. 
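These tests patch ``threading.Thread`` with ``autospec=True`` so that thread creation can be asserted without starting real threads. The same technique outside the pubsub classes, a sketch using the standalone ``mock`` package as the repository does::

    import threading

    import mock

    def spawn_worker(target):
        # Looks up threading.Thread at call time, so the patch below
        # intercepts the construction.
        thread = threading.Thread(name='worker', target=target)
        thread.daemon = True
        thread.start()

    @mock.patch('threading.Thread', autospec=True)
    def test_spawn_worker(thread):
        spawn_worker(target=sum)
        thread.assert_called_once_with(name='worker', target=sum)
        thread.return_value.start.assert_called_once_with()

    test_spawn_worker()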
From 69b201bb62d80301d7ef92be3e556ba8b0b0eb4c Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 30 May 2018 12:47:13 -0700 Subject: [PATCH 55/75] Release 0.35.2 (#5414) --- pubsub/CHANGELOG.md | 6 ++++++ pubsub/setup.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/pubsub/CHANGELOG.md b/pubsub/CHANGELOG.md index 3fe9694dae35..85af38e15069 100644 --- a/pubsub/CHANGELOG.md +++ b/pubsub/CHANGELOG.md @@ -4,6 +4,12 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.35.2 + +### Implementation Changes +- Add heartbeating to the streaming pull manager (#5413) +- Fix retrying of bidirectional RPCs and closing the streaming pull manager (#5412) + ## 0.35.1 ### Implementation Changes diff --git a/pubsub/setup.py b/pubsub/setup.py index 8d2bd4a24661..fd97318fee06 100644 --- a/pubsub/setup.py +++ b/pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.35.1' +version = '0.35.2' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 0f7c9279dd3bbca43d0c253ee2cca0ea0ac9e823 Mon Sep 17 00:00:00 2001 From: Aneep Tandel Date: Thu, 31 May 2018 02:49:38 +0530 Subject: [PATCH 56/75] BigTable: improve read rows validation performance (#5390) Modify validation code to minimize access to the chunk, since this access is very expensive. Instead, copy chunk to a cell data and validate it. Also, minimize access to chunk state variables, such as reset_row and commit_row. --- bigtable/google/cloud/bigtable/row_data.py | 184 ++++++++++----------- bigtable/tests/unit/test_row_data.py | 143 +++++++++++++++- spanner/benchmark/bin/ycsb | 0 3 files changed, 223 insertions(+), 104 deletions(-) mode change 100755 => 100644 spanner/benchmark/bin/ycsb diff --git a/bigtable/google/cloud/bigtable/row_data.py b/bigtable/google/cloud/bigtable/row_data.py index 3216be84dd2b..13db91b1268d 100644 --- a/bigtable/google/cloud/bigtable/row_data.py +++ b/bigtable/google/cloud/bigtable/row_data.py @@ -48,10 +48,10 @@ class Cell(object): :param labels: (Optional) List of strings. Labels applied to the cell. """ - def __init__(self, value, timestamp_micros, labels=()): + def __init__(self, value, timestamp_micros, labels=None): self.value = value self.timestamp_micros = timestamp_micros - self.labels = list(labels) + self.labels = list(labels) if labels is not None else [] @classmethod def from_pb(cls, cell_pb): @@ -374,6 +374,15 @@ class YieldRowsData(object): ROW_IN_PROGRESS = 'Row in progress' # Some cells complete for row CELL_IN_PROGRESS = 'Cell in progress' # Incomplete cell for row + STATE_START = 0 + STATE_NEW_ROW = 1 + STATE_ROW_IN_PROGRESS = 2 + STATE_CELL_IN_PROGRESS = 3 + + read_states = {STATE_START: START, STATE_NEW_ROW: NEW_ROW, + STATE_ROW_IN_PROGRESS: ROW_IN_PROGRESS, + STATE_CELL_IN_PROGRESS: CELL_IN_PROGRESS} + def __init__(self, read_method, request): # Counter for responses pulled from iterator self._counter = 0 @@ -400,17 +409,24 @@ def state(self): :returns: name of state corresponding to current row / chunk processing. """ + return self.read_states[self._state] + + @property + def _state(self): + """State machine state. + :rtype: int + :returns: id of state corresponding to currrent row / chunk + processing. 
+ """ + if self._previous_cell is not None: + return self.STATE_ROW_IN_PROGRESS if self.last_scanned_row_key is None: - return self.START + return self.STATE_START if self._row is None: - assert self._cell is None - assert self._previous_cell is None - return self.NEW_ROW + return self.STATE_NEW_ROW if self._cell is not None: - return self.CELL_IN_PROGRESS - if self._previous_cell is not None: - return self.ROW_IN_PROGRESS - return self.NEW_ROW # row added, no chunk yet processed + return self.STATE_CELL_IN_PROGRESS + return self.STATE_NEW_ROW # row added, no chunk yet processed def cancel(self): """Cancels the iterator, closing the stream.""" @@ -470,33 +486,37 @@ def read_rows(self): for chunk in response.chunks: - self._validate_chunk(chunk) - if chunk.reset_row: + self._validate_chunk_reset_row(chunk) row = self._row = None cell = self._cell = self._previous_cell = None continue - if row is None: - row = self._row = PartialRowData(chunk.row_key) - if cell is None: - qualifier = None - if chunk.HasField('qualifier'): - qualifier = chunk.qualifier.value + qualifier = chunk.qualifier.value + if qualifier == b'' and not chunk.HasField('qualifier'): + qualifier = None - cell = self._cell = PartialCellData( + cell = PartialCellData( chunk.row_key, chunk.family_name.value, qualifier, chunk.timestamp_micros, chunk.labels, chunk.value) + self._validate_cell_data(cell) + self._cell = cell self._copy_from_previous(cell) else: cell.append_value(chunk.value) + if row is None: + row = self._row = PartialRowData(cell.row_key) + if chunk.commit_row: + if chunk.value_size > 0: + raise InvalidChunk() + self._save_current_cell() yield self._row @@ -511,94 +531,66 @@ def read_rows(self): self._save_current_cell() cell = None - @staticmethod - def _validate_chunk_status(chunk): - """Helper for :meth:`_validate_chunk_row_in_progress`, etc.""" - # No reseet with other keys - if chunk.reset_row: - _raise_if(chunk.row_key) - _raise_if(chunk.HasField('family_name')) - _raise_if(chunk.HasField('qualifier')) - _raise_if(chunk.timestamp_micros) - _raise_if(chunk.labels) - _raise_if(chunk.value_size) - _raise_if(chunk.value) - # No commit with value size - _raise_if(chunk.commit_row and chunk.value_size > 0) - # No negative value_size (inferred as a general constraint). - _raise_if(chunk.value_size < 0) - - def _validate_chunk_new_row(self, chunk): - """Helper for :meth:`_validate_chunk`.""" - assert self.state == self.NEW_ROW - _raise_if(chunk.reset_row) - _raise_if(not chunk.row_key) - _raise_if(not chunk.family_name) - _raise_if(not chunk.qualifier) - # This constraint is not enforced in the Go example. - _raise_if(chunk.value_size > 0 and chunk.commit_row is not False) - # This constraint is from the Go example, not the spec. 
- _raise_if(self._previous_row is not None and - chunk.row_key <= self._previous_row.row_key) - - def _same_as_previous(self, chunk): - """Helper for :meth:`_validate_chunk_row_in_progress`""" - previous = self._previous_cell - return (chunk.row_key == previous.row_key and - chunk.family_name == previous.family_name and - chunk.qualifier == previous.qualifier and - chunk.labels == previous.labels) - - def _validate_chunk_row_in_progress(self, chunk): - """Helper for :meth:`_validate_chunk`""" - assert self.state == self.ROW_IN_PROGRESS - self._validate_chunk_status(chunk) - _raise_if(chunk.row_key and - chunk.row_key != self._row.row_key) - _raise_if(chunk.HasField('family_name') and - not chunk.HasField('qualifier')) - previous = self._previous_cell - _raise_if(self._same_as_previous(chunk) and - chunk.timestamp_micros <= previous.timestamp_micros) - - def _validate_chunk_cell_in_progress(self, chunk): - """Helper for :meth:`_validate_chunk`""" - assert self.state == self.CELL_IN_PROGRESS - self._validate_chunk_status(chunk) - self._copy_from_current(chunk) - - def _validate_chunk(self, chunk): - """Helper for :meth:`consume_next`.""" - if self.state == self.NEW_ROW: - self._validate_chunk_new_row(chunk) - if self.state == self.ROW_IN_PROGRESS: - self._validate_chunk_row_in_progress(chunk) - if self.state == self.CELL_IN_PROGRESS: - self._validate_chunk_cell_in_progress(chunk) + def _validate_cell_data(self, cell): + if self._state == self.STATE_ROW_IN_PROGRESS: + self._validate_cell_data_row_in_progress(cell) + if self._state == self.STATE_NEW_ROW: + self._validate_cell_data_new_row(cell) + if self._state == self.STATE_CELL_IN_PROGRESS: + self._copy_from_current(cell) + + def _validate_cell_data_new_row(self, cell): + if (not cell.row_key or + not cell.family_name or + cell.qualifier is None): + raise InvalidChunk() + + if (self._previous_row is not None and + cell.row_key <= self._previous_row.row_key): + raise InvalidChunk() + + def _validate_cell_data_row_in_progress(self, cell): + if ((cell.row_key and + cell.row_key != self._row.row_key) or + (cell.family_name and cell.qualifier is None)): + raise InvalidChunk() + + def _validate_chunk_reset_row(self, chunk): + # No reset for new row + _raise_if(self._state == self.STATE_NEW_ROW) + + # No reset with other keys + _raise_if(chunk.row_key) + _raise_if(chunk.HasField('family_name')) + _raise_if(chunk.HasField('qualifier')) + _raise_if(chunk.timestamp_micros) + _raise_if(chunk.labels) + _raise_if(chunk.value_size) + _raise_if(chunk.value) def _save_current_cell(self): """Helper for :meth:`consume_next`.""" row, cell = self._row, self._cell family = row._cells.setdefault(cell.family_name, {}) qualified = family.setdefault(cell.qualifier, []) - complete = Cell.from_pb(self._cell) + complete = Cell.from_pb(cell) qualified.append(complete) self._cell, self._previous_cell = None, cell - def _copy_from_current(self, chunk): - """Helper for :meth:`consume_next`.""" + def _copy_from_current(self, cell): current = self._cell if current is not None: - if not chunk.row_key: - chunk.row_key = current.row_key - if not chunk.HasField('family_name'): - chunk.family_name.value = current.family_name - if not chunk.HasField('qualifier'): - chunk.qualifier.value = current.qualifier - if not chunk.timestamp_micros: - chunk.timestamp_micros = current.timestamp_micros - if not chunk.labels: - chunk.labels.extend(current.labels) + if not cell.row_key: + cell.row_key = current.row_key + if not cell.family_name: + cell.family_name = current.family_name + # NOTE: 
``cell.qualifier`` **can** be empty string. + if cell.qualifier is None: + cell.qualifier = current.qualifier + if not cell.timestamp_micros: + cell.timestamp_micros = current.timestamp_micros + if not cell.labels: + cell.labels.extend(current.labels) def _copy_from_previous(self, cell): """Helper for :meth:`consume_next`.""" diff --git a/bigtable/tests/unit/test_row_data.py b/bigtable/tests/unit/test_row_data.py index a2293aae3943..b5f146c47715 100644 --- a/bigtable/tests/unit/test_row_data.py +++ b/bigtable/tests/unit/test_row_data.py @@ -17,6 +17,41 @@ import mock +from ._testing import _make_credentials + + +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + return self.channel_stub.responses.pop() + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, + method, + request_serializer=None, + response_deserializer=None): + return MultiCallableStub(method, self) + + def unary_stream(self, + method, + request_serializer=None, + response_deserializer=None): + return MultiCallableStub(method, self) + class TestCell(unittest.TestCase): timestamp_micros = 18738724000 # Make sure millis granularity @@ -369,6 +404,15 @@ def _get_target_class(): def _make_one(self, *args, **kwargs): return self._get_target_class()(*args, **kwargs) + @staticmethod + def _get_target_client_class(): + from google.cloud.bigtable.client import Client + + return Client + + def _make_client(self, *args, **kwargs): + return self._get_target_client_class()(*args, **kwargs) + def test_state_start(self): client = _Client() iterator = _MockCancellableIterator() @@ -379,13 +423,36 @@ def test_state_start(self): self.assertEqual(yrd.state, yrd.START) def test_state_new_row_w_row(self): - client = _Client() - iterator = _MockCancellableIterator() - client._data_stub = mock.MagicMock() - client._data_stub.ReadRows.side_effect = [iterator] + from google.cloud.bigtable_v2.gapic import bigtable_client + + chunk = _ReadRowsResponseCellChunkPB( + row_key=self.ROW_KEY, + family_name=self.FAMILY_NAME, + qualifier=self.QUALIFIER, + timestamp_micros=self.TIMESTAMP_MICROS, + value=self.VALUE, + commit_row=True, + ) + chunks = [chunk] + + response = _ReadRowsResponseV2(chunks) + iterator = _MockCancellableIterator(response) + channel = ChannelStub(responses=[iterator]) + data_api = bigtable_client.BigtableClient(channel=channel) + credentials = _make_credentials() + client = self._make_client(project='project-id', + credentials=credentials, admin=True) + client._table_data_client = data_api request = object() - yrd = self._make_one(client._data_stub.ReadRows, request) - yrd.last_scanned_row_key = '' + yrd = self._make_one( + client._table_data_client.bigtable_stub.ReadRows, request) + yrd._response_iterator = iterator + yrd._last_scanned_row_key = '' + rows = [row for row in yrd.read_rows()] + + result = rows[0] + self.assertEqual(result.row_key, self.ROW_KEY) + yrd._row = object() self.assertEqual(yrd.state, yrd.NEW_ROW) @@ -441,6 +508,22 @@ def test__copy_from_current_blank(self): self.assertEqual(chunk.timestamp_micros, TIMESTAMP_MICROS) self.assertEqual(chunk.labels, LABELS) + def 
test__copy_from_current_empty_chunk(self): + client = _Client() + client._data_stub = mock.MagicMock() + request = object() + yrd = self._make_one(client._data_stub.ReadRows, request) + yrd._cell = _PartialCellData() + yrd._cell.qualifier = b'' + chunks = _generate_cell_chunks(['']) + chunk = chunks[0] + yrd._copy_from_current(chunk) + self.assertEqual(chunk.row_key, b'') + self.assertEqual(chunk.family_name.value, '') + self.assertEqual(chunk.qualifier.value, b'') + self.assertEqual(chunk.timestamp_micros, 0) + self.assertEqual(chunk.labels, []) + def test__copy_from_previous_unset(self): client = _Client() client._data_stub = mock.MagicMock() @@ -448,7 +531,7 @@ def test__copy_from_previous_unset(self): yrd = self._make_one(client._data_stub.ReadRows, request) cell = _PartialCellData() yrd._copy_from_previous(cell) - self.assertEqual(cell.row_key, '') + self.assertEqual(cell.row_key, b'') self.assertEqual(cell.family_name, u'') self.assertIsNone(cell.qualifier) self.assertEqual(cell.timestamp_micros, 0) @@ -544,6 +627,50 @@ def test_invalid_empty_chunk(self): with self.assertRaises(InvalidChunk): self._consume_all(yrd) + def test_state_cell_in_progress(self): + LABELS = ['L1', 'L2'] + + client = _Client() + chunk = _ReadRowsResponseCellChunkPB( + row_key=self.ROW_KEY, + family_name=self.FAMILY_NAME, + qualifier=self.QUALIFIER, + timestamp_micros=self.TIMESTAMP_MICROS, + value=self.VALUE, + commit_row=True, + ) + chunks = [chunk] + response = _ReadRowsResponseV2(chunks) + iterator = _MockCancellableIterator(response) + client._data_stub = mock.MagicMock() + client._data_stub.ReadRows.side_effect = [iterator] + request = object() + yrd = self._make_one(client._data_stub.ReadRows, request) + self._consume_all(yrd) + yrd._last_scanned_row_key = '' + yrd._row = object() + cell = _PartialCellData( + row_key=self.ROW_KEY, + family_name=self.FAMILY_NAME, + qualifier=self.QUALIFIER, + timestamp_micros=self.TIMESTAMP_MICROS, + labels=LABELS + ) + + yrd._cell = cell + more_cell_data = _PartialCellData( + value=self.VALUE + ) + + yrd._validate_cell_data(more_cell_data) + + self.assertEqual(more_cell_data.row_key, self.ROW_KEY) + self.assertEqual(more_cell_data.family_name, self.FAMILY_NAME) + self.assertEqual(more_cell_data.qualifier, self.QUALIFIER) + self.assertEqual(more_cell_data.timestamp_micros, + self.TIMESTAMP_MICROS) + self.assertEqual(more_cell_data.labels, LABELS) + def test_yield_rows_data(self): client = _Client() @@ -848,7 +975,7 @@ def next(self): class _PartialCellData(object): - row_key = '' + row_key = b'' family_name = u'' qualifier = None timestamp_micros = 0 diff --git a/spanner/benchmark/bin/ycsb b/spanner/benchmark/bin/ycsb old mode 100755 new mode 100644 From 208250a0af66b75b0ec0bc0e3b2ceb5439d675c5 Mon Sep 17 00:00:00 2001 From: Christopher Wilcox Date: Wed, 30 May 2018 16:17:11 -0700 Subject: [PATCH 57/75] Add redis documentation to main index.rst (#5405) * Fix #5357 by adding redis documentation to main index.rst --- docs/index.rst | 1 + docs/redis | 1 + docs/requirements.txt | 1 + redis/docs/index.rst | 6 ++--- .../redis_v1beta1/gapic/cloud_redis_client.py | 22 ++++++------------- 5 files changed, 13 insertions(+), 18 deletions(-) create mode 120000 docs/redis diff --git a/docs/index.rst b/docs/index.rst index d0831181e999..3facc3bdd945 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -23,6 +23,7 @@ error-reporting/usage monitoring/index logging/usage + redis/index storage/client texttospeech/index translate/usage diff --git a/docs/redis b/docs/redis new file mode 
120000 index 000000000000..351c953543ba --- /dev/null +++ b/docs/redis @@ -0,0 +1 @@ +../redis/docs \ No newline at end of file diff --git a/docs/requirements.txt b/docs/requirements.txt index 7cb4e39f0bfc..a9e9efb3e9fc 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -21,6 +21,7 @@ error_reporting/ monitoring/ pubsub/ oslogin/ +redis/ resource_manager/ runtimeconfig/ spanner/ diff --git a/redis/docs/index.rst b/redis/docs/index.rst index 6735c3065158..2686f32f5c0b 100644 --- a/redis/docs/index.rst +++ b/redis/docs/index.rst @@ -9,8 +9,8 @@ Redis instances on the Google Cloud Platform. .. _Alpha: https://github.com/GoogleCloudPlatform/google-cloud-python/blob/master/README.rst .. _Google Cloud Memorystore for Redis API: https://cloud.google.com/redis -.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/redis/usage.html -.. _Product Documentation: https://cloud.google.com/redis +.. _Client Library Documentation: https://googlecloudplatform.github.io/google-cloud-python/stable/redis/index.rst +.. _Product Documentation: https://cloud.google.com/memorystore Quick Start ----------- @@ -81,4 +81,4 @@ Api Reference :maxdepth: 2 gapic/v1beta1/api - gapic/v1beta1/types \ No newline at end of file + gapic/v1beta1/types diff --git a/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py b/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py index 70ecf9b65897..d66da2ec8637 100644 --- a/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py +++ b/redis/google/cloud/redis_v1beta1/gapic/cloud_redis_client.py @@ -51,6 +51,7 @@ class CloudRedisClient(object): * Each project has a collection of available locations, named: ``/locations/*`` * Each location has a collection of Redis instances, named: ``/instances/*`` * As such, Redis instances are resources of the form: + ``/projects/{project_id}/locations/{location_id}/instances/{instance_id}`` Note that location_id must be refering to a GCP ``region``; for example: @@ -219,9 +220,7 @@ def list_instances(self, Args: parent (str): Required. The resource name of the instance location using the form: - :: - - `projects/{project_id}/locations/{location_id}` + `projects/{project_id}/locations/{location_id}` where ``location_id`` refers to a GCP region page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- @@ -290,9 +289,7 @@ def get_instance(self, Args: name (str): Required. Redis instance resource name using the form: - :: - - `projects/{project_id}/locations/{location_id}/instances/{instance_id}` + `projects/{project_id}/locations/{location_id}/instances/{instance_id}` where ``location_id`` refers to a GCP region retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not @@ -366,11 +363,8 @@ def create_instance(self, Args: parent (str): Required. The resource name of the instance location using the form: - :: - - `projects/{project_id}/locations/{location_id}` - where ``location_id`` refers to a GCP region - instance_id (str): Required. The logical name of the Redis instance in the customer project + `projects/{project_id}/locations/{location_id}` where ``location_id`` refers to a GCP region + instance_id (str): Required. The logical name of the Redis instance in the customer project with the following restrictions: * Must contain only lowercase letters, numbers, and hyphens. 
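The docstring cleanups above keep repeating the same resource-name grammar; the path helpers amount to simple string formatting. Illustrative helpers (not the generated client's own methods)::

    def location_path(project_id, location_id):
        return 'projects/{}/locations/{}'.format(project_id, location_id)

    def instance_path(project_id, location_id, instance_id):
        return '{}/instances/{}'.format(
            location_path(project_id, location_id), instance_id)

    assert instance_path('my-project', 'us-central1', 'my-redis') == (
        'projects/my-project/locations/us-central1/instances/my-redis')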
@@ -532,11 +526,9 @@ def delete_instance(self, Args: name (str): Required. Redis instance resource name using the form: - :: - - `projects/{project_id}/locations/{location_id}/instances/{instance_id}` + `projects/{project_id}/locations/{location_id}/instances/{instance_id}` where ``location_id`` refers to a GCP region - retry (Optional[google.api_core.retry.Retry]): A retry object used + retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait From 61bbc7dd078c99373c288e7cdbdf1ea29c0fcbb2 Mon Sep 17 00:00:00 2001 From: Winston Huang Date: Thu, 31 May 2018 06:07:53 -0700 Subject: [PATCH 58/75] Prevent process_read_batch from mutating params (#5416) --- spanner/google/cloud/spanner_v1/database.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/spanner/google/cloud/spanner_v1/database.py b/spanner/google/cloud/spanner_v1/database.py index 8379afe1013e..b55fc9a24690 100644 --- a/spanner/google/cloud/spanner_v1/database.py +++ b/spanner/google/cloud/spanner_v1/database.py @@ -16,6 +16,7 @@ import re import threading +import copy from google.api_core.gapic_v1 import client_info import google.auth.credentials @@ -580,7 +581,7 @@ def process_read_batch(self, batch): :rtype: :class:`~google.cloud.spanner_v1.streamed.StreamedResultSet` :returns: a result set instance which can be used to consume rows. """ - kwargs = batch['read'] + kwargs = copy.deepcopy(batch['read']) keyset_dict = kwargs.pop('keyset') kwargs['keyset'] = KeySet._from_dict(keyset_dict) return self._get_snapshot().read( From 4f586b6d84fb7ec4c89c1ef2bc6d55531feb9a33 Mon Sep 17 00:00:00 2001 From: Aneep Tandel Date: Thu, 31 May 2018 19:18:51 +0530 Subject: [PATCH 59/75] BigTable: Add admin app profile methods on Instance (#5315) --- bigtable/google/cloud/bigtable/instance.py | 200 ++++++++++++ bigtable/tests/system.py | 33 ++ bigtable/tests/unit/test_instance.py | 361 +++++++++++++++++++++ 3 files changed, 594 insertions(+) diff --git a/bigtable/google/cloud/bigtable/instance.py b/bigtable/google/cloud/bigtable/instance.py index 8543f88409d4..97b90e1ab9d4 100644 --- a/bigtable/google/cloud/bigtable/instance.py +++ b/bigtable/google/cloud/bigtable/instance.py @@ -20,6 +20,8 @@ from google.cloud.bigtable.table import Table from google.cloud.bigtable.cluster import DEFAULT_SERVE_NODES +from google.protobuf import field_mask_pb2 + from google.cloud.bigtable_admin_v2 import enums from google.cloud.bigtable_admin_v2.types import instance_pb2 @@ -27,6 +29,8 @@ _EXISTING_INSTANCE_LOCATION_ID = 'see-existing-cluster' _INSTANCE_NAME_RE = re.compile(r'^projects/(?P[^/]+)/' r'instances/(?P[a-z][-a-z0-9]*)$') +ROUTING_POLICY_TYPE_ANY = 1 +ROUTING_POLICY_TYPE_SINGLE = 2 _STORAGE_TYPE_UNSPECIFIED = enums.StorageType.STORAGE_TYPE_UNSPECIFIED @@ -282,3 +286,199 @@ def list_tables(self): result.append(self.table(table_id)) return result + + def create_app_profile(self, app_profile_id, routing_policy_type, + description='', ignore_warnings=None, + cluster_id=None, allow_transactional_writes=False): + """Creates an app profile within an instance. + + :type: app_profile_id: str + :param app_profile_id: The unique name for the new app profile. + + :type: routing_policy_type: int + :param: routing_policy_type: There are two routing policies + ROUTING_POLICY_TYPE_ANY = 1 and + ROUTING_POLICY_TYPE_SINGLE = 2. 
+ If ROUTING_POLICY_TYPE_ANY is + specified, a + MultiClusterRoutingUseAny policy is + created; if ROUTING_POLICY_TYPE_SINGLE + is specified, a + SingleClusterRouting policy is created + using the cluster_id and + allow_transactional_writes parameters. + + :type: description: str + :param: description: (Optional) Long form description of the use + case for this AppProfile. + + :type: ignore_warnings: bool + :param: ignore_warnings: (Optional) If true, ignore safety checks when + creating the app profile. + + :type: cluster_id: str + :param: cluster_id: (Optional) Unique cluster_id which is only required + when routing_policy_type is + ROUTING_POLICY_TYPE_SINGLE. + + :type: allow_transactional_writes: bool + :param: allow_transactional_writes: (Optional) If true, allow + transactional writes for + ROUTING_POLICY_TYPE_SINGLE. + + :rtype: :class:`~google.cloud.bigtable_admin_v2.types.AppProfile` + :return: The AppProfile instance. + :raises: :class:`ValueError` If the routing + policy is not set. + """ + if not routing_policy_type: + raise ValueError('AppProfile requires a routing policy.') + + single_cluster_routing = None + multi_cluster_routing_use_any = None + instance_admin_client = self._client._instance_admin_client + name = instance_admin_client.app_profile_path( + self._client.project, self.instance_id, app_profile_id) + + if routing_policy_type == ROUTING_POLICY_TYPE_ANY: + multi_cluster_routing_use_any = ( + instance_pb2.AppProfile.MultiClusterRoutingUseAny()) + + if routing_policy_type == ROUTING_POLICY_TYPE_SINGLE: + single_cluster_routing = ( + instance_pb2.AppProfile.SingleClusterRouting( + cluster_id=cluster_id, + allow_transactional_writes=allow_transactional_writes + )) + + app_profile = instance_pb2.AppProfile( + name=name, description=description, + multi_cluster_routing_use_any=multi_cluster_routing_use_any, + single_cluster_routing=single_cluster_routing + ) + + return self._client._instance_admin_client.create_app_profile( + parent=self.name, app_profile_id=app_profile_id, + app_profile=app_profile, ignore_warnings=ignore_warnings) + + def get_app_profile(self, app_profile_id): + """Gets information about an app profile. + + :type: app_profile_id: str + :param app_profile_id: The unique name for the app profile. + + :rtype: :class:`~google.cloud.bigtable_admin_v2.types.AppProfile` + :return: The AppProfile instance. + """ + instance_admin_client = self._client._instance_admin_client + name = instance_admin_client.app_profile_path( + self._client.project, self.instance_id, app_profile_id) + return self._client._instance_admin_client.get_app_profile(name) + + def list_app_profiles(self): + """Lists information about app profiles in an instance. + + :rtype: :list:[`~google.cloud.bigtable_admin_v2.types.AppProfile`] + :return: A :list:[`~google.cloud.bigtable_admin_v2.types.AppProfile`]. + By default, this is a list of + :class:`~google.cloud.bigtable_admin_v2.types.AppProfile` + instances. + """ + list_app_profiles = list( + self._client._instance_admin_client.list_app_profiles(self.name)) + return list_app_profiles + + def update_app_profile(self, app_profile_id, update_mask, + routing_policy_type, description='', + ignore_warnings=None, + cluster_id=None, allow_transactional_writes=False): + """Updates an app profile within an instance. + + :type: app_profile_id: str + :param app_profile_id: The unique name of the app profile to update. + + :type: update_mask: list + :param: update_mask: Names of the AppProfile fields that + need to be updated.
+ + :type: routing_policy_type: int + :param: routing_policy_type: There are two routing policies + ROUTING_POLICY_TYPE_ANY = 1 and + ROUTING_POLICY_TYPE_SINGLE = 2. + If ROUTING_POLICY_TYPE_ANY is + specified, a + MultiClusterRoutingUseAny policy is + created; if ROUTING_POLICY_TYPE_SINGLE + is specified, a + SingleClusterRouting policy is created + using the cluster_id and + allow_transactional_writes parameters. + + :type: description: str + :param: description: (Optional) Long form description of the + use case for this AppProfile. + + :type: ignore_warnings: bool + :param: ignore_warnings: (Optional) If true, ignore safety checks when + updating the app profile. + + :type: cluster_id: str + :param: cluster_id: (Optional) Unique cluster_id which is only required + when routing_policy_type is + ROUTING_POLICY_TYPE_SINGLE. + + :type: allow_transactional_writes: bool + :param: allow_transactional_writes: (Optional) If true, allow + transactional writes for + ROUTING_POLICY_TYPE_SINGLE. + + :rtype: :class:`~google.cloud.bigtable_admin_v2.types.AppProfile` + :return: The AppProfile instance. + :raises: :class:`ValueError` If the routing + policy is not set. + """ + if not routing_policy_type: + raise ValueError('AppProfile requires a routing policy.') + + single_cluster_routing = None + multi_cluster_routing_use_any = None + instance_admin_client = self._client._instance_admin_client + name = instance_admin_client.app_profile_path( + self._client.project, self.instance_id, app_profile_id) + + if routing_policy_type == ROUTING_POLICY_TYPE_ANY: + multi_cluster_routing_use_any = ( + instance_pb2.AppProfile.MultiClusterRoutingUseAny()) + + if routing_policy_type == ROUTING_POLICY_TYPE_SINGLE: + single_cluster_routing = ( + instance_pb2.AppProfile.SingleClusterRouting( + cluster_id=cluster_id, + allow_transactional_writes=allow_transactional_writes + )) + + update_app_profile = instance_pb2.AppProfile( + name=name, description=description, + multi_cluster_routing_use_any=multi_cluster_routing_use_any, + single_cluster_routing=single_cluster_routing + ) + update_mask = field_mask_pb2.FieldMask(paths=update_mask) + + return self._client._instance_admin_client.update_app_profile( + app_profile=update_app_profile, update_mask=update_mask, + ignore_warnings=ignore_warnings) + + def delete_app_profile(self, app_profile_id, ignore_warnings=False): + """Deletes an app profile from an instance. + + :type: app_profile_id: str + :param app_profile_id: The unique name for the app profile to delete. + + :raises: :class:`google.api_core.exceptions.GoogleAPICallError` if the request + failed for any reason; :class:`google.api_core.exceptions.RetryError` + if the request failed due to a retryable error and retry + attempts failed; :class:`ValueError` if the parameters are invalid.
+ """ + instance_admin_client = self._client._instance_admin_client + app_profile_path = instance_admin_client.app_profile_path( + self._client.project, self.instance_id, app_profile_id) + self._client._instance_admin_client.delete_app_profile( + app_profile_path, ignore_warnings) diff --git a/bigtable/tests/system.py b/bigtable/tests/system.py index 067bd1b86a2c..b7896d19e294 100644 --- a/bigtable/tests/system.py +++ b/bigtable/tests/system.py @@ -40,6 +40,8 @@ LOCATION_ID = 'us-central1-c' INSTANCE_ID = 'g-c-p' + unique_resource_id('-') TABLE_ID = 'google-cloud-python-test-table' +APP_PROFILE_ID = 'app-profile-id' +CLUSTER_ID = INSTANCE_ID+'-cluster' COLUMN_FAMILY_ID1 = u'col-fam-id1' COLUMN_FAMILY_ID2 = u'col-fam-id2' COL_NAME1 = b'col-name1' @@ -51,6 +53,8 @@ CELL_VAL4 = b'foo' ROW_KEY = b'row-key' ROW_KEY_ALT = b'row-key-alt' +ROUTING_POLICY_TYPE_ANY = 1 +ROUTING_POLICY_TYPE_SINGLE = 2 EXISTING_INSTANCES = [] @@ -174,6 +178,35 @@ def test_update(self): Config.INSTANCE.display_name = OLD_DISPLAY_NAME Config.INSTANCE.update() + def test_create_app_profile_with_multi_routing_policy(self): + # Create a new instance instance and reload it. + description = 'Foo App Profile' + instance = Config.INSTANCE + + app_profile = instance.create_app_profile( + app_profile_id=APP_PROFILE_ID+'-multi', + routing_policy_type=ROUTING_POLICY_TYPE_ANY, + description=description, + ignore_warnings=True + ) + + self.assertEqual(app_profile.description, description) + + def test_create_app_profile_with_single_routing_policy(self): + # Create a new instance instance and reload it. + description = 'Foo App Profile' + instance = Config.INSTANCE + + app_profile = instance.create_app_profile( + app_profile_id=APP_PROFILE_ID+'-single', + routing_policy_type=ROUTING_POLICY_TYPE_SINGLE, + description=description, + cluster_id=CLUSTER_ID, + ignore_warnings=True + ) + + self.assertEqual(app_profile.description, description) + class TestTableAdminAPI(unittest.TestCase): diff --git a/bigtable/tests/unit/test_instance.py b/bigtable/tests/unit/test_instance.py index b3654ce8091e..46bb52799b20 100644 --- a/bigtable/tests/unit/test_instance.py +++ b/bigtable/tests/unit/test_instance.py @@ -20,6 +20,33 @@ from ._testing import _make_credentials +class MultiCallableStub(object): + """Stub for the grpc.UnaryUnaryMultiCallable interface.""" + + def __init__(self, method, channel_stub): + self.method = method + self.channel_stub = channel_stub + + def __call__(self, request, timeout=None, metadata=None, credentials=None): + self.channel_stub.requests.append((self.method, request)) + + return self.channel_stub.responses.pop() + + +class ChannelStub(object): + """Stub for the grpc.Channel interface.""" + + def __init__(self, responses=[]): + self.responses = responses + self.requests = [] + + def unary_unary(self, + method, + request_serializer=None, + response_deserializer=None): + return MultiCallableStub(method, self) + + class TestInstance(unittest.TestCase): PROJECT = 'project' @@ -27,6 +54,9 @@ class TestInstance(unittest.TestCase): INSTANCE_NAME = 'projects/' + PROJECT + '/instances/' + INSTANCE_ID LOCATION_ID = 'locname' LOCATION = 'projects/' + PROJECT + '/locations/' + LOCATION_ID + APP_PROFILE_PATH = ( + 'projects/' + PROJECT + '/instances/' + INSTANCE_ID + + '/appProfiles/') DISPLAY_NAME = 'display_name' OP_ID = 8915 OP_NAME = ('operations/projects/%s/instances/%soperations/%d' % @@ -408,6 +438,330 @@ def test_list_tables_failure_name_bad_before(self): with self.assertRaises(ValueError): 
self._list_tables_helper(table_name=BAD_TABLE_NAME) + def test_create_app_profile_with_wrong_routing_policy(self): + credentials = _make_credentials() + client = self._make_client(project=self.PROJECT, + credentials=credentials, admin=True) + instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID) + + app_profile_id = 'appProfileId1262094415' + update_mask = [] + + # Create AppProfile with exception + with self.assertRaises(ValueError): + instance.create_app_profile(app_profile_id=app_profile_id, + routing_policy_type=None) + + with self.assertRaises(ValueError): + instance.update_app_profile(app_profile_id, + update_mask=update_mask, + routing_policy_type=None) + + def test_create_app_profile_with_multi_routing_policy(self): + from google.cloud.bigtable_admin_v2.proto import instance_pb2 + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_instance_admin_client) + + credentials = _make_credentials() + client = self._make_client(project=self.PROJECT, + credentials=credentials, admin=True) + instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID) + + description = 'description-1724546052' + app_profile_id = 'appProfileId1262094415' + expected_response = { + 'name': self.APP_PROFILE_PATH + app_profile_id, + 'description': description, + 'multi_cluster_routing_use_any': + instance_pb2.AppProfile.MultiClusterRoutingUseAny() + } + expected_request = { + 'app_profile_id': app_profile_id, + 'routing_policy_type': 1, + 'description': description + } + expected_response = instance_pb2.AppProfile(**expected_response) + + channel = ChannelStub(responses=[expected_response]) + instance_api = ( + bigtable_instance_admin_client.BigtableInstanceAdminClient( + channel=channel)) + + # Patch the stub used by the API method. + client._instance_admin_client = instance_api + + # Perform the method and check the result. + result = instance.create_app_profile(**expected_request) + + parent = client._instance_admin_client.instance_path( + self.PROJECT, self.INSTANCE_ID) + expected_request = _CreateAppProfileRequestPB( + parent=parent, app_profile_id=app_profile_id, + app_profile=expected_response, + ) + + actual_request = channel.requests[0][1] + assert expected_request == actual_request + self.assertEqual(result, expected_response) + + def test_create_app_profile_with_single_routing_policy(self): + from google.cloud.bigtable_admin_v2.proto import instance_pb2 + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_instance_admin_client) + + credentials = _make_credentials() + client = self._make_client(project=self.PROJECT, + credentials=credentials, admin=True) + instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID) + + description = 'description-1724546052' + app_profile_id = 'appProfileId1262094415' + cluster_id = 'cluster-id' + expected_response = { + 'name': self.APP_PROFILE_PATH + app_profile_id, + 'description': description, + 'single_cluster_routing': + instance_pb2.AppProfile.SingleClusterRouting( + cluster_id=cluster_id, + allow_transactional_writes=False + ) + } + expected_request = { + 'app_profile_id': app_profile_id, + 'routing_policy_type': 2, + 'description': description, + 'cluster_id': cluster_id + } + expected_response = instance_pb2.AppProfile(**expected_response) + + channel = ChannelStub(responses=[expected_response]) + instance_api = ( + bigtable_instance_admin_client.BigtableInstanceAdminClient( + channel=channel)) + + # Patch the stub used by the API method. 
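+        # The client here wraps ChannelStub, so the call below is answered
+        # from its canned `responses` and the outgoing request proto is
+        # recorded in `channel.requests` for the assertions that follow.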
+ client._instance_admin_client = instance_api + + # Perform the method and check the result. + result = instance.create_app_profile(**expected_request) + + parent = client._instance_admin_client.instance_path( + self.PROJECT, self.INSTANCE_ID) + expected_request = _CreateAppProfileRequestPB( + parent=parent, app_profile_id=app_profile_id, + app_profile=expected_response, + ) + + actual_request = channel.requests[0][1] + assert expected_request == actual_request + self.assertEqual(result, expected_response) + + def test_get_app_profile(self): + from google.cloud.bigtable_admin_v2.proto import ( + instance_pb2 as instance_data_v2_pb2) + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_instance_admin_client) + + instance_api = ( + bigtable_instance_admin_client.BigtableInstanceAdminClient( + mock.Mock())) + + credentials = _make_credentials() + client = self._make_client(project=self.PROJECT, + credentials=credentials, admin=True) + instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID) + + name = 'name3373707' + etag = 'etag3123477' + description = 'description-1724546052' + expected_response = { + 'name': name, + 'etag': etag, + 'description': description + } + expected_response = instance_data_v2_pb2.AppProfile( + **expected_response) + + response_pb = instance_data_v2_pb2.AppProfile( + name=name, + etag=etag, + description=description + ) + + # Patch the stub used by the API method. + client._instance_admin_client = instance_api + bigtable_instance_stub = ( + client._instance_admin_client.bigtable_instance_admin_stub) + bigtable_instance_stub.GetAppProfile.side_effect = [response_pb] + + # Perform the method and check the result. + app_profile_id = 'appProfileId1262094415' + result = instance.get_app_profile(app_profile_id=app_profile_id) + + self.assertEqual(result, expected_response) + + def test_list_app_profiles(self): + from google.cloud.bigtable_admin_v2.proto import ( + bigtable_instance_admin_pb2 as instance_messages_v1_pb2) + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_instance_admin_client) + + instance_api = ( + bigtable_instance_admin_client.BigtableInstanceAdminClient( + mock.Mock())) + + credentials = _make_credentials() + client = self._make_client(project=self.PROJECT, + credentials=credentials, admin=True) + instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID) + + # Setup Expected Response + next_page_token = '' + app_profiles_element = {} + app_profiles = [app_profiles_element] + expected_response = { + 'next_page_token': next_page_token, + 'app_profiles': app_profiles + } + expected_response = instance_messages_v1_pb2.ListAppProfilesResponse( + **expected_response) + + # Patch the stub used by the API method. + client._instance_admin_client = instance_api + bigtable_instance_stub = ( + client._instance_admin_client.bigtable_instance_admin_stub) + bigtable_instance_stub.ListAppProfiles.side_effect = [ + expected_response] + + # Perform the method and check the result. 
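+        # Instance.list_app_profiles drains the pager into a plain list,
+        # so the single stubbed AppProfile comes back as element 0.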
+ response = instance.list_app_profiles() + + self.assertEqual(response[0], expected_response.app_profiles[0]) + + def test_update_app_profile(self): + import datetime + from google.api_core import operation + from google.longrunning import operations_pb2 + from google.protobuf.any_pb2 import Any + from google.cloud.bigtable_admin_v2.proto import ( + bigtable_instance_admin_pb2 as messages_v2_pb2) + from google.cloud._helpers import _datetime_to_pb_timestamp + from tests.unit._testing import _FakeStub + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_instance_admin_client) + + instance_api = ( + bigtable_instance_admin_client.BigtableInstanceAdminClient( + mock.Mock())) + + credentials = _make_credentials() + client = self._make_client(project=self.PROJECT, + credentials=credentials, admin=True) + instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID) + + # Create response_pb + NOW = datetime.datetime.utcnow() + NOW_PB = _datetime_to_pb_timestamp(NOW) + metadata = messages_v2_pb2.CreateInstanceMetadata(request_time=NOW_PB) + type_url = 'type.googleapis.com/%s' % ( + messages_v2_pb2.CreateInstanceMetadata.DESCRIPTOR.full_name,) + response_pb = operations_pb2.Operation( + name=self.OP_NAME, + metadata=Any( + type_url=type_url, + value=metadata.SerializeToString(), + ) + ) + + # Patch the stub used by the API method. + client._instance_admin_client = instance_api + stub = _FakeStub(response_pb) + client._instance_admin_client.bigtable_instance_admin_stub = stub + update_mask = [] + + # Perform the method and check the result. + app_profile_id = 'appProfileId1262094415' + result = instance.update_app_profile(app_profile_id, + update_mask=update_mask, + routing_policy_type=1) + + self.assertIsInstance(result, operation.Operation) + + def test_update_app_profile_with_single_routing_policy(self): + import datetime + from google.api_core import operation + from google.longrunning import operations_pb2 + from google.protobuf.any_pb2 import Any + from google.cloud.bigtable_admin_v2.proto import ( + bigtable_instance_admin_pb2 as messages_v2_pb2) + from google.cloud._helpers import _datetime_to_pb_timestamp + from tests.unit._testing import _FakeStub + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_instance_admin_client) + + instance_api = ( + bigtable_instance_admin_client.BigtableInstanceAdminClient( + mock.Mock())) + + credentials = _make_credentials() + client = self._make_client(project=self.PROJECT, + credentials=credentials, admin=True) + instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID) + + # Create response_pb + NOW = datetime.datetime.utcnow() + NOW_PB = _datetime_to_pb_timestamp(NOW) + metadata = messages_v2_pb2.CreateInstanceMetadata(request_time=NOW_PB) + type_url = 'type.googleapis.com/%s' % ( + messages_v2_pb2.CreateInstanceMetadata.DESCRIPTOR.full_name,) + response_pb = operations_pb2.Operation( + name=self.OP_NAME, + metadata=Any( + type_url=type_url, + value=metadata.SerializeToString(), + ) + ) + + # Patch the stub used by the API method. + client._instance_admin_client = instance_api + stub = _FakeStub(response_pb) + client._instance_admin_client.bigtable_instance_admin_stub = stub + update_mask = [] + + # Perform the method and check the result. 
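+        # routing_policy_type=2 exercises the SingleClusterRouting branch;
+        # the stubbed response surfaces as a long-running Operation.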
+ app_profile_id = 'appProfileId1262094415' + cluster_id = 'cluster-id' + result = instance.update_app_profile(app_profile_id, + update_mask=update_mask, + routing_policy_type=2, + cluster_id=cluster_id) + + self.assertIsInstance(result, operation.Operation) + + def test_delete_app_profile(self): + from google.cloud.bigtable_admin_v2.gapic import ( + bigtable_instance_admin_client) + + instance_api = ( + bigtable_instance_admin_client.BigtableInstanceAdminClient( + mock.Mock())) + credentials = _make_credentials() + client = self._make_client(project=self.PROJECT, + credentials=credentials, admin=True) + instance = self._make_one(self.INSTANCE_ID, client, self.LOCATION_ID) + + # Patch the stub used by the API method. + client._instance_admin_client = instance_api + + ignore_warnings = True + + expected_result = None # delete() has no return value. + + app_profile_id = 'appProfileId1262094415' + result = instance.delete_app_profile(app_profile_id, ignore_warnings) + + self.assertEqual(expected_result, result) + class _Client(object): @@ -419,3 +773,10 @@ def __init__(self, project): def __eq__(self, other): return (other.project == self.project and other.project_name == self.project_name) + + +def _CreateAppProfileRequestPB(*args, **kw): + from google.cloud.bigtable_admin_v2.proto import ( + bigtable_instance_admin_pb2 as instance_v2_pb2) + + return instance_v2_pb2.CreateAppProfileRequest(*args, **kw) From d303d51d839a7044e149c7cc2270ebaee7510f14 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 31 May 2018 13:17:46 -0400 Subject: [PATCH 60/75] Pass through 'session.posargs' when running Bigtable system tests. (#5418) While we're at it, use a develop install in system tests. --- bigtable/nox.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/bigtable/nox.py b/bigtable/nox.py index fe446a4a3a81..2abcce3ae2a5 100644 --- a/bigtable/nox.py +++ b/bigtable/nox.py @@ -88,10 +88,10 @@ def system(session, py): # virtualenv's dist-packages. session.install('mock', 'pytest', *LOCAL_DEPS) session.install('../test_utils/') - session.install('.') + session.install('-e', '.') # Run py.test against the system tests. - session.run('py.test', '--quiet', 'tests/system.py') + session.run('py.test', '--quiet', 'tests/system.py', *session.posargs) @nox.session From 5a79d5dfee95a37501e8d9296775ddbb69984c31 Mon Sep 17 00:00:00 2001 From: Aneep Tandel Date: Thu, 31 May 2018 23:06:42 +0530 Subject: [PATCH 61/75] BigTable: use client properties rather than private attrs (#5398) --- .../google/cloud/bigtable/column_family.py | 6 +-- bigtable/google/cloud/bigtable/instance.py | 16 +++---- bigtable/google/cloud/bigtable/row.py | 11 +++-- bigtable/google/cloud/bigtable/table.py | 47 +++++++++---------- 4 files changed, 40 insertions(+), 40 deletions(-) diff --git a/bigtable/google/cloud/bigtable/column_family.py b/bigtable/google/cloud/bigtable/column_family.py index 3e2f1cae818e..c0c5c47d6da3 100644 --- a/bigtable/google/cloud/bigtable/column_family.py +++ b/bigtable/google/cloud/bigtable/column_family.py @@ -228,7 +228,7 @@ def create(self): client = self._table._instance._client # data it contains are the GC rule and the column family ID already # stored on this instance.
- client._table_admin_client.modify_column_families( + client.table_admin_client.modify_column_families( self._table.name, [modification]) def update(self): @@ -247,7 +247,7 @@ def update(self): client = self._table._instance._client # data it contains are the GC rule and the column family ID already # stored on this instance. - client._table_admin_client.modify_column_families( + client.table_admin_client.modify_column_families( self._table.name, [modification]) def delete(self): @@ -259,7 +259,7 @@ def delete(self): client = self._table._instance._client # data it contains are the GC rule and the column family ID already # stored on this instance. - client._table_admin_client.modify_column_families( + client.table_admin_client.modify_column_families( self._table.name, [modification]) diff --git a/bigtable/google/cloud/bigtable/instance.py b/bigtable/google/cloud/bigtable/instance.py index 97b90e1ab9d4..67177c040b6c 100644 --- a/bigtable/google/cloud/bigtable/instance.py +++ b/bigtable/google/cloud/bigtable/instance.py @@ -145,7 +145,7 @@ def name(self): :rtype: str :returns: Return a fully-qualified instance string. """ - return self._client._instance_admin_client.instance_path( + return self._client.instance_admin_client.instance_path( project=self._client.project, instance=self.instance_id) def __eq__(self, other): @@ -165,7 +165,7 @@ def __ne__(self, other): def reload(self): """Reload the metadata for this instance.""" - instance_pb = self._client._instance_admin_client.get_instance( + instance_pb = self._client.instance_admin_client.get_instance( self.name) # NOTE: _update_from_pb does not check that the project and @@ -194,9 +194,9 @@ def create(self): """ clusters = {} cluster_id = '{}-cluster'.format(self.instance_id) - cluster_name = self._client._instance_admin_client.cluster_path( + cluster_name = self._client.instance_admin_client.cluster_path( self._client.project, self.instance_id, cluster_id) - location = self._client._instance_admin_client.location_path( + location = self._client.instance_admin_client.location_path( self._client.project, self._cluster_location_id) cluster = instance_pb2.Cluster( name=cluster_name, location=location, @@ -208,7 +208,7 @@ def create(self): clusters[cluster_id] = cluster parent = self._client.project_path - return self._client._instance_admin_client.create_instance( + return self._client.instance_admin_client.create_instance( parent=parent, instance_id=self.instance_id, instance=instance, clusters=clusters) @@ -227,7 +227,7 @@ def update(self): before calling :meth:`update`. """ type = enums.Instance.Type.TYPE_UNSPECIFIED - self._client._instance_admin_client.update_instance( + self._client.instance_admin_client.update_instance( name=self.name, display_name=self.display_name, type_=type, labels={}) @@ -253,7 +253,7 @@ def delete(self): irrevocably disappear from the API, and their data will be permanently deleted. """ - self._client._instance_admin_client.delete_instance(name=self.name) + self._client.instance_admin_client.delete_instance(name=self.name) def table(self, table_id): """Factory to create a table associated with this instance. @@ -274,7 +274,7 @@ def list_tables(self): :raises: :class:`ValueError ` if one of the returned tables has a name that is not of the expected format. 
""" - table_list_pb = self._client._table_admin_client.list_tables(self.name) + table_list_pb = self._client.table_admin_client.list_tables(self.name) result = [] for table_pb in table_list_pb: diff --git a/bigtable/google/cloud/bigtable/row.py b/bigtable/google/cloud/bigtable/row.py index 73803801249f..f900e42d4720 100644 --- a/bigtable/google/cloud/bigtable/row.py +++ b/bigtable/google/cloud/bigtable/row.py @@ -417,8 +417,9 @@ def commit(self): raise ValueError('%d total mutations exceed the maximum allowable ' '%d.' % (num_mutations, MAX_MUTATIONS)) + data_client = self._table._instance._client.table_data_client commit = functools.partial( - self._table._instance._client._table_data_client.mutate_row, + data_client.mutate_row, self._table.name, self._row_key, mutations_list) retry_ = retry.Retry( predicate=_retry_commit_exception, @@ -532,8 +533,8 @@ def commit(self): 'mutations and %d false mutations.' % ( MAX_MUTATIONS, num_true_mutations, num_false_mutations)) - client = self._table._instance._client - resp = client._table_data_client.check_and_mutate_row( + data_client = self._table._instance._client.table_data_client + resp = data_client.check_and_mutate_row( table_name=self._table.name, row_key=self._row_key,) self.clear() return resp[0].predicate_matched @@ -815,8 +816,8 @@ def commit(self): raise ValueError('%d total append mutations exceed the maximum ' 'allowable %d.' % (num_mutations, MAX_MUTATIONS)) - client = self._table._instance._client - row_response = client._table_data_client.read_modify_write_row( + data_client = self._table._instance._client.table_data_client + row_response = data_client.read_modify_write_row( table_name=self._table.name, row_key=self._row_key, rules=self._rule_pb_list) diff --git a/bigtable/google/cloud/bigtable/table.py b/bigtable/google/cloud/bigtable/table.py index 22e889588d32..0e37fc28d77a 100644 --- a/bigtable/google/cloud/bigtable/table.py +++ b/bigtable/google/cloud/bigtable/table.py @@ -112,7 +112,8 @@ def name(self): """ project = self._instance._client.project instance_id = self._instance.instance_id - return self._instance._client._table_admin_client.table_path( + table_client = self._instance._client.table_admin_client + return table_client.table_path( project=project, instance=instance_id, table=self.table_id) def column_family(self, column_family_id, gc_rule=None): @@ -182,16 +183,15 @@ def create(self): :class:`._generated.table_pb2.Table` but we don't use this response. """ - client = self._instance._client + table_client = self._instance._client.table_admin_client instance_name = self._instance.name - client._table_admin_client.create_table(parent=instance_name, - table_id=self.table_id, - table={}) + table_client.create_table( + parent=instance_name, table_id=self.table_id, table={}) def delete(self): """Delete this table.""" - client = self._instance._client - client._table_admin_client.delete_table(name=self.name) + table_client = self._instance._client.table_admin_client + table_client.delete_table(name=self.name) def list_column_families(self): """List the column families owned by this table. @@ -204,8 +204,8 @@ def list_column_families(self): family name from the response does not agree with the computed name from the column family ID. 
""" - client = self._instance._client - table_pb = client._table_admin_client.get_table(self.name) + table_client = self._instance._client.table_admin_client + table_pb = table_client.get_table(self.name) result = {} for column_family_id, value_pb in table_pb.column_families.items(): @@ -234,9 +234,8 @@ def read_row(self, row_key, filter_=None): request_pb = _create_row_request( self.name, row_key=row_key, filter_=filter_, app_profile_id=self._app_profile_id) - client = self._instance._client - rows_data = PartialRowsData(client._table_data_client._read_rows, - request_pb) + data_client = self._instance._client.table_data_client + rows_data = PartialRowsData(data_client._read_rows, request_pb) rows_data.consume_all() if rows_data.state not in (rows_data.NEW_ROW, rows_data.START): @@ -280,12 +279,11 @@ def read_rows(self, start_key=None, end_key=None, limit=None, the streamed results. """ request_pb = _create_row_request( - self.name, start_key=start_key, end_key=end_key, filter_=filter_, - limit=limit, end_inclusive=end_inclusive, + self.name, start_key=start_key, end_key=end_key, + filter_=filter_, limit=limit, end_inclusive=end_inclusive, app_profile_id=self._app_profile_id) - client = self._instance._client - return PartialRowsData(client._table_data_client._read_rows, - request_pb) + data_client = self._instance._client.table_data_client + return PartialRowsData(data_client._read_rows, request_pb) def yield_rows(self, start_key=None, end_key=None, limit=None, filter_=None): @@ -317,9 +315,9 @@ def yield_rows(self, start_key=None, end_key=None, limit=None, request_pb = _create_row_request( self.name, start_key=start_key, end_key=end_key, filter_=filter_, limit=limit, app_profile_id=self._app_profile_id) - client = self._instance._client - generator = YieldRowsData(client._table_data_client._read_rows, - request_pb) + data_client = self._instance._client.table_data_client + generator = YieldRowsData(data_client._read_rows, request_pb) + for row in generator.read_rows(): yield row @@ -388,9 +386,10 @@ def sample_row_keys(self): or by casting to a :class:`list` and can be cancelled by calling ``cancel()``. 
""" - client = self._instance._client - response_iterator = client._table_data_client.sample_row_keys( + data_client = self._instance._client.table_data_client + response_iterator = data_client.sample_row_keys( self.name, app_profile_id=self._app_profile_id) + return response_iterator @@ -478,8 +477,8 @@ def _do_mutate_retryable_rows(self): mutate_rows_request = _mutate_rows_request( self.table_name, retryable_rows, app_profile_id=self.app_profile_id) - responses = self.client._table_data_client._mutate_rows( - mutate_rows_request, retry=None) + data_client = self.client.table_data_client + responses = data_client._mutate_rows(mutate_rows_request, retry=None) num_responses = 0 num_retryable_responses = 0 From 102f0f80acda3e44a9cac8d8756324a2eab9f8f9 Mon Sep 17 00:00:00 2001 From: Alix Hamilton Date: Thu, 31 May 2018 10:39:24 -0700 Subject: [PATCH 62/75] BigQuery: Adds schema update options and snippets (#5415) --- bigquery/google/cloud/bigquery/__init__.py | 2 + bigquery/google/cloud/bigquery/job.py | 50 ++++ bigquery/tests/unit/test_job.py | 34 ++- docs/bigquery/reference.rst | 1 + docs/bigquery/snippets.py | 266 +++++++++++++++++++++ 5 files changed, 351 insertions(+), 2 deletions(-) diff --git a/bigquery/google/cloud/bigquery/__init__.py b/bigquery/google/cloud/bigquery/__init__.py index 503c83bfb85c..751efd5a671b 100644 --- a/bigquery/google/cloud/bigquery/__init__.py +++ b/bigquery/google/cloud/bigquery/__init__.py @@ -49,6 +49,7 @@ from google.cloud.bigquery.job import QueryJob from google.cloud.bigquery.job import QueryJobConfig from google.cloud.bigquery.job import QueryPriority +from google.cloud.bigquery.job import SchemaUpdateOption from google.cloud.bigquery.job import SourceFormat from google.cloud.bigquery.job import UnknownJob from google.cloud.bigquery.job import WriteDisposition @@ -113,6 +114,7 @@ 'DestinationFormat', 'Encoding', 'QueryPriority', + 'SchemaUpdateOption', 'SourceFormat', 'WriteDisposition' ] diff --git a/bigquery/google/cloud/bigquery/job.py b/bigquery/google/cloud/bigquery/job.py index 393dcdf5dad7..5026445f0287 100644 --- a/bigquery/google/cloud/bigquery/job.py +++ b/bigquery/google/cloud/bigquery/job.py @@ -206,6 +206,18 @@ class WriteDisposition(object): returned in the job result.""" +class SchemaUpdateOption(object): + """Specifies an update to the destination table schema as a side effect of + a load job. + """ + + ALLOW_FIELD_ADDITION = 'ALLOW_FIELD_ADDITION' + """Allow adding a nullable field to the schema.""" + + ALLOW_FIELD_RELAXATION = 'ALLOW_FIELD_RELAXATION' + """Allow relaxing a required field in the original schema to nullable.""" + + class _JobReference(object): """A reference to a job. @@ -1004,6 +1016,18 @@ def time_partitioning(self, value): api_repr = value.to_api_repr() self._set_sub_prop('timePartitioning', api_repr) + @property + def schema_update_options(self): + """List[google.cloud.bigquery.job.SchemaUpdateOption]: Specifies + updates to the destination table schema to allow as a side effect of + the load job. + """ + return self._get_sub_prop('schemaUpdateOptions') + + @schema_update_options.setter + def schema_update_options(self, values): + self._set_sub_prop('schemaUpdateOptions', values) + class LoadJob(_AsyncJob): """Asynchronous job for loading data into a table. @@ -1158,6 +1182,13 @@ def time_partitioning(self): """ return self._configuration.time_partitioning + @property + def schema_update_options(self): + """See + :attr:`google.cloud.bigquery.job.LoadJobConfig.schema_update_options`. 
+ """ + return self._configuration.schema_update_options + @property def input_file_bytes(self): """Count of bytes loaded from source files. @@ -1971,6 +2002,18 @@ def time_partitioning(self, value): api_repr = value.to_api_repr() self._set_sub_prop('timePartitioning', api_repr) + @property + def schema_update_options(self): + """List[google.cloud.bigquery.job.SchemaUpdateOption]: Specifies + updates to the destination table schema to allow as a side effect of + the query job. + """ + return self._get_sub_prop('schemaUpdateOptions') + + @schema_update_options.setter + def schema_update_options(self, values): + self._set_sub_prop('schemaUpdateOptions', values) + def to_api_repr(self): """Build an API representation of the query job config. @@ -2149,6 +2192,13 @@ def time_partitioning(self): """ return self._configuration.time_partitioning + @property + def schema_update_options(self): + """See + :attr:`google.cloud.bigquery.job.QueryJobConfig.schema_update_options`. + """ + return self._configuration.schema_update_options + def _build_resource(self): """Generate a resource for :meth:`begin`.""" configuration = self._configuration.to_api_repr() diff --git a/bigquery/tests/unit/test_job.py b/bigquery/tests/unit/test_job.py index a76d05f4a077..a8d0ed96299f 100644 --- a/bigquery/tests/unit/test_job.py +++ b/bigquery/tests/unit/test_job.py @@ -380,6 +380,11 @@ def _verifyEnumConfigProperties(self, job, config): config['writeDisposition']) else: self.assertIsNone(job.write_disposition) + if 'schemaUpdateOptions' in config: + self.assertEqual( + job.schema_update_options, config['schemaUpdateOptions']) + else: + self.assertIsNone(job.schema_update_options) def _verifyResourceProperties(self, job, resource): self._verifyReadonlyResourceProperties(job, resource) @@ -467,6 +472,7 @@ def test_ctor(self): self.assertIsNone(job.write_disposition) self.assertIsNone(job.destination_encryption_configuration) self.assertIsNone(job.time_partitioning) + self.assertIsNone(job.schema_update_options) def test_ctor_w_config(self): from google.cloud.bigquery.schema import SchemaField @@ -780,6 +786,7 @@ def test_begin_w_autodetect(self): def test_begin_w_alternate_client(self): from google.cloud.bigquery.job import CreateDisposition + from google.cloud.bigquery.job import SchemaUpdateOption from google.cloud.bigquery.job import WriteDisposition from google.cloud.bigquery.schema import SchemaField @@ -817,7 +824,10 @@ def test_begin_w_alternate_client(self): 'mode': 'REQUIRED', 'description': None, }, - ]} + ]}, + 'schemaUpdateOptions': [ + SchemaUpdateOption.ALLOW_FIELD_ADDITION, + ], } RESOURCE['configuration']['load'] = LOAD_CONFIGURATION conn1 = _make_connection() @@ -842,6 +852,9 @@ def test_begin_w_alternate_client(self): config.skip_leading_rows = 1 config.source_format = 'CSV' config.write_disposition = WriteDisposition.WRITE_TRUNCATE + config.schema_update_options = [ + SchemaUpdateOption.ALLOW_FIELD_ADDITION, + ] job._begin(client=client2) @@ -2127,6 +2140,11 @@ def _verifyResourceProperties(self, job, resource): 'kmsKeyName']) else: self.assertIsNone(job.destination_encryption_configuration) + if 'schemaUpdateOptions' in query_config: + self.assertEqual( + job.schema_update_options, query_config['schemaUpdateOptions']) + else: + self.assertIsNone(job.schema_update_options) def test_ctor_defaults(self): client = _make_client(project=self.PROJECT) @@ -2157,6 +2175,7 @@ def test_ctor_defaults(self): self.assertIsNone(job.table_definitions) self.assertIsNone(job.destination_encryption_configuration) 
self.assertIsNone(job.time_partitioning) + self.assertIsNone(job.schema_update_options) def test_ctor_w_udf_resources(self): from google.cloud.bigquery.job import QueryJobConfig @@ -2248,6 +2267,7 @@ def test_from_api_repr_with_encryption(self): def test_from_api_repr_w_properties(self): from google.cloud.bigquery.job import CreateDisposition + from google.cloud.bigquery.job import SchemaUpdateOption from google.cloud.bigquery.job import WriteDisposition client = _make_client(project=self.PROJECT) @@ -2260,6 +2280,9 @@ def test_from_api_repr_w_properties(self): 'datasetId': self.DS_ID, 'tableId': self.DESTINATION_TABLE, } + query_config['schemaUpdateOptions'] = [ + SchemaUpdateOption.ALLOW_FIELD_ADDITION, + ] klass = self._get_target_class() job = klass.from_api_repr(RESOURCE, client=client) self.assertIs(job._client, client) @@ -2841,6 +2864,7 @@ def test_begin_w_alternate_client(self): from google.cloud.bigquery.job import CreateDisposition from google.cloud.bigquery.job import QueryJobConfig from google.cloud.bigquery.job import QueryPriority + from google.cloud.bigquery.job import SchemaUpdateOption from google.cloud.bigquery.job import WriteDisposition PATH = '/projects/%s/jobs' % (self.PROJECT,) @@ -2866,7 +2890,10 @@ def test_begin_w_alternate_client(self): 'useLegacySql': True, 'writeDisposition': WriteDisposition.WRITE_TRUNCATE, 'maximumBillingTier': 4, - 'maximumBytesBilled': '123456' + 'maximumBytesBilled': '123456', + 'schemaUpdateOptions': [ + SchemaUpdateOption.ALLOW_FIELD_RELAXATION, + ] } RESOURCE['configuration']['query'] = QUERY_CONFIGURATION RESOURCE['configuration']['dryRun'] = True @@ -2890,6 +2917,9 @@ def test_begin_w_alternate_client(self): config.use_query_cache = True config.write_disposition = WriteDisposition.WRITE_TRUNCATE config.maximum_bytes_billed = 123456 + config.schema_update_options = [ + SchemaUpdateOption.ALLOW_FIELD_RELAXATION, + ] job = self._make_one( self.JOB_ID, self.QUERY, client1, job_config=config) diff --git a/docs/bigquery/reference.rst b/docs/bigquery/reference.rst index c1546f915bd6..995e8a154a67 100644 --- a/docs/bigquery/reference.rst +++ b/docs/bigquery/reference.rst @@ -62,6 +62,7 @@ Job-Related Types job.QueryPriority job.SourceFormat job.WriteDisposition + job.SchemaUpdateOption Dataset diff --git a/docs/bigquery/snippets.py b/docs/bigquery/snippets.py index 33939ff9ec3c..05a9a3655141 100644 --- a/docs/bigquery/snippets.py +++ b/docs/bigquery/snippets.py @@ -1365,6 +1365,148 @@ def test_load_table_from_uri_truncate(client, to_delete): # [END bigquery_load_table_gcs_parquet_truncate] +def test_load_table_add_column(client, to_delete): + dataset_id = 'load_table_add_column_{}'.format(_millis()) + dataset_ref = client.dataset(dataset_id) + dataset = bigquery.Dataset(dataset_ref) + dataset.location = 'US' + dataset = client.create_dataset(dataset) + to_delete.append(dataset) + + snippets_dir = os.path.abspath(os.path.dirname(__file__)) + filepath = os.path.join( + snippets_dir, '..', '..', 'bigquery', 'tests', 'data', 'people.csv') + table_ref = dataset_ref.table('my_table') + old_schema = [ + bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), + ] + table = client.create_table(bigquery.Table(table_ref, schema=old_schema)) + + # [START bigquery_add_column_load_append] + # from google.cloud import bigquery + # client = bigquery.Client() + # dataset_ref = client.dataset('my_dataset') + # filepath = 'path/to/your_file.csv' + + # Retrieves the destination table and checks the length of the schema + table_id = 'my_table' + table_ref 
= dataset_ref.table(table_id) + table = client.get_table(table_ref) + print("Table {} contains {} columns.".format(table_id, len(table.schema))) + + # Configures the load job to append the data to the destination table, + # allowing field addition + job_config = bigquery.LoadJobConfig() + job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND + job_config.schema_update_options = [ + bigquery.SchemaUpdateOption.ALLOW_FIELD_ADDITION, + ] + # In this example, the existing table contains only the 'full_name' column. + # 'REQUIRED' fields cannot be added to an existing schema, so the + # additional column must be 'NULLABLE'. + job_config.schema = [ + bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), + bigquery.SchemaField('age', 'INTEGER', mode='NULLABLE'), + ] + job_config.source_format = bigquery.SourceFormat.CSV + job_config.skip_leading_rows = 1 + + with open(filepath, 'rb') as source_file: + job = client.load_table_from_file( + source_file, + table_ref, + location='US', # Must match the destination dataset location. + job_config=job_config) # API request + + job.result() # Waits for table load to complete. + print('Loaded {} rows into {}:{}.'.format( + job.output_rows, dataset_id, table_ref.table_id)) + + # Checks the updated length of the schema + table = client.get_table(table) + print("Table {} now contains {} columns.".format( + table_id, len(table.schema))) + # [END bigquery_add_column_load_append] + assert len(table.schema) == 2 + assert table.num_rows > 0 + + +def test_load_table_relax_column(client, to_delete): + dataset_id = 'load_table_relax_column_{}'.format(_millis()) + dataset_ref = client.dataset(dataset_id) + dataset = bigquery.Dataset(dataset_ref) + dataset.location = 'US' + dataset = client.create_dataset(dataset) + to_delete.append(dataset) + + snippets_dir = os.path.abspath(os.path.dirname(__file__)) + filepath = os.path.join( + snippets_dir, '..', '..', 'bigquery', 'tests', 'data', 'people.csv') + table_ref = dataset_ref.table('my_table') + old_schema = [ + bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), + bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED'), + bigquery.SchemaField('favorite_color', 'STRING', mode='REQUIRED'), + ] + table = client.create_table(bigquery.Table(table_ref, schema=old_schema)) + + # [START bigquery_relax_column_load_append] + # from google.cloud import bigquery + # client = bigquery.Client() + # dataset_ref = client.dataset('my_dataset') + # filepath = 'path/to/your_file.csv' + + # Retrieves the destination table and checks the number of required fields + table_id = 'my_table' + table_ref = dataset_ref.table(table_id) + table = client.get_table(table_ref) + original_required_fields = sum( + field.mode == 'REQUIRED' for field in table.schema) + # In this example, the existing table has 3 required fields. + print("{} fields in the schema are required.".format( + original_required_fields)) + + # Configures the load job to append the data to a destination table, + # allowing field relaxation + job_config = bigquery.LoadJobConfig() + job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND + job_config.schema_update_options = [ + bigquery.SchemaUpdateOption.ALLOW_FIELD_RELAXATION, + ] + # In this example, the existing table contains three required fields + # ('full_name', 'age', and 'favorite_color'), while the data to load + # contains only the first two fields. 
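+    # Because the load schema below omits 'favorite_color', this append
+    # relaxes that REQUIRED column to NULLABLE, as permitted above.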
+ job_config.schema = [ + bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), + bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED'), + ] + job_config.source_format = bigquery.SourceFormat.CSV + job_config.skip_leading_rows = 1 + + with open(filepath, 'rb') as source_file: + job = client.load_table_from_file( + source_file, + table_ref, + location='US', # Must match the destination dataset location. + job_config=job_config) # API request + + job.result() # Waits for table load to complete. + print('Loaded {} rows into {}:{}.'.format( + job.output_rows, dataset_id, table_ref.table_id)) + + # Checks the updated number of required fields + table = client.get_table(table) + current_required_fields = sum( + field.mode == 'REQUIRED' for field in table.schema) + print("{} fields in the schema are now required.".format( + current_required_fields)) + # [END bigquery_relax_column_load_append] + assert original_required_fields - current_required_fields == 1 + assert len(table.schema) == 3 + assert table.schema[2].mode == 'NULLABLE' + assert table.num_rows > 0 + + def _write_csv_to_storage(bucket_name, blob_name, header_row, data_rows): import csv from google.cloud._testing import _NamedTemporaryFile @@ -1810,6 +1952,130 @@ def test_client_query_destination_table_cmek(client, to_delete): # [END bigquery_query_destination_table_cmek] +def test_client_query_relax_column(client, to_delete): + dataset_id = 'query_relax_column_{}'.format(_millis()) + dataset_ref = client.dataset(dataset_id) + dataset = bigquery.Dataset(dataset_ref) + dataset.location = 'US' + dataset = client.create_dataset(dataset) + to_delete.append(dataset) + + table_ref = dataset_ref.table('my_table') + schema = [ + bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), + bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED'), + ] + table = client.create_table( + bigquery.Table(table_ref, schema=schema)) + + # [START bigquery_relax_column_query_append] + # from google.cloud import bigquery + # client = bigquery.Client() + # dataset_ref = client.dataset('my_dataset') + + # Retrieves the destination table and checks the number of required fields + table_id = 'my_table' + table_ref = dataset_ref.table(table_id) + table = client.get_table(table_ref) + original_required_fields = sum( + field.mode == 'REQUIRED' for field in table.schema) + # In this example, the existing table has 2 required fields + print("{} fields in the schema are required.".format( + original_required_fields)) + + # Configures the query to append the results to a destination table, + # allowing field relaxation + job_config = bigquery.QueryJobConfig() + job_config.schema_update_options = [ + bigquery.SchemaUpdateOption.ALLOW_FIELD_RELAXATION, + ] + job_config.destination = table_ref + job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND + + query_job = client.query( + # In this example, the existing table contains 'full_name' and 'age' as + # required columns, but the query results will omit the second column. + 'SELECT "Beyonce" as full_name;', + # Location must match that of the dataset(s) referenced in the query + # and of the destination table. 
+ location='US', + job_config=job_config + ) # API request - starts the query + + query_job.result() # Waits for the query to finish + print("Query job {} complete.".format(query_job.job_id)) + + # Checks the updated number of required fields + table = client.get_table(table) + current_required_fields = sum( + field.mode == 'REQUIRED' for field in table.schema) + print("{} fields in the schema are now required.".format( + current_required_fields)) + # [END bigquery_relax_column_query_append] + assert original_required_fields - current_required_fields > 0 + assert len(table.schema) == 2 + assert table.schema[1].mode == 'NULLABLE' + assert table.num_rows > 0 + + +def test_client_query_add_column(client, to_delete): + dataset_id = 'query_add_column_{}'.format(_millis()) + dataset_ref = client.dataset(dataset_id) + dataset = bigquery.Dataset(dataset_ref) + dataset.location = 'US' + dataset = client.create_dataset(dataset) + to_delete.append(dataset) + + table_ref = dataset_ref.table('my_table') + schema = [ + bigquery.SchemaField('full_name', 'STRING', mode='REQUIRED'), + bigquery.SchemaField('age', 'INTEGER', mode='REQUIRED'), + ] + table = client.create_table(bigquery.Table(table_ref, schema=schema)) + + # [START bigquery_add_column_query_append] + # from google.cloud import bigquery + # client = bigquery.Client() + # dataset_ref = client.dataset('my_dataset') + + # Retrieves the destination table and checks the length of the schema + table_id = 'my_table' + table_ref = dataset_ref.table(table_id) + table = client.get_table(table_ref) + print("Table {} contains {} columns.".format(table_id, len(table.schema))) + + # Configures the query to append the results to a destination table, + # allowing field addition + job_config = bigquery.QueryJobConfig() + job_config.schema_update_options = [ + bigquery.SchemaUpdateOption.ALLOW_FIELD_ADDITION, + ] + job_config.destination = table_ref + job_config.write_disposition = bigquery.WriteDisposition.WRITE_APPEND + + query_job = client.query( + # In this example, the existing table contains only the 'full_name' and + # 'age' columns, while the results of this query will contain an + # additional 'favorite_color' column. + 'SELECT "Timmy" as full_name, 85 as age, "Blue" as favorite_color;', + # Location must match that of the dataset(s) referenced in the query + # and of the destination table. + location='US', + job_config=job_config + ) # API request - starts the query + + query_job.result() # Waits for the query to finish + print("Query job {} complete.".format(query_job.job_id)) + + # Checks the updated length of the schema + table = client.get_table(table) + print("Table {} now contains {} columns.".format( + table_id, len(table.schema))) + # [END bigquery_add_column_query_append] + assert len(table.schema) == 3 + assert table.num_rows > 0 + + def test_client_query_w_named_params(client, capsys): """Run a query using named query parameters""" From 6465336ef87c2ad0aa8563a2079216c959bbae7a Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Thu, 31 May 2018 16:09:46 -0400 Subject: [PATCH 63/75] Add aliases for new V3 service clients. (#5424) Closes #5311. 
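For illustration, a minimal sketch of what the new aliases allow (assuming
default application credentials are configured):

    from google.cloud import monitoring

    # Each name is now re-exported from the top-level module rather than
    # only from google.cloud.monitoring_v3.
    alert_client = monitoring.AlertPolicyServiceClient()
    notification_client = monitoring.NotificationChannelServiceClient()
    uptime_client = monitoring.UptimeCheckServiceClient()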
--- monitoring/google/cloud/monitoring.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/monitoring/google/cloud/monitoring.py b/monitoring/google/cloud/monitoring.py index 0cf5c570ceb4..a3f697504ebf 100644 --- a/monitoring/google/cloud/monitoring.py +++ b/monitoring/google/cloud/monitoring.py @@ -15,14 +15,21 @@ from __future__ import absolute_import from google.cloud.monitoring_v3.query import Query +from google.cloud.monitoring_v3 import AlertPolicyServiceClient from google.cloud.monitoring_v3 import GroupServiceClient from google.cloud.monitoring_v3 import MetricServiceClient +from google.cloud.monitoring_v3 import NotificationChannelServiceClient +from google.cloud.monitoring_v3 import UptimeCheckServiceClient from google.cloud.monitoring_v3 import enums from google.cloud.monitoring_v3 import types __all__ = ( 'enums', 'types', + 'AlertPolicyServiceClient', 'GroupServiceClient', + 'MetricServiceClient', + 'NotificationChannelServiceClient', + 'UptimeCheckServiceClient', 'Query', - 'MetricServiceClient', ) +) From 6f141ffceacfb3fdab6bc2b6d5bdbab52a53d600 Mon Sep 17 00:00:00 2001 From: Alix Hamilton Date: Fri, 1 Jun 2018 16:13:02 -0700 Subject: [PATCH 64/75] BigQuery: Adds time filtering to client.list_jobs() (#5429) --- bigquery/google/cloud/bigquery/client.py | 53 ++++++++++++++---------- bigquery/tests/unit/test_client.py | 17 ++++++++ 2 files changed, 49 insertions(+), 21 deletions(-) diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py index 8e303610d082..23f3c40366de 100644 --- a/bigquery/google/cloud/bigquery/client.py +++ b/bigquery/google/cloud/bigquery/client.py @@ -33,6 +33,7 @@ from google.cloud.bigquery._helpers import DEFAULT_RETRY from google.cloud.bigquery._helpers import _SCALAR_VALUE_TO_JSON_ROW +from google.cloud.bigquery._helpers import _str_or_none from google.cloud.bigquery._http import Connection from google.cloud.bigquery.dataset import Dataset from google.cloud.bigquery.dataset import DatasetListItem @@ -662,52 +663,62 @@ def cancel_job( def list_jobs( self, project=None, max_results=None, page_token=None, - all_users=None, state_filter=None, retry=DEFAULT_RETRY): + all_users=None, state_filter=None, retry=DEFAULT_RETRY, + min_creation_time=None, max_creation_time=None): """List jobs for the project associated with this client. See https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/list Args: - project (str): - Optional. Project ID to use for retrieving datasets. Defaults + project (str, optional): + Project ID to use for retrieving datasets. Defaults to the client's project. - max_results (int): - Optional. Maximum number of jobs to return. + max_results (int, optional): + Maximum number of jobs to return. - page_token (str): - Optional. Opaque marker for the next "page" of jobs. If not + page_token (str, optional): + Opaque marker for the next "page" of jobs. If not passed, the API will return the first page of jobs. The token marks the beginning of the iterator to be returned and the value of the ``page_token`` can be accessed at ``next_page_token`` of :class:`~google.api_core.page_iterator.HTTPIterator`. - all_users (bool): + all_users (bool, optional): If true, include jobs owned by all users in the project. - state_filter (str): - Optional. If set, include only jobs matching the given - state. One of - + state_filter (str, optional): + If set, include only jobs matching the given state.
One of: * ``"done"`` * ``"pending"`` * ``"running"`` - retry (google.api_core.retry.Retry): - Optional. How to retry the RPC. + retry (google.api_core.retry.Retry, optional): + How to retry the RPC. + min_creation_time (int, optional): + Min value for job creation time, in milliseconds since the + POSIX epoch. If set, only jobs created after or at this + timestamp are returned. + max_creation_time (int, optional): + Max value for job creation time, in milliseconds since the + POSIX epoch. If set, only jobs created before or at this + timestamp are returned. Returns: google.api_core.page_iterator.Iterator: Iterable of job instances. """ - extra_params = {'projection': 'full'} + extra_params = { + 'allUsers': all_users, + 'stateFilter': state_filter, + 'minCreationTime': _str_or_none(min_creation_time), + 'maxCreationTime': _str_or_none(max_creation_time), + 'projection': 'full' + } + + extra_params = {param: value for param, value in extra_params.items() + if value is not None} if project is None: project = self.project - if all_users is not None: - extra_params['allUsers'] = all_users - - if state_filter is not None: - extra_params['stateFilter'] = state_filter - path = '/projects/%s/jobs' % (project,) return page_iterator.HTTPIterator( client=self, diff --git a/bigquery/tests/unit/test_client.py b/bigquery/tests/unit/test_client.py index 785f98b8e8bf..fe09b5e74556 100644 --- a/bigquery/tests/unit/test_client.py +++ b/bigquery/tests/unit/test_client.py @@ -1753,6 +1753,23 @@ def test_list_jobs_w_project(self): 'projection': 'full', }) + def test_list_jobs_w_time_filter(self): + creds = _make_credentials() + client = self._make_one(self.PROJECT, creds) + conn = client._connection = _make_connection({}) + + list(client.list_jobs( + min_creation_time=1, max_creation_time=1527874895820)) + + conn.api_request.assert_called_once_with( + method='GET', + path='/projects/%s/jobs' % self.PROJECT, + query_params={ + 'projection': 'full', + 'minCreationTime': '1', + 'maxCreationTime': '1527874895820', + }) + def test_load_table_from_uri(self): from google.cloud.bigquery.job import LoadJob From 52dc9df8c7ef9cc23988bccbd3aae0cdf7d26936 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Mon, 4 Jun 2018 10:03:22 -0700 Subject: [PATCH 65/75] BigQuery: Use datetime for jobs listing filter args. (#5431) datetime.datetime will be easier to work with for filtering things like "all the jobs run last month" or "all the jobs in the past 5 minutes". --- bigquery/google/cloud/bigquery/client.py | 25 ++++++++++++++---------- bigquery/tests/unit/test_client.py | 11 +++++++++-- 2 files changed, 24 insertions(+), 12 deletions(-) diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py index 23f3c40366de..8215a629a12b 100644 --- a/bigquery/google/cloud/bigquery/client.py +++ b/bigquery/google/cloud/bigquery/client.py @@ -28,6 +28,7 @@ from google.resumable_media.requests import ResumableUpload from google.api_core import page_iterator +import google.cloud._helpers from google.cloud import exceptions from google.cloud.client import ClientWithProject @@ -692,14 +693,14 @@ def list_jobs( * ``"running"`` retry (google.api_core.retry.Retry, optional): How to retry the RPC. - min_creation_time (int, optional): - Min value for job creation time, in milliseconds since the - POSIX epoch. If set, only jobs created after or at this - timestamp are returned. - max_creation_time (int, optional): - Max value for job creation time, in milliseconds since the - POSIX epoch. 
If set, only jobs created before or at this - timestamp are returned. + min_creation_time (datetitme.datetime, optional): + Min value for job creation time. If set, only jobs created + after or at this timestamp are returned. If the datetime has + no time zone assumes UTC time. + max_creation_time (datetime.datetime, optional): + Max value for job creation time. If set, only jobs created + before or at this timestamp are returned. If the datetime has + no time zone assumes UTC time. Returns: google.api_core.page_iterator.Iterator: @@ -708,8 +709,12 @@ def list_jobs( extra_params = { 'allUsers': all_users, 'stateFilter': state_filter, - 'minCreationTime': _str_or_none(min_creation_time), - 'maxCreationTime': _str_or_none(max_creation_time), + 'minCreationTime': _str_or_none( + google.cloud._helpers._millis_from_datetime( + min_creation_time)), + 'maxCreationTime': _str_or_none( + google.cloud._helpers._millis_from_datetime( + max_creation_time)), 'projection': 'full' } diff --git a/bigquery/tests/unit/test_client.py b/bigquery/tests/unit/test_client.py index fe09b5e74556..49ce16cea115 100644 --- a/bigquery/tests/unit/test_client.py +++ b/bigquery/tests/unit/test_client.py @@ -13,6 +13,7 @@ # limitations under the License. import copy +import datetime import decimal import email import io @@ -1758,8 +1759,14 @@ def test_list_jobs_w_time_filter(self): client = self._make_one(self.PROJECT, creds) conn = client._connection = _make_connection({}) + # One millisecond after the unix epoch. + start_time = datetime.datetime(1970, 1, 1, 0, 0, 0, 1000) + # One millisecond after the the 2038 31-bit signed int rollover + end_time = datetime.datetime(2038, 1, 19, 3, 14, 7, 1000) + end_time_millis = (((2 ** 31) - 1) * 1000) + 1 + list(client.list_jobs( - min_creation_time=1, max_creation_time=1527874895820)) + min_creation_time=start_time, max_creation_time=end_time)) conn.api_request.assert_called_once_with( method='GET', @@ -1767,7 +1774,7 @@ def test_list_jobs_w_time_filter(self): query_params={ 'projection': 'full', 'minCreationTime': '1', - 'maxCreationTime': '1527874895820', + 'maxCreationTime': str(end_time_millis), }) def test_load_table_from_uri(self): From b1620bf734422cedbab4527ec1e7fcea486196e7 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Mon, 4 Jun 2018 10:39:38 -0700 Subject: [PATCH 66/75] BigQuery: fix datetitme typo in list_jobs docstring (#5433) --- bigquery/google/cloud/bigquery/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py index 8215a629a12b..4812a31e761f 100644 --- a/bigquery/google/cloud/bigquery/client.py +++ b/bigquery/google/cloud/bigquery/client.py @@ -693,7 +693,7 @@ def list_jobs( * ``"running"`` retry (google.api_core.retry.Retry, optional): How to retry the RPC. - min_creation_time (datetitme.datetime, optional): + min_creation_time (datetime.datetime, optional): Min value for job creation time. If set, only jobs created after or at this timestamp are returned. If the datetime has no time zone assumes UTC time. 
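Taken together, patches 64 through 66 leave `list_jobs()` accepting `datetime.datetime` bounds and serializing them as millisecond strings. The following is a minimal sketch of the resulting call pattern, assuming a `google-cloud-bigquery` build that includes #5429 and #5431; the project ID is a placeholder:

    # Filter BigQuery jobs to a creation-time window, per #5429/#5431.
    # Naive datetimes (no tzinfo) are treated as UTC, and each bound is
    # sent as str(milliseconds since the POSIX epoch), so
    # datetime.datetime(1970, 1, 1, 0, 0, 0, 1000) goes out as '1'.
    import datetime

    from google.cloud import bigquery

    client = bigquery.Client(project='my-project')  # placeholder project ID

    one_day_ago = datetime.datetime.utcnow() - datetime.timedelta(days=1)

    # Jobs created in the last 24 hours, most recent first.
    for job in client.list_jobs(min_creation_time=one_day_ago):
        print(job.job_id)

The same keyword arguments compose with the existing filters (`all_users`, `state_filter`, `max_results`) exercised in the snippet that patch 67 adds below.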
From 450cf7f8fe8f7927d6c052c9d0a4187bc6053e91 Mon Sep 17 00:00:00 2001 From: Alix Hamilton Date: Tue, 5 Jun 2018 12:16:10 -0700 Subject: [PATCH 67/75] BigQuery: Adds examples of optional params for client.list_jobs() snippet (#5436) --- bigquery/google/cloud/bigquery/client.py | 1 + docs/bigquery/snippets.py | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+) diff --git a/bigquery/google/cloud/bigquery/client.py b/bigquery/google/cloud/bigquery/client.py index 4812a31e761f..b5ef0bc4e7f2 100644 --- a/bigquery/google/cloud/bigquery/client.py +++ b/bigquery/google/cloud/bigquery/client.py @@ -686,6 +686,7 @@ def list_jobs( :class:`~google.api_core.page_iterator.HTTPIterator`. all_users (bool, optional): If true, include jobs owned by all users in the project. + Defaults to :data:`False`. state_filter (str, optional): If set, include only jobs matching the given state. One of: * ``"done"`` diff --git a/docs/bigquery/snippets.py b/docs/bigquery/snippets.py index 05a9a3655141..0e1c88019c7c 100644 --- a/docs/bigquery/snippets.py +++ b/docs/bigquery/snippets.py @@ -2297,11 +2297,31 @@ def test_client_list_jobs(client): # [START bigquery_list_jobs] # from google.cloud import bigquery # client = bigquery.Client(project='my_project') + import datetime # List the 10 most recent jobs in reverse chronological order. # Omit the max_results parameter to list jobs from the past 6 months. + print("Last 10 jobs:") for job in client.list_jobs(max_results=10): # API request(s) print(job.job_id) + + # The following are examples of additional optional parameters: + + # Use min_creation_time and/or max_creation_time to specify a time window. + print("Jobs from the last ten minutes:") + ten_mins_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=10) + for job in client.list_jobs(min_creation_time=ten_mins_ago): + print(job.job_id) + + # Use all_users to include jobs run by all users in the project. + print("Last 10 jobs run by all users:") + for job in client.list_jobs(max_results=10, all_users=True): + print("{} run by user: {}".format(job.job_id, job.user_email)) + + # Use state_filter to filter by job state. + print("Jobs currently running:") + for job in client.list_jobs(state_filter='RUNNING'): + print(job.job_id) # [END bigquery_list_jobs] From 6e2e8bffdac41defdeb790347dde93aed811e0c8 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Tue, 5 Jun 2018 13:15:55 -0700 Subject: [PATCH 68/75] Add additional error handling to unary RPCs (#5438) --- .../pubsub_v1/subscriber/_protocol/bidi.py | 5 +++++ .../_protocol/streaming_pull_manager.py | 12 +++++++++--- .../subscriber/test_streaming_pull_manager.py | 17 +++++++++++++++++ 3 files changed, 31 insertions(+), 3 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py index 80824c55022b..a0bedf7e3d8e 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py @@ -481,6 +481,11 @@ def _thread_main(self): '%s caught unexpected exception %s and will exit.', _BIDIRECTIONAL_CONSUMER_NAME, exc) + else: + _LOGGER.error( + 'The bidirectional RPC unexpectedly exited. 
%s', + self._bidi_rpc.call) + _LOGGER.info('%s exiting', _BIDIRECTIONAL_CONSUMER_NAME) def start(self): diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index 24d57e1ee90b..edc7cfbf8802 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -261,7 +261,13 @@ def _send_unary_request(self, request): def send(self, request): """Queue a request to be sent to the RPC.""" if self._UNARY_REQUESTS: - self._send_unary_request(request) + try: + self._send_unary_request(request) + except exceptions.GoogleAPICallError as exc: + _LOGGER.debug( + 'Exception while sending unary RPC. This is typically ' + 'non-fatal as stream requests are best-effort.', + exc_info=True) else: self._rpc.send(request) @@ -433,9 +439,9 @@ def _should_recover(self, exception): # If this is in the list of idempotent exceptions, then we want to # recover. if isinstance(exception, _RETRYABLE_STREAM_ERRORS): - logging.info('Observed recoverable stream error %s', exception) + _LOGGER.info('Observed recoverable stream error %s', exception) return True - logging.info('Observed non-recoverable stream error %s', exception) + _LOGGER.info('Observed non-recoverable stream error %s', exception) return False def _on_rpc_done(self, future): diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py index 53b23dd7049f..5f2a8f53fb9f 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_streaming_pull_manager.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import logging + import mock import pytest @@ -207,6 +209,21 @@ def test_send_unary_empty(): manager._client.modify_ack_deadline.assert_not_called() +def test_send_unary_error(caplog): + caplog.set_level(logging.DEBUG) + + manager = make_manager() + manager._UNARY_REQUESTS = True + + error = exceptions.GoogleAPICallError('The front fell off') + manager._client.acknowledge.side_effect = error + + manager.send(types.StreamingPullRequest( + ack_ids=['ack_id1', 'ack_id2'])) + + assert 'The front fell off' in caplog.text + + def test_send_streaming(): manager = make_manager() manager._UNARY_REQUESTS = False From 85f12d1c4b121ed4228d3a335d0880056a8250ca Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Tue, 5 Jun 2018 13:49:24 -0700 Subject: [PATCH 69/75] Release pubsub 0.35.3 (#5439) --- pubsub/CHANGELOG.md | 6 ++++++ pubsub/setup.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/pubsub/CHANGELOG.md b/pubsub/CHANGELOG.md index 85af38e15069..4c525ab2a09b 100644 --- a/pubsub/CHANGELOG.md +++ b/pubsub/CHANGELOG.md @@ -4,6 +4,12 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.35.3 + +### Implementation Changes + +- Add additional error handling to unary RPCs (#5438) + ## 0.35.2 ### Implementation Changes diff --git a/pubsub/setup.py b/pubsub/setup.py index fd97318fee06..d755d3e99b2c 100644 --- a/pubsub/setup.py +++ b/pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.35.2' +version = '0.35.3' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From dcce8b52231a5b1c3d024c89388daba8b0b37d71 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Tue, 5 Jun 2018 16:34:17 -0700 Subject: [PATCH 70/75] BigQuery: Exclude PyArrow from Windows Python 2.7 tests (#5442) * BigQuery: Exclude PyArrow from Windows Python 2.7 tests * BigQuery: Exclude PyArrow from Windows Python 2.7 installs. --- bigquery/setup.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bigquery/setup.py b/bigquery/setup.py index c473dbcc2333..ef2062637442 100644 --- a/bigquery/setup.py +++ b/bigquery/setup.py @@ -35,7 +35,9 @@ ] extras = { 'pandas': 'pandas>=0.17.1', - 'pyarrow': 'pyarrow>=0.4.1', + # Exclude PyArrow dependency from Windows Python 2.7. 
+ 'pyarrow: platform_system != "Windows" or python_version >= "3.4"': + 'pyarrow>=0.4.1', } From 4aa1ac098519841de1bfa98748516133b6dc7a7c Mon Sep 17 00:00:00 2001 From: Alix Hamilton Date: Wed, 6 Jun 2018 09:16:13 -0700 Subject: [PATCH 71/75] BigQuery: Adds missing label and cache samples (#5440) --- docs/bigquery/snippets.py | 174 +++++++++++++++++++++++++++++++++++--- 1 file changed, 164 insertions(+), 10 deletions(-) diff --git a/docs/bigquery/snippets.py b/docs/bigquery/snippets.py index 0e1c88019c7c..e8e66ec9e74e 100644 --- a/docs/bigquery/snippets.py +++ b/docs/bigquery/snippets.py @@ -147,6 +147,32 @@ def test_list_datasets(client): # [END bigquery_list_datasets] +def test_list_datasets_by_label(client, to_delete): + dataset_id = 'list_datasets_by_label_{}'.format(_millis()) + dataset = bigquery.Dataset(client.dataset(dataset_id)) + dataset.labels = {'color': 'green'} + dataset = client.create_dataset(dataset) # API request + to_delete.append(dataset) + + # [START bigquery_list_datasets_by_label] + # from google.cloud import bigquery + # client = bigquery.Client() + + # The following label filter example will find datasets with an + # arbitrary 'color' label set to 'green' + label_filter = 'labels.color:green' + datasets = list(client.list_datasets(filter=label_filter)) + + if datasets: + print('Datasets filtered by {}:'.format(label_filter)) + for dataset in datasets: # API request(s) + print('\t{}'.format(dataset.dataset_id)) + else: + print('No datasets found with this filter.') + # [END bigquery_list_datasets_by_label] + assert len(datasets) == 1 + + def test_create_dataset(client, to_delete): """Create a dataset.""" dataset_id = 'create_dataset_{}'.format(_millis()) @@ -194,11 +220,16 @@ def test_get_dataset_information(client, to_delete): dataset = client.get_dataset(dataset_ref) # API request # View dataset properties - print('Dataset ID: '.format(dataset_id)) - print('Description: '.format(dataset.description)) + print('Dataset ID: {}'.format(dataset_id)) + print('Description: {}'.format(dataset.description)) print('Labels:') - for label, value in dataset.labels.items(): - print('\t{}: {}'.format(label, value)) + labels = dataset.labels + if labels: + for label, value in labels.items(): + print('\t{}: {}'.format(label, value)) + else: + print("\tDataset has no labels defined.") + # View tables in dataset print('Tables:') tables = list(client.list_tables(dataset_ref)) # API request(s) @@ -296,9 +327,8 @@ def test_update_dataset_default_table_expiration(client, to_delete): # [END bigquery_update_dataset_expiration] -def test_update_dataset_labels(client, to_delete): - """Update a dataset's metadata.""" - dataset_id = 'update_dataset_multiple_properties_{}'.format(_millis()) +def test_manage_dataset_labels(client, to_delete): + dataset_id = 'label_dataset_{}'.format(_millis()) dataset = bigquery.Dataset(client.dataset(dataset_id)) dataset = client.create_dataset(dataset) to_delete.append(dataset) @@ -318,6 +348,41 @@ def test_update_dataset_labels(client, to_delete): assert dataset.labels == labels # [END bigquery_label_dataset] + # [START bigquery_get_dataset_labels] + # from google.cloud import bigquery + # client = bigquery.Client() + # dataset_id = 'my_dataset' + + dataset_ref = client.dataset(dataset_id) + dataset = client.get_dataset(dataset_ref) # API request + + # View dataset labels + print('Dataset ID: {}'.format(dataset_id)) + print('Labels:') + if dataset.labels: + for label, value in dataset.labels.items(): + print('\t{}: {}'.format(label, value)) + else: + 
print("\tDataset has no labels defined.") + # [END bigquery_get_dataset_labels] + assert dataset.labels == labels + + # [START bigquery_delete_label_dataset] + # from google.cloud import bigquery + # client = bigquery.Client() + # dataset_ref = client.dataset('my_dataset') + # dataset = client.get_dataset(dataset_ref) # API request + + # This example dataset starts with one label + assert dataset.labels == {'color': 'green'} + # To delete a label from a dataset, set its value to None + dataset.labels['color'] = None + + dataset = client.update_dataset(dataset, ['labels']) # API request + + assert dataset.labels == {} + # [END bigquery_delete_label_dataset] + def test_update_dataset_access(client, to_delete): """Update a dataset's access controls.""" @@ -480,16 +545,17 @@ def test_create_table_then_add_schema(client, to_delete): def test_create_table_cmek(client, to_delete): - DATASET_ID = 'create_table_cmek_{}'.format(_millis()) - dataset = bigquery.Dataset(client.dataset(DATASET_ID)) + dataset_id = 'create_table_cmek_{}'.format(_millis()) + dataset = bigquery.Dataset(client.dataset(dataset_id)) client.create_dataset(dataset) to_delete.append(dataset) # [START bigquery_create_table_cmek] # from google.cloud import bigquery # client = bigquery.Client() + # dataset_id = 'my_dataset' - table_ref = dataset.table('my_table') + table_ref = client.dataset(dataset_id).table('my_table') table = bigquery.Table(table_ref) # Set the encryption key to use for the table. @@ -696,6 +762,70 @@ def test_table_exists(client, to_delete): assert not table_exists(client, dataset.table('i_dont_exist')) +def test_manage_table_labels(client, to_delete): + dataset_id = 'label_table_dataset_{}'.format(_millis()) + table_id = 'label_table_{}'.format(_millis()) + dataset = bigquery.Dataset(client.dataset(dataset_id)) + client.create_dataset(dataset) + to_delete.append(dataset) + + table = bigquery.Table(dataset.table(table_id), schema=SCHEMA) + table = client.create_table(table) + to_delete.insert(0, table) + + # [START bigquery_label_table] + # from google.cloud import bigquery + # client = bigquery.Client() + # table_ref = client.dataset('my_dataset').table('my_table') + # table = client.get_table(table_ref) # API request + + assert table.labels == {} + labels = {'color': 'green'} + table.labels = labels + + table = client.update_table(table, ['labels']) # API request + + assert table.labels == labels + # [END bigquery_label_table] + + # [START bigquery_get_table_labels] + # from google.cloud import bigquery + # client = bigquery.Client() + # dataset_id = 'my_dataset' + # table_id = 'my_table' + + dataset_ref = client.dataset(dataset_id) + table_ref = dataset_ref.table(table_id) + table = client.get_table(table_ref) # API Request + + # View table labels + print('Table ID: {}'.format(table_id)) + print('Labels:') + if table.labels: + for label, value in table.labels.items(): + print('\t{}: {}'.format(label, value)) + else: + print("\tTable has no labels defined.") + # [END bigquery_get_table_labels] + assert table.labels == labels + + # [START bigquery_delete_label_table] + # from google.cloud import bigquery + # client = bigquery.Client() + # table_ref = client.dataset('my_dataset').table('my_table') + # table = client.get_table(table_ref) # API request + + # This example table starts with one label + assert table.labels == {'color': 'green'} + # To delete a label from a table, set its value to None + table.labels['color'] = None + + table = client.update_table(table, ['labels']) # API request + + assert 
table.labels == {} + # [END bigquery_delete_label_table] + + def test_update_table_description(client, to_delete): """Update a table's description.""" dataset_id = 'update_table_description_dataset_{}'.format(_millis()) @@ -2291,6 +2421,30 @@ def test_client_query_dry_run(client): assert query_job.total_bytes_processed > 0 +def test_query_no_cache(client): + # [START bigquery_query_no_cache] + # from google.cloud import bigquery + # client = bigquery.Client() + + job_config = bigquery.QueryJobConfig() + job_config.use_query_cache = False + sql = """ + SELECT corpus + FROM `bigquery-public-data.samples.shakespeare` + GROUP BY corpus; + """ + query_job = client.query( + sql, + # Location must match that of the dataset(s) referenced in the query. + location='US', + job_config=job_config) # API request + + # Print the results. + for row in query_job: # API request - fetches results + print(row) + # [END bigquery_query_no_cache] + + def test_client_list_jobs(client): """List jobs for a project.""" From f80a25eb0fc72c889eb8491d2b3a2855cf020840 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 6 Jun 2018 12:07:54 -0700 Subject: [PATCH 72/75] Use operational lock when checking for activity on streams (#5445) --- .../cloud/pubsub_v1/subscriber/_protocol/bidi.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py index a0bedf7e3d8e..fd16879e50b7 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py @@ -310,7 +310,7 @@ def should_recover(exc): def __init__(self, start_rpc, should_recover, initial_request=None): super(ResumableBidiRpc, self).__init__(start_rpc, initial_request) self._should_recover = should_recover - self._operational_lock = threading.Lock() + self._operational_lock = threading.RLock() self._finalized = False self._finalize_lock = threading.Lock() @@ -393,6 +393,14 @@ def recv(self): return self._recoverable( super(ResumableBidiRpc, self).recv) + @property + def is_active(self): + """bool: True if this stream is currently open and active.""" + # Use the operational lock. It's entirely possible for something + # to check the active state *while* the RPC is being retried. + with self._operational_lock: + return self.call is not None and self.call.is_active() + class BackgroundConsumer(object): """A bi-directional stream consumer that runs in a separate thread. From ce35cd308b1983f572927fcab401828924591be3 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 6 Jun 2018 14:17:10 -0700 Subject: [PATCH 73/75] Recover streams during the gRPC error callback (#5446) --- .../pubsub_v1/subscriber/_protocol/bidi.py | 25 +++++++++++++++---- .../_protocol/streaming_pull_manager.py | 1 + .../unit/pubsub_v1/subscriber/test_bidi.py | 6 ++++- 3 files changed, 26 insertions(+), 6 deletions(-) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py index fd16879e50b7..00877e70058e 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/bidi.py @@ -235,7 +235,9 @@ def send(self, request): raise ValueError( 'Can not send() on an RPC that has never been open()ed.') - if self.is_active: + # Don't use self.is_active(), as ResumableBidiRpc will overload it + # to mean something semantically different. 
+ if self.call.is_active(): self._request_queue.put(request) else: # calling next should cause the call to raise. @@ -330,11 +332,15 @@ def _on_call_done(self, future): # "future" here is also a grpc.RpcError. if not self._should_recover(future): self._finalize(future) + else: + _LOGGER.debug('Re-opening stream from gRPC callback.') + self._reopen() def _reopen(self): with self._operational_lock: # Another thread already managed to re-open this stream. - if self.is_active: + if self.call is not None and self.call.is_active(): + _LOGGER.debug('Stream was already re-established.') return self.call = None @@ -379,10 +385,14 @@ def _recoverable(self, method, *args, **kwargs): return method(*args, **kwargs) except Exception as exc: + _LOGGER.debug('Call to retryable %r caused %s.', method, exc) if not self._should_recover(exc): self.close() + _LOGGER.debug('Not retrying %r due to %s.', method, exc) + self._finalize(exc) raise exc + _LOGGER.debug('Re-opening stream from retryable %r.', method) self._reopen() def send(self, request): @@ -398,8 +408,13 @@ def is_active(self): """bool: True if this stream is currently open and active.""" # Use the operational lock. It's entirely possible for something # to check the active state *while* the RPC is being retried. + # Also, use finalized to track the actual terminal state here. + # This is because if the stream is re-established by the gRPC thread + # it's technically possible to check this between when gRPC marks the + # RPC as inactive and when gRPC executes our callback that re-opens + # the stream. with self._operational_lock: - return self.call is not None and self.call.is_active() + return self.call is not None and not self._finalized class BackgroundConsumer(object): @@ -491,8 +506,8 @@ def _thread_main(self): else: _LOGGER.error( - 'The bidirectional RPC unexpectedly exited. %s', - self._bidi_rpc.call) + 'The bidirectional RPC unexpectedly exited. This is a truly ' + 'exceptional case. Please file a bug with your logs.') _LOGGER.info('%s exiting', _BIDIRECTIONAL_CONSUMER_NAME) diff --git a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py index edc7cfbf8802..b7b9002e2844 100644 --- a/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py +++ b/pubsub/google/cloud/pubsub_v1/subscriber/_protocol/streaming_pull_manager.py @@ -402,6 +402,7 @@ def _on_response(self, response): After the messages have all had their ack deadline updated, execute the callback for each message using the executor. 
""" + _LOGGER.debug( 'Scheduling callbacks for %s messages.', len(response.received_messages)) diff --git a/pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py b/pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py index b040e7f97887..2e72a757600a 100644 --- a/pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py +++ b/pubsub/tests/unit/pubsub_v1/subscriber/test_bidi.py @@ -285,13 +285,17 @@ def test_initial_state(self): assert bidi_rpc.is_active is False def test_done_callbacks_recoverable(self): - bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: True) + start_rpc = mock.create_autospec( + grpc.StreamStreamMultiCallable, instance=True) + bidi_rpc = bidi.ResumableBidiRpc(start_rpc, lambda _: True) callback = mock.Mock(spec=['__call__']) bidi_rpc.add_done_callback(callback) bidi_rpc._on_call_done(mock.sentinel.future) callback.assert_not_called() + start_rpc.assert_called_once() + assert bidi_rpc.is_active def test_done_callbacks_non_recoverable(self): bidi_rpc = bidi.ResumableBidiRpc(None, lambda _: False) From b9d6455a2ba2604b41cbf749ccd546b67f26ba02 Mon Sep 17 00:00:00 2001 From: Thea Flowers Date: Wed, 6 Jun 2018 14:51:26 -0700 Subject: [PATCH 74/75] Release 0.35.4 (#5447) --- pubsub/CHANGELOG.md | 7 +++++++ pubsub/setup.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/pubsub/CHANGELOG.md b/pubsub/CHANGELOG.md index 4c525ab2a09b..8a4d16a53033 100644 --- a/pubsub/CHANGELOG.md +++ b/pubsub/CHANGELOG.md @@ -4,6 +4,13 @@ [1]: https://pypi.org/project/google-cloud-pubsub/#history +## 0.35.4 + +### Implementation Changes + +- Recover streams during the gRPC error callback. (#5446) +- Use operational lock when checking for activity on streams. (#5445) + ## 0.35.3 ### Implementation Changes diff --git a/pubsub/setup.py b/pubsub/setup.py index d755d3e99b2c..48f6ed882024 100644 --- a/pubsub/setup.py +++ b/pubsub/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-pubsub' description = 'Google Cloud Pub/Sub API client library' -version = '0.35.3' +version = '0.35.4' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta' From 02d644d87915a5a1150b4f903048db2298bdd165 Mon Sep 17 00:00:00 2001 From: Tim Swast Date: Thu, 7 Jun 2018 12:49:35 -0400 Subject: [PATCH 75/75] Release 1.3.0 (#5451) --- bigquery/CHANGELOG.md | 19 +++++++++++++++++++ bigquery/setup.py | 2 +- 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/bigquery/CHANGELOG.md b/bigquery/CHANGELOG.md index 80a98b7a98fb..7f2d04197cbc 100644 --- a/bigquery/CHANGELOG.md +++ b/bigquery/CHANGELOG.md @@ -4,6 +4,25 @@ [1]: https://pypi.org/project/google-cloud-bigquery/#history +## 1.3.0 + +### New Features + +- NUMERIC type support (#5331) +- Add timeline and top-level slot-millis to query statistics. (#5312) +- Add additional statistics to query plan stages. (#5307) + +### Documentation + +- Use autosummary to split up API reference docs (#5340) +- Fix typo in Client docstrings (#5342) + +### Internal / Testing Changes + +- Prune systests identified as reduntant to snippets. 
(#5365) +- Modify system tests to use prerelease versions of grpcio (#5304) +- Improve system test performance (#5319) + ## 1.2.0 ### Implementation Changes diff --git a/bigquery/setup.py b/bigquery/setup.py index ef2062637442..77dcf695f65d 100644 --- a/bigquery/setup.py +++ b/bigquery/setup.py @@ -22,7 +22,7 @@ name = 'google-cloud-bigquery' description = 'Google BigQuery API client library' -version = '1.2.0' +version = '1.3.0' # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta'
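The series closes with coordinated releases: google-cloud-pubsub 0.35.3 and 0.35.4 (patches 69 and 74) and google-cloud-bigquery 1.3.0 (patch 75). As a quick sanity check, not part of any patch here, the sketch below compares the installed distributions against those version pins; `pkg_resources` is provided by setuptools:

    # Check installed versions against the releases cut in this series.
    # Illustrative only; the expected pins come from patches 69, 74, and 75.
    import pkg_resources

    expected = {
        'google-cloud-pubsub': '0.35.4',   # patch 74 supersedes 0.35.3
        'google-cloud-bigquery': '1.3.0',  # patch 75
    }

    for name, wanted in expected.items():
        installed = pkg_resources.get_distribution(name).version
        flag = 'OK' if installed == wanted else 'MISMATCH'
        print('{}: installed {} / released {} [{}]'.format(
            name, installed, wanted, flag))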